Compare commits

..

38 Commits

Author SHA1 Message Date
Archer
1e48922bc9 Context extract support value type (#1620)
* perf: chat box components

* perf: chatbox context

* feat: extract support value type

* workflow performance

* update doc

* feat: error response

* feat: error response

* oauth sort

* perf: logo

* fix: update laf account

* perf: team permission api

* update type
2024-05-28 23:33:05 +08:00
Archer
8ba8488086 4.8.2-fix (#1612) 2024-05-28 16:55:06 +08:00
Archer
9639139b52 Sandbox (#1610) (#1611) 2024-05-28 14:47:10 +08:00
Archer
d9f5f4ede0 Update README.md 2024-05-27 00:00:17 +08:00
Carson Yang
6609cb98dc Update README (#1589)
Signed-off-by: Carson Yang <yangchuansheng33@gmail.com>
2024-05-24 21:12:26 +08:00
Archer
74830f0ac8 Update 481.md 2024-05-24 11:10:56 +08:00
Archer
9c7c74050b Release update (#1580)
* release doc

* fix: reg metch

* perf: tool call arg

* fix: stream update variables

* remove status

* update prompt

* rename embeddong model
2024-05-24 11:07:03 +08:00
ShinChven ✨
92a3d6d268 Update config.json (#1583)
Fix embedding model names
2024-05-24 11:02:10 +08:00
Archer
c4ce1236ea perf: tool promot and reg slice;query extension prompt (#1576) 2024-05-23 15:14:22 +08:00
RandyZhang
4eb2c9bd07 更新Nextjs配置,更新服务端组件的声明 (#1570)
Co-authored-by: randy <randy@aihelp.net>
2024-05-23 09:11:33 +08:00
Archer
b1aafde7c9 4.8.1 test-fix (#1561) 2024-05-22 18:49:39 +08:00
Cheer
87e4afe89b fix: chunk preview drawer can not scroll, common drawer content scroll should decide by content; (#1539) 2024-05-22 10:18:21 +08:00
Archer
a14a8ae627 perf: input guide (#1558) 2024-05-21 18:18:32 +08:00
Archer
fb368a581c Perf input guide (#1557)
* perf: input guide code

* perf: input guide ui

* Chat input guide api

* Update app chat config store

* perf: app chat config field

* perf: app context

* perf: params

* fix: ts

* perf: filter private config

* perf: filter private config

* perf: import workflow

* perf: limit max tip amount
2024-05-21 17:52:04 +08:00
Archer
8e8ceb7439 Add request log and uncatch error tip. (#1531) 2024-05-20 10:31:44 +08:00
heheer
e35ce2caa0 feat: question guide (#1508)
* feat: question guide

* fix

* fix

* fix

* change interface

* fix
2024-05-19 17:34:16 +08:00
Archer
fd31a0b763 feat: fix admin role (#1527) 2024-05-18 13:32:50 +08:00
Archer
ba517b6a73 fix: api key delete bug (#1524) 2024-05-17 18:03:14 +08:00
Archer
2f93dedfb6 Update permission (#1522)
* Permission (#1442)

* Revert "lafAccount add pat & re request when token invalid (#76)" (#77)

This reverts commit 83d85dfe37adcaef4833385ea52ee79fd84720be.

* feat: add permission display in the team manager modal

* feat: add permission i18n

* feat: let team module acquire permission ablity

* feat: add ownerPermission property into metaData

* feat: team premission system

* feat: extract the resourcePermission from resource schemas

* fix: move enum definition to constant

* feat: auth member permission handler, invite user

* feat: permission manage

* feat: adjust the style

* feat: team card style
- add a new icon

* feat: team permission in guest mode

* chore: change the type

* chore: delete useless file

* chore: delete useless code

* feat: do not show owner in PermissionManage view

* chore: fix style

* fix: icon remove fill

* feat: adjust the codes

---------

Co-authored-by: Archer <545436317@qq.com>

* perf: permission modal

* lock

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
2024-05-17 17:42:33 +08:00
Archer
67c52992d7 External dataset (#1519)
* perf: local file create collection

* rename middleware

* perf: remove code

* feat: next14

* feat: external file dataset

* collection tags field

* external file dataset doc

* fix: ts
2024-05-17 16:44:15 +08:00
Archer
2d1ec9b3ad perf: token count (#1509) 2024-05-16 17:05:56 +08:00
Archer
6067f5aff3 perf: tiktoken count (#1507)
* perf: tiktoken count

* fix: rerank histories

* fix: rerank histories

* update npmrc
2024-05-16 15:42:15 +08:00
Archer
c6d9b15897 External dataset (#1497)
* perf: read rawText and chunk code

* perf: read raw text

* perf: read rawtext

* perf: token count

* log
2024-05-16 11:47:53 +08:00
heheer
d5073f98ab perf:change plugin version update position (#1493)
* perf:change plugin version update position

* use nodeversion
2024-05-15 17:06:49 +08:00
Archer
8386f707cd Perf workflow (#1492)
* perf: handle edge check

* search model

* feat: plugin input can render all input; fix: plugin default value

* fix ts

* feat: plugin input support required
2024-05-15 16:17:43 +08:00
Archer
cd876251b7 External dataset (#1485)
* fix: revert version

* feat: external collection

* import context

* external ui

* doc

* fix: ts

* clear invalid data

* feat: rename sub name

* fix: node if else edge remove

* fix: init

* api size

* fix: if else node refresh
2024-05-15 10:19:51 +08:00
heheer
fb04889a31 feat: node version (#1484)
* feat: node version tip

* fix

* i18n

* init version

* fix ts

* fix ts

* fix ts
2024-05-14 23:26:03 +08:00
Archer
b779e2806d fix doc (#1475) 2024-05-14 12:58:04 +08:00
Fengrui Liu
240f60c0ca Fixes: fix edge handler with onDelEdge (#1471)
* fixes: Fix edge handler

* fixes: fix edge handler with onDelEdge

* fixes: fix edge handler with onDelEdge
2024-05-14 00:17:44 +08:00
Archer
8d2230f24f Update intro.md 2024-05-13 17:08:37 +08:00
Archer
610ebded3b Dir tree doc and move some code (#1466)
* tree doc and move some code

* fix: ts
2024-05-13 17:07:29 +08:00
Archer
80a84a5733 Change embedding (#1463)
* rebuild embedding queue

* dataset menu

* feat: rebuild data api

* feat: ui change embedding model

* dataset ui

* feat: rebuild index ui

* rename collection
2024-05-13 14:51:42 +08:00
Archer
59fd94384d fix: session (#1455)
* fix: session

* doc

* fix: i188n
2024-05-13 11:04:50 +08:00
Archer
ee8cb0915e i18n (#1444)
* adapt not input type

* adapt not input type

* file i18n

* publish i18n

* translate

* i18n
2024-05-11 00:21:01 +08:00
Archer
8cf643d972 adapt not input type (#1443)
* adapt not input type

* adapt not input type
2024-05-10 22:27:32 +08:00
Archer
26f4c92124 Perf: i18n ns (#1441)
* i18n

* fix: handle
2024-05-10 18:41:41 +08:00
heheer
f351d4ea68 fix: openapi integer & array type (#1439) 2024-05-10 18:40:01 +08:00
Archer
d70efe1d6f Fix export dataset (#1436)
* fix: export dataset

* remove file buffer
2024-05-10 16:15:23 +08:00
496 changed files with 17105 additions and 6999 deletions

View File

@@ -0,0 +1,108 @@
name: Build fastgpt-sandbox images and copy image to docker hub
on:
workflow_dispatch:
push:
paths:
- 'projects/sandbox/**'
tags:
- 'v*'
jobs:
build-fastgpt-sandbox-images:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Dependencies
run: |
sudo apt update && sudo apt install -y nodejs npm
- name: Set up QEMU (optional)
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
driver-opts: network=host
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GH_PAT }}
- name: Set DOCKER_REPO_TAGGED based on branch or tag
run: |
if [[ "${{ github.ref_name }}" == "main" ]]; then
echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox:latest" >> $GITHUB_ENV
else
echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox:${{ github.ref_name }}" >> $GITHUB_ENV
fi
- name: Build and publish image for main branch or tag push event
env:
DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }}
run: |
docker buildx build \
-f projects/sandbox/Dockerfile \
--platform linux/amd64,linux/arm64 \
--label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/fastgpt-sandbox" \
--label "org.opencontainers.image.description=fastgpt-sandbox image" \
--push \
--cache-from=type=local,src=/tmp/.buildx-cache \
--cache-to=type=local,dest=/tmp/.buildx-cache \
-t ${DOCKER_REPO_TAGGED} \
.
push-to-ali-hub:
needs: build-fastgpt-sandbox-images
runs-on: ubuntu-20.04
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Login to Ali Hub
uses: docker/login-action@v2
with:
registry: registry.cn-hangzhou.aliyuncs.com
username: ${{ secrets.ALI_HUB_USERNAME }}
password: ${{ secrets.ALI_HUB_PASSWORD }}
- name: Set DOCKER_REPO_TAGGED based on branch or tag
run: |
if [[ "${{ github.ref_name }}" == "main" ]]; then
echo "IMAGE_TAG=latest" >> $GITHUB_ENV
else
echo "IMAGE_TAG=${{ github.ref_name }}" >> $GITHUB_ENV
fi
- name: Pull image from GitHub Container Registry
run: docker pull ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox:${{env.IMAGE_TAG}}
- name: Tag image with Docker Hub repository name and version tag
run: docker tag ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox:${{env.IMAGE_TAG}} ${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sandbox:${{env.IMAGE_TAG}}
- name: Push image to Docker Hub
run: docker push ${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sandbox:${{env.IMAGE_TAG}}
push-to-docker-hub:
needs: build-fastgpt-sandbox-images
runs-on: ubuntu-20.04
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_HUB_NAME }}
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Set DOCKER_REPO_TAGGED based on branch or tag
run: |
if [[ "${{ github.ref_name }}" == "main" ]]; then
echo "IMAGE_TAG=latest" >> $GITHUB_ENV
else
echo "IMAGE_TAG=${{ github.ref_name }}" >> $GITHUB_ENV
fi
- name: Pull image from GitHub Container Registry
run: docker pull ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox:${{env.IMAGE_TAG}}
- name: Tag image with Docker Hub repository name and version tag
run: docker tag ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox:${{env.IMAGE_TAG}} ${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sandbox:${{env.IMAGE_TAG}}
- name: Push image to Docker Hub
run: docker push ${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sandbox:${{env.IMAGE_TAG}}

View File

@@ -49,7 +49,7 @@ jobs:
DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }}
run: |
docker buildx build \
--build-arg name=app \
-f projects/app/Dockerfile \
--platform linux/amd64,linux/arm64 \
--label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/FastGPT" \
--label "org.opencontainers.image.description=fastgpt image" \
@@ -57,7 +57,6 @@ jobs:
--cache-from=type=local,src=/tmp/.buildx-cache \
--cache-to=type=local,dest=/tmp/.buildx-cache \
-t ${DOCKER_REPO_TAGGED} \
-f Dockerfile \
.
push-to-docker-hub:
needs: build-fastgpt-images
@@ -81,9 +80,9 @@ jobs:
- name: Pull image from GitHub Container Registry
run: docker pull ghcr.io/${{ github.repository_owner }}/fastgpt:${{env.IMAGE_TAG}}
- name: Tag image with Docker Hub repository name and version tag
run: docker tag ghcr.io/${{ github.repository_owner }}/fastgpt:${{env.IMAGE_TAG}} ${{ secrets.DOCKER_IMAGE_NAME }}:${{env.IMAGE_TAG}}
run: docker tag ghcr.io/${{ github.repository_owner }}/fastgpt:${{env.IMAGE_TAG}} ${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt:${{env.IMAGE_TAG}}
- name: Push image to Docker Hub
run: docker push ${{ secrets.DOCKER_IMAGE_NAME }}:${{env.IMAGE_TAG}}
run: docker push ${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt:${{env.IMAGE_TAG}}
push-to-ali-hub:
needs: build-fastgpt-images
if: github.repository == 'labring/FastGPT'
@@ -107,6 +106,6 @@ jobs:
- name: Pull image from GitHub Container Registry
run: docker pull ghcr.io/${{ github.repository_owner }}/fastgpt:${{env.IMAGE_TAG}}
- name: Tag image with Docker Hub repository name and version tag
run: docker tag ghcr.io/${{ github.repository_owner }}/fastgpt:${{env.IMAGE_TAG}} ${{ secrets.ALI_IMAGE_NAME }}:${{env.IMAGE_TAG}}
run: docker tag ghcr.io/${{ github.repository_owner }}/fastgpt:${{env.IMAGE_TAG}} ${{ secrets.ALI_IMAGE_NAME }}/fastgpt:${{env.IMAGE_TAG}}
- name: Push image to Docker Hub
run: docker push ${{ secrets.ALI_IMAGE_NAME }}:${{env.IMAGE_TAG}}
run: docker push ${{ secrets.ALI_IMAGE_NAME }}/fastgpt:${{env.IMAGE_TAG}}

View File

@@ -44,15 +44,14 @@ jobs:
DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }}
run: |
docker buildx build \
--build-arg name=app \
--label "org.opencontainers.image.source= https://github.com/ ${{ github.repository_owner }}/FastGPT" \
--label "org.opencontainers.image.description=fastgpt-pr image" \
-f projects/app/Dockerfile \
--label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/FastGPT" \
--label "org.opencontainers.image.description=fastgpt-pr imae" \
--label "org.opencontainers.image.licenses=Apache" \
--push \
--cache-from=type=local,src=/tmp/.buildx-cache \
--cache-to=type=local,dest=/tmp/.buildx-cache \
-t ${DOCKER_REPO_TAGGED} \
-f Dockerfile \
.
helm-check:

1
.npmrc Normal file
View File

@@ -0,0 +1 @@
public-hoist-pattern[]=*tiktoken*

46
.vscode/i18n-ally-custom-framework.yml vendored Normal file
View File

@@ -0,0 +1,46 @@
# .vscode/i18n-ally-custom-framework.yml
# An array of strings which contain Language Ids defined by VS Code
# You can check available language ids here: https://code.visualstudio.com/docs/languages/identifiers
languageIds:
- javascript
- typescript
- javascriptreact
- typescriptreact
# An array of RegExes to find the key usage. **The key should be captured in the first match group**.
# You should unescape RegEx strings in order to fit in the YAML file
# To help with this, you can use https://www.freeformatter.com/json-escape.html
usageMatchRegex:
# The following example shows how to detect `t("your.i18n.keys")`
# the `{key}` will be placed by a proper keypath matching regex,
# you can ignore it and use your own matching rules as well
- "[^\\w\\d]t\\(['\"`]({key})['\"`]"
- "[^\\w\\d]commonT\\(['\"`]({key})['\"`]"
# 支持 appT("your.i18n.keys")
- "[^\\w\\d]appT\\(['\"`]({key})['\"`]"
# 支持 datasetT("your.i18n.keys")
- "[^\\w\\d]datasetT\\(['\"`]({key})['\"`]"
- "[^\\w\\d]fileT\\(['\"`]({key})['\"`]"
- "[^\\w\\d]publishT\\(['\"`]({key})['\"`]"
- "[^\\w\\d]workflowT\\(['\"`]({key})['\"`]"
- "[^\\w\\d]userT\\(['\"`]({key})['\"`]"
- "[^\\w\\d]chatT\\(['\"`]({key})['\"`]"
# A RegEx to set a custom scope range. This scope will be used as a prefix when detecting keys
# and works like how the i18next framework identifies the namespace scope from the
# useTranslation() hook.
# You should unescape RegEx strings in order to fit in the YAML file
# To help with this, you can use https://www.freeformatter.com/json-escape.html
scopeRangeRegex: "useTranslation\\(\\s*\\[?\\s*['\"`](.*?)['\"`]"
# An array of strings containing refactor templates.
# The "$1" will be replaced by the keypath specified.
# Optional: uncomment the following two lines to use
# refactorTemplates:
# - i18n.get("$1")
# If set to true, only enables this custom framework (will disable all built-in frameworks)
monopoly: true

View File

@@ -10,14 +10,19 @@
"scope": "javascript,typescript",
"prefix": "nextapi",
"body": [
"import type { NextApiRequest, NextApiResponse } from 'next';",
"import { NextAPI } from '@/service/middle/entry';",
"import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';",
"import { NextAPI } from '@/service/middleware/entry';",
"",
"type Props = {};",
"export type ${TM_FILENAME_BASE}Query = {};",
"",
"type Response = {};",
"export type ${TM_FILENAME_BASE}Body = {};",
"",
"async function handler(req: NextApiRequest, res: NextApiResponse<any>): Promise<Response> {",
"export type ${TM_FILENAME_BASE}Response = {};",
"",
"async function handler(",
" req: ApiRequestProps<${TM_FILENAME_BASE}Body, ${TM_FILENAME_BASE}Query>,",
" res: ApiResponseType<any>",
"): Promise<${TM_FILENAME_BASE}Response> {",
" $1",
" return {}",
"}",
@@ -25,5 +30,23 @@
"export default NextAPI(handler);"
],
"description": "FastGPT Next API template"
},
"use context template": {
"scope": "typescriptreact",
"prefix": "context",
"body": [
"import { ReactNode } from 'react';",
"import { createContext } from 'use-context-selector';",
"",
"type ContextType = {$1};",
"",
"export const Context = createContext<ContextType>({});",
"",
"export const ContextProvider = ({ children }: { children: ReactNode }) => {",
" const contextValue: ContextType = {};",
" return <Context.Provider value={contextValue}>{children}</Context.Provider>;",
"};",
],
"description": "FastGPT usecontext template"
}
}

View File

@@ -11,5 +11,6 @@
"i18n-ally.sortKeys": true,
"i18n-ally.keepFulfilled": false,
"i18n-ally.sourceLanguage": "zh", // 根据此语言文件翻译其他语言文件的变量和内容
"i18n-ally.displayLanguage": "zh" // 显示语言
"i18n-ally.displayLanguage": "zh", // 显示语言
"i18n-ally.extract.targetPickingStrategy": "most-similar-by-key"
}

25
Makefile Normal file
View File

@@ -0,0 +1,25 @@
# 定义默认变量
proxy=null
image=null
# 定义目标
.PHONY: build
# 检查 target 是否定义
ifndef name
$(error name is not defined)
endif
filePath=./projects/$(name)/Dockerfile
dev:
pnpm --prefix ./projects/$(name) dev
build:
ifeq ($(proxy), taobao)
docker build -f $(filePath) -t $(image) . --build-arg proxy=taobao
else ifeq ($(proxy), clash)
docker build -f $(filePath) -t $(image) . --network host --build-arg HTTP_PROXY=http://127.0.0.1:7890 --build-arg HTTPS_PROXY=http://127.0.0.1:7890
else
docker build -f $(filePath) -t $(image) .
endif

View File

@@ -53,10 +53,9 @@ https://github.com/labring/FastGPT/assets/15308462/7d3a38df-eb0e-4388-9250-2409b
- [x] 提供简易模式,无需操作编排
- [x] 工作流编排
- [x] 源文件引用追踪
- [x] 模块封装,实现多级复
- [x] Tool 模块
- [ ] 嵌入 [Laf](https://github.com/labring/laf),实现在线编写 HTTP 模块。初版已完成。
- [ ] 插件封装功能,支持低代码渲染
- [x] 工具调
- [x] 插件 - 工作流封装能力
- [ ] Code sandbox
`2` 知识库能力
- [x] 多库复用,混用
@@ -67,14 +66,13 @@ https://github.com/labring/FastGPT/assets/15308462/7d3a38df-eb0e-4388-9250-2409b
- [x] 支持 url 读取、CSV 批量导入
- [x] 混合检索 & 重排
- [ ] 支持文件阅读器
- [ ] 更多的数据预处理方案
`3` 应用调试能力
- [x] 知识库单点搜索测试
- [x] 对话时反馈引用并可修改与删除
- [x] 完整上下文呈现
- [x] 完整模块中间值呈现
- [ ] 高级编排 DeBug 模式
- [x] 高级编排 DeBug 模式
`4` OpenAPI 接口
- [x] completions 接口 (chat 模式对齐 GPT 接口)
@@ -89,6 +87,8 @@ https://github.com/labring/FastGPT/assets/15308462/7d3a38df-eb0e-4388-9250-2409b
`6` 其他
- [x] 支持语音输入和输出 (可配置语音输入语音回答)
- [x] 模糊输入提示
- [ ] 模板市场
<a href="#readme">
<img src="https://img.shields.io/badge/-返回顶部-7d09f1.svg" alt="#" align="right">

View File

@@ -10,28 +10,31 @@
<a href="./README_ja.md">日语</a>
</p>
FastGPT is a knowledge-based Q&A system built on the LLM, offers out-of-the-box data processing and model invocation capabilities, allows for workflow orchestration through Flow visualization!
FastGPT is a knowledge-based platform built on the LLMs, offers a comprehensive suite of out-of-the-box capabilities such as data processing, RAG retrieval, and visual AI workflow orchestration, letting you easily develop and deploy complex question-answering systems without the need for extensive setup or configuration.
</div>
<p align="center">
<a href="https://fastgpt.in/">
<img height="21" src="https://img.shields.io/badge/在线使用-d4eaf7?style=flat-square&logo=spoj&logoColor=7d09f1" alt="cloud">
<img height="21" src="https://img.shields.io/badge/Try it Online-d4eaf7?style=flat-square&logo=spoj&logoColor=7d09f1" alt="cloud">
</a>
<a href="https://doc.fastgpt.in/docs/intro">
<img height="21" src="https://img.shields.io/badge/相关文档-7d09f1?style=flat-square" alt="document">
<img height="21" src="https://img.shields.io/badge/Documents-7d09f1?style=flat-square" alt="document">
</a>
<a href="https://doc.fastgpt.in/docs/development">
<img height="21" src="https://img.shields.io/badge/本地开发-%23d4eaf7?style=flat-square&logo=xcode&logoColor=7d09f1" alt="development">
</a>
<a href="/#-%E7%9B%B8%E5%85%B3%E9%A1%B9%E7%9B%AE">
<img height="21" src="https://img.shields.io/badge/相关项目-7d09f1?style=flat-square" alt="project">
<img height="21" src="https://img.shields.io/badge/Local Development-%23d4eaf7?style=flat-square&logo=xcode&logoColor=7d09f1" alt="development">
</a>
<a href="https://github.com/labring/FastGPT/blob/main/LICENSE">
<img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?style=flat-square&labelColor=d4eaf7&color=7d09f1" alt="license">
</a>
</p>
<div align="center">
[![discord](https://theme.zdassets.com/theme_assets/678183/cc59daa07820943e943c2fc283b9079d7003ff76.svg)](https://discord.gg/mp68xkZn2Q)
</div>
https://github.com/labring/FastGPT/assets/15308462/7d3a38df-eb0e-4388-9250-2409bd33f6d4
## 🛸 Use Cloud Services
@@ -117,11 +120,11 @@ Project tech stack: NextJs + TS + ChakraUI + Mongo + Postgres (Vector plugin)
- [Version Updates & Upgrades](https://doc.fastgpt.in/docs/installation/upgrading)
## 🏘️ Community
## 🏘️ Community & support
| Community Group |
| ------------------------------------------------- |
| ![](https://oss.laf.run/htr4n1-images/fastgpt-qr-code.jpg) |
+ 🌐 Visit the [FastGPT website](https://fastgpt.in/) for full documentation and useful links.
+ 💬 Join our [Discord server](https://discord.gg/mp68xkZn2Q) is to chat with FastGPT developers and other FastGPT users. This is a good place to learn about FastGPT, ask questions, and share your experiences.
+ 🐞 Create [GitHub Issues](https://github.com/labring/FastGPT/issues/new/choose) for bug reports and feature requests.
<a href="#readme">
<img src="https://img.shields.io/badge/-Back_to_Top-7d09f1.svg" alt="#" align="right">

45
dev.md
View File

@@ -1,17 +1,40 @@
# 打包命令
## Premise
Since FastGPT is managed in the same way as monorepo, it is recommended to install 'make' first during development.
monorepo Project Name:
- app: main project
-......
## Dev
```sh
# Build image, not proxy
docker build -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.4.7 --build-arg name=app .
# Give automatic script code execution permission (on non-Linux systems, you can manually execute the postinstall.sh file content)
chmod -R +x ./scripts/
# Executing under the code root directory installs all dependencies within the root package, projects, and packages
pnpm i
# build image with proxy
docker build -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.4.7 --build-arg name=app --build-arg proxy=taobao .
# Not make cmd
cd projects/app
pnpm dev
# Make cmd
make dev name=app
```
# Pg 常用索引
```sql
CREATE INDEX IF NOT EXISTS modelData_dataset_id_index ON modeldata (dataset_id);
CREATE INDEX IF NOT EXISTS modelData_collection_id_index ON modeldata (collection_id);
CREATE INDEX IF NOT EXISTS modelData_teamId_index ON modeldata (team_id);
```
## Build
```sh
# Docker cmd: Build image, not proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app
# Make cmd: Build image, not proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1
# Docker cmd: Build image with proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app --build-arg proxy=taobao
# Make cmd: Build image with proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 proxy=taobao
```

Binary file not shown.

After

Width:  |  Height:  |  Size: 163 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 122 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 75 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 39 KiB

View File

@@ -0,0 +1,54 @@
---
title: "对话问题引导"
description: "FastGPT 对话问题引导"
icon: "code"
draft: false
toc: true
weight: 350
---
![](/imgs/questionGuide.png)
## 什么是自定义问题引导
你可以为你的应用提前预设一些问题,用户在输入时,会根据输入的内容,动态搜索这些问题作为提示,从而引导用户更快的进行提问。
你可以直接在 FastGPT 中配置词库,或者提供自定义词库接口。
## 自定义词库接口
需要保证这个接口可以被用户浏览器访问。
**请求:**
```bash
curl --location --request GET 'http://localhost:3000/api/core/chat/inputGuide/query?appId=663c75302caf8315b1c00194&searchKey=你'
```
其中 `appId` 为应用ID`searchKey` 为搜索关键字最多是50个字符。
**响应**
```json
{
"code": 200,
"statusText": "",
"message": "",
"data": [
"是你",
"你是谁呀",
"你好好呀",
"你好呀",
"你是谁!",
"你好"
]
}
```
data是一个数组包含了搜索到的问题最多只需要返回5个问题。
**参数说明:**
- appId - 应用ID
- searchKey - 搜索关键字

View File

@@ -0,0 +1,26 @@
---
title: '外部文件知识库'
description: 'FastGPT 外部文件知识库功能介绍和使用方式'
icon: 'language'
draft: false
toc: true
weight: 107
---
外部文件库是 FastGPT 商业版特有功能。它允许接入你现在的文件系统,无需将文件再导入一份到 FastGPT 中。
并且,阅读权限可以通过你的文件系统进行控制。
| | | |
| --------------------- | --------------------- | --------------------- |
| ![](/imgs/external_file0.png) | ![](/imgs/external_file1.png) | ![](/imgs/external_file2.png) |
## 导入参数说明
- 外部预览地址用于跳转你的文件阅读地址会携带“文件阅读ID”进行访问。
- 文件访问URL文件可访问的地址。
- 文件阅读ID通常情况下文件访问URL是临时的。如果希望永久可以访问你需要使用该文件阅读ID并配合上“外部预览地址”跳转至新的阅读地址进行原文件访问。
- 文件名默认会自动解析文件访问URL上的文件名。如果你手动填写将会以手动填写的值为准。
[点击查看API导入文档](/docs/development/openapi/dataset/#创建一个外部文件库集合商业版)

View File

@@ -257,6 +257,13 @@ PG 数据库没有连接上/初始化失败可以查看日志。FastGPT 会
2. 非 docker 部署的,需要手动安装 pg vector 插件
3. 查看 fastgpt 日志,有没有相关报错
### Illegal instruction
可能原因:
1. arm架构。需要使用 Mongo 官方镜像: mongo:5.0.18
2. cpu 不支持 AVX无法用 mongo5需要换成 mongo4.x。把 mongo 的 image 换成: mongo:4.4.29
### Operation `auth_codes.findOne()` buffering timed out after 10000ms
mongo连接失败查看mongo的运行状态对应日志。

View File

@@ -118,4 +118,5 @@ OneAPI 的 API Key 配置错误,需要修改`OPENAI_API_KEY`环境变量,并
### bad_response_status_code bad response status code 503
1. 模型服务不可用
2. ....
2. 模型接口参数异常温度、max token等可能不适配
3. ....

View File

@@ -16,8 +16,9 @@ weight: 705
- [Git](http://git-scm.com/)
- [Docker](https://www.docker.com/)(构建镜像)
- [Node.js v18.x (不推荐最新的,可能有兼容问题)](http://nodejs.org)
- [pnpm](https://pnpm.io/) 版本 8.x.x
- [Node.js v18.17 / v20.x](http://nodejs.org)
- [pnpm](https://pnpm.io/) 版本 8.6.0 (目前官方的开发环境)
- make命令: 根据不同平台,百度安装 (官方是GNU Make 4.3)
## 开始本地开发
@@ -72,24 +73,34 @@ Mongo 数据库需要注意,需要注意在连接地址中增加 `directConnec
### 5. 运行
可参考项目根目录下的 `dev.md`
```bash
# 给自动化脚本代码执行权限(非 linux 系统, 可以手动执行里面的 postinstall.sh 文件内容)
chmod -R +x ./scripts/
# 代码根目录下执行,会安装根 package、projects 和 packages 内所有依赖
pnpm i
# 切换到应用目录
cd projects/app
# 开发模式运行
# 非 Make 运行
cd projects/app
pnpm dev
# Make 运行
make dev name=app
```
### 6. 部署打包
```bash
# 根目录下执行
docker build -t dockername/fastgpt:tag --build-arg name=app .
# 使用代理
docker build -t dockername/fastgpt:tag --build-arg name=app --build-arg proxy=taobao .
# Docker cmd: Build image, not proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app
# Make cmd: Build image, not proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1
# Docker cmd: Build image with proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app --build-arg proxy=taobao
# Make cmd: Build image with proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 proxy=taobao
```
## 提交代码至开源仓库
@@ -101,21 +112,21 @@ docker build -t dockername/fastgpt:tag --build-arg name=app --build-arg proxy=ta
如果遇到问题,比如合并冲突或不知道如何打开拉取请求,请查看 GitHub 的[拉取请求教程](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests),了解如何解决合并冲突和其他问题。一旦您的 PR 被合并,您将自豪地被列为[贡献者表](https://github.com/labring/FastGPT/graphs/contributors)中的一员。
## QA
### 本地数据库无法连接
1. 如果你是连接远程的数据库,先检查对应的端口是否开放。
2. 如果是本地运行的数据库,可尝试`host`改成`localhost``127.0.0.1`
3. 本地连接远程的 Mongo需要增加 `directConnection=true` 参数,才能连接上副本集的数据库。
4. mongo使用`mongocompass`客户端进行连接测试和可视化管理。
5. pg使用`navicat`进行连接和管理。
### sh ./scripts/postinstall.sh 没权限
FastGPT 在`pnpm i`后会执行`postinstall`脚本,用于自动生成`ChakraUI``Type`。如果没有权限,可以先执行`chmod -R +x ./scripts/`,再执行`pnpm i`
### 长时间运行后崩溃
似乎是由于 tiktoken 库的开发环境问题,生产环境中未遇到,暂时可忽略。
仍不可行的话,可以手动执行`./scripts/postinstall.sh`里的内容。
### TypeError: Cannot read properties of null (reading 'useMemo' )
@@ -133,3 +144,57 @@ FastGPT 在`pnpm i`后会执行`postinstall`脚本,用于自动生成`ChakraUI
遇到困难了吗?有任何问题吗? 加入微信群与开发者和用户保持沟通。
<img width="400px" src="https://oss.laf.run/htr4n1-images/fastgpt-qr-code.jpg" class="medium-zoom-image" />
## 代码结构说明
### nextjs
FastGPT 使用了 nextjs 的 page route 作为框架。为了区分好前后端代码,在目录分配上会分成 global, service, web 3个自目录分别对应着 `前后端共用``后端专用``前端专用`的代码。
### monorepo
FastGPT 采用 pnpm workspace 方式构建 monorepo 项目,主要分为两个部分:
- projects/app - FastGPT 主项目
- packages/ - 子模块
- global - 共用代码,通常是放一些前后端都能执行的函数、类型声明、常量。
- service - 服务端代码
- web - 前端代码
- plugin - 工作流自定义插件的代码
### 领域驱动模式DDD
FastGPT 在代码模块划分时按DDD的思想进行划分主要分为以下几个领域
core - 核心功能(知识库,工作流,应用,对话)
support - 支撑功能(用户体系,计费,鉴权等)
common - 基础功能(日志管理,文件读写等)
{{% details title="代码结构说明" closed="true" %}}
```
.
├── .github // github 相关配置
├── .husky // 格式化配置
├── docSite // 文档
├── files // 一些外部文件,例如 docker-compose, helm
├── packages // 子包
│ ├── global // 前后端通用子包
│ ├── plugins // 工作流插件(需要自定义包时候使用到)
│ ├── service // 后端子包
│ └── web // 前端子包
├── projects
│ └── app // FastGPT 主项目
├── python // 存放一些模型代码,和 FastGPT 本身无关
└── scripts // 一些自动化脚本
├── icon // icon预览脚本可以在顶层 pnpm initIcon(把svg写入到代码中), pnpm previewIcon预览icon
└── postinstall.sh // chakraUI自定义theme初始化 ts 类型
├── package.json // 顶层monorepo
├── pnpm-lock.yaml
├── pnpm-workspace.yaml // monorepo 声明
├── Dockerfile
├── LICENSE
├── README.md
├── README_en.md
├── README_ja.md
├── dev.md
```
{{% /details %}}

View File

@@ -1,10 +1,9 @@
---
weight: 762
title: "Docker Mongo迁移(dump模式)"
description: "FastGPT Docker Mongo迁移"
icon: database
draft: false
images: []
weight: 762
---
## 作者

View File

@@ -69,7 +69,7 @@ curl --location --request POST 'https://api.fastgpt.in/api/v1/chat/completions'
## 响应
{{< tabs tabTotal="4" >}}
{{< tabs tabTotal="5" >}}
{{< tab tabName="detail=false,stream=false 响应" >}}
{{< markdownify >}}
@@ -211,10 +211,10 @@ data: {"id":"","object":"","created":0,"choices":[{"delta":{"content":"《"},"in
{{< markdownify >}}
```bash
event: moduleStatus
event: flowNodeStatus
data: {"status":"running","name":"知识库搜索"}
event: moduleStatus
event: flowNodeStatus
data: {"status":"running","name":"AI 对话"}
event: answer
@@ -238,10 +238,28 @@ data: {"id":"","object":"","created":0,"model":"","choices":[{"delta":{},"index"
event: answer
data: [DONE]
event: appStreamResponse
event: flowResponses
data: [{"moduleName":"知识库搜索","moduleType":"datasetSearchNode","runningTime":1.78},{"question":"导演是谁","quoteList":[{"id":"654f2e49b64caef1d9431e8b","q":"电影《铃芽之旅》的导演是谁?","a":"电影《铃芽之旅》的导演是新海诚!","indexes":[{"type":"qa","dataId":"3515487","text":"电影《铃芽之旅》的导演是谁?","_id":"654f2e49b64caef1d9431e8c","defaultIndex":true}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8935586214065552},{"id":"6552e14c50f4a2a8e632af11","q":"导演是谁?","a":"电影《铃芽之旅》的导演是新海诚。","indexes":[{"defaultIndex":true,"type":"qa","dataId":"3644565","text":"导演是谁?\n电影《铃芽之旅》的导演是新海诚。","_id":"6552e14dde5cc7ba3954e417"}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8890955448150635},{"id":"654f34a0b64caef1d946337e","q":"本作的主人公是谁?","a":"本作的主人公是名叫铃芽的少女。","indexes":[{"type":"qa","dataId":"3515541","text":"本作的主人公是谁?","_id":"654f34a0b64caef1d946337f","defaultIndex":true}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8738770484924316},{"id":"654f3002b64caef1d944207a","q":"电影《铃芽之旅》男主角是谁?","a":"电影《铃芽之旅》男主角是宗像草太,由松村北斗配音。","indexes":[{"type":"qa","dataId":"3515538","text":"电影《铃芽之旅》男主角是谁?","_id":"654f3002b64caef1d944207b","defaultIndex":true}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8607980012893677},{"id":"654f2fc8b64caef1d943fd46","q":"电影《铃芽之旅》的编剧是谁?","a":"新海诚是本片的编剧。","indexes":[{"defaultIndex":true,"type":"qa","dataId":"3515550","text":"电影《铃芽之旅》的编剧是谁22","_id":"654f2fc8b64caef1d943fd47"}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8468944430351257}],"moduleName":"AI 
对话","moduleType":"chatNode","runningTime":1.86}]
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="detail=true,stream=true 时event值" >}}
{{< markdownify >}}
event取值
- answer: 返回给客户端的文本(最终会算作回答)
- fastAnswer: 指定回复返回给客户端的文本(最终会算作回答)
- toolCall: 执行工具
- toolParams: 工具参数
- toolResponse: 工具返回
- flowNodeStatus: 运行到的节点状态
- flowResponses: 节点完整响应
- updateVariables: 更新变量
- error: 报错
{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}

View File

@@ -295,6 +295,24 @@ curl --location --request DELETE 'http://localhost:3000/api/core/dataset/delete?
## 集合
### 通用创建参数说明
**入参**
| 参数 | 说明 | 必填 |
| --- | --- | --- |
| datasetId | 知识库ID | ✅ |
| parentId | 父级ID不填则默认为根目录 | |
| trainingType | 训练模式。chunk: 按文本长度进行分割;qa: QA拆分;auto: 增强训练 | ✅ |
| chunkSize | 预估块大小 | |
| chunkSplitter | 自定义最高优先分割符号 | |
| qaPrompt | qa拆分提示词 | |
**出参**
- collectionId - 新建的集合ID
- insertLen - 插入的块数量
### 创建一个空的集合
{{< tabs tabTotal="3" >}}
@@ -500,7 +518,7 @@ data 为集合的 ID。
{{< /tab >}}
{{< /tabs >}}
### 创建一个文件集合(商业版)
### 创建一个文件集合
传入一个文件创建一个集合会读取文件内容进行分割。目前支持pdf, docx, md, txt, html, csv。
@@ -509,7 +527,7 @@ data 为集合的 ID。
{{< markdownify >}}
```bash
curl --location --request POST 'http://localhost:3000/api/proApi/core/dataset/collection/create/file' \
curl --location --request POST 'http://localhost:3000/api/core/dataset/collection/create/localFile' \
--header 'Authorization: Bearer {{authorization}}' \
--form 'file=@"C:\\Users\\user\\Desktop\\fastgpt测试文件\\index.html"' \
--form 'data="{\"datasetId\":\"6593e137231a2be9c5603ba7\",\"parentId\":null,\"trainingType\":\"chunk\",\"chunkSize\":512,\"chunkSplitter\":\"\",\"qaPrompt\":\"\",\"metadata\":{}}"'
@@ -565,6 +583,68 @@ data 为集合的 ID。
{{< /tab >}}
{{< /tabs >}}
### 创建一个外部文件库集合(商业版)
{{< tabs tabTotal="3" >}}
{{< tab tabName="请求示例" >}}
{{< markdownify >}}
```bash
curl --location --request POST 'http://localhost:3000/api/proApi/core/dataset/collection/create/externalFileUrl' \
--header 'Authorization: Bearer {{authorization}}' \
--header 'User-Agent: Apifox/1.0.0 (https://apifox.com)' \
--header 'Content-Type: application/json' \
--data-raw '{
"externalFileUrl":"https://image.xxxxx.com/fastgpt-dev/%E6%91%82.pdf",
"externalFileId":"1111",
"filename":"自定义文件名",
"datasetId":"6642d105a5e9d2b00255b27b",
"parentId": null,
"trainingType": "chunk",
"chunkSize":512,
"chunkSplitter":"",
"qaPrompt":""
}'
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="参数说明" >}}
{{< markdownify >}}
| 参数 | 说明 | 必填 |
| --- | --- | --- |
| externalFileUrl | 文件访问链接(可以是临时链接) | ✅ |
| externalFileId | 外部文件ID | |
| filename | 自定义文件名 | |
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="响应示例" >}}
{{< markdownify >}}
data 为集合的 ID。
```json
{
"code": 200,
"statusText": "",
"message": "",
"data": {
"collectionId": "6646fcedfabd823cdc6de746",
"insertLen": 3
}
}
```
{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}
### 获取集合列表
{{< tabs tabTotal="3" >}}

View File

@@ -247,7 +247,7 @@ curl --location --request POST '{{host}}/shareAuth/finish' \
```ts
type ResponseType = {
moduleType: `${FlowNodeTypeEnum}`; // 模块类型
moduleType: FlowNodeTypeEnum; // 模块类型
moduleName: string; // 模块名
moduleLogo?: string; // logo
runningTime?: number; // 运行时间

View File

@@ -1,5 +1,5 @@
---
title: 'V4.8(开发中)'
title: 'V4.8'
description: 'FastGPT V4.8 更新说明'
icon: 'upgrade'
draft: false

View File

@@ -0,0 +1,39 @@
---
title: 'V4.8.1(需要初始化)'
description: 'FastGPT V4.8.1 更新说明'
icon: 'upgrade'
draft: false
toc: true
weight: 823
---
## 初始化脚本
从任意终端,发起 1 个 HTTP 请求。其中 {{rootkey}} 替换成环境变量里的 `rootkey`{{host}} 替换成FastGPT的域名。
```bash
curl --location --request POST 'https://{{host}}/api/admin/initv481' \
--header 'rootkey: {{rootkey}}' \
--header 'Content-Type: application/json'
```
由于之前集合名不规范,该初始化会重置表名。请在初始化前,确保 dataset.trainings 表没有数据。
最好更新该版本时,暂停所有进行中业务,再进行初始化,避免数据冲突。
## 执行脏数据清理
从任意终端,发起 1 个 HTTP 请求。其中 {{rootkey}} 替换成环境变量里的 `rootkey`{{host}} 替换成FastGPT的域名。
```bash
curl --location --request POST 'https://{{host}}/api/admin/clearInvalidData' \
--header 'rootkey: {{rootkey}}' \
--header 'Content-Type: application/json'
```
初始化完后,可以执行这个命令。之前定时清理的定时器有些问题,部分数据没被清理,可以手动执行清理。
## V4.8.1 更新说明
使用 Chat api 接口需要注意,增加了 event: updateVariables 事件,用于更新变量。
[点击查看升级说明](https://github.com/labring/FastGPT/releases/tag/v4.8.1)

View File

@@ -0,0 +1,34 @@
---
title: 'V4.8.2'
description: 'FastGPT V4.8.2 更新说明'
icon: 'upgrade'
draft: false
toc: true
weight: 822
---
## Sealos 升级说明
1. 在应用管理中新建一个应用镜像为registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.8.1
2. 无需外网访问地址
3. 部署完后,复制应用的内网地址
4. 点击变更 `FastGPT`,修改环境变量,增加下面的环境变量即可
```
SANDBOX_URL=内网地址
```
## Docker 部署
可以拉取最新 [docker-compose.yml](https://github.com/labring/FastGPT/blob/main/files/deploy/fastgpt/docker-compose.yml) 文件参考
1. 新增一个容器 `sandbox`
2. fastgpt容器新增环境变量: `SANDBOX_URL`
3. sandbox 建议不要开启外网访问,未做凭证校验。
## V4.8.2 更新说明
1. 新增 - js代码运行节点(更完整的type提醒,后续继续完善)
2. 新增 - 内容提取节点支持数据类型选择
3. 修复 - 新增的站点同步无法使用
4. 修复 - 定时任务无法输入内容

View File

@@ -1,12 +1,13 @@
# 数据库的默认账号和密码仅首次运行时设置有效
# 如果修改了账号密码,记得改数据库和项目连接参数,别只改一处~
# 该配置文件只是给快速启动,测试使用。正式使用,记得务必修改账号密码,以及调整合适的知识库参数,共享内存等。
# 如果无法访问 dockerhub 和 git,可以用阿里云(阿里云没有arm包)
version: '3.3'
services:
pg:
# image: pgvector/pgvector:0.7.0-pg15 # docker hub
image: registry.cn-hangzhou.aliyuncs.com/fastgpt/pgvector:v0.7.0 # 阿里云
image: pgvector/pgvector:0.7.0-pg15 # docker hub
# image: registry.cn-hangzhou.aliyuncs.com/fastgpt/pgvector:v0.7.0 # 阿里云
container_name: pg
restart: always
ports: # 生产环境建议不要暴露
@@ -21,7 +22,9 @@ services:
volumes:
- ./pg/data:/var/lib/postgresql/data
mongo:
image: registry.cn-hangzhou.aliyuncs.com/fastgpt/mongo:5.0.18
image: mongo:5.0.18 # dockerhub
# image: registry.cn-hangzhou.aliyuncs.com/fastgpt/mongo:5.0.18 # 阿里云
# image: mongo:4.4.29 # cpu不支持AVX时候使用
container_name: mongo
restart: always
ports:
@@ -64,10 +67,17 @@ services:
# 等待docker-entrypoint.sh脚本执行的MongoDB服务进程
wait $$!
sandbox:
container_name: sandbox
image: ghcr.io/labring/fastgpt-sandbox:v4.8.2 # git
# image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.8.2 # 阿里云
networks:
- fastgpt
restart: always
fastgpt:
container_name: fastgpt
image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.7 # git
# image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.7 # 阿里云
image: ghcr.io/labring/fastgpt:v4.8.2 # git
# image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.2 # 阿里云
ports:
- 3000:3000
networks:
@@ -75,6 +85,7 @@ services:
depends_on:
- mongo
- pg
- sandbox
restart: always
environment:
# root 密码,用户名为: root。如果需要修改 root 密码,直接修改这个环境变量,并重启即可。
@@ -95,6 +106,8 @@ services:
- MONGODB_URI=mongodb://myusername:mypassword@mongo:27017/fastgpt?authSource=admin
# pg 连接参数
- PG_URL=postgresql://username:password@pg:5432/postgres
# sandbox 地址
- SANDBOX_URL=http://sandbox:3000
volumes:
- ./config.json:/app/data/config.json
- ./fastgpt/tmp:/app/tmp

View File

@@ -4,7 +4,7 @@ export enum BucketNameEnum {
}
export const bucketNameMap = {
[BucketNameEnum.dataset]: {
label: 'common.file.bucket.dataset'
label: 'file.bucket.dataset'
}
};

View File

@@ -13,36 +13,36 @@ export enum MongoImageTypeEnum {
}
export const mongoImageTypeMap = {
[MongoImageTypeEnum.systemAvatar]: {
label: 'common.file.type.appAvatar',
label: 'appAvatar',
unique: true
},
[MongoImageTypeEnum.appAvatar]: {
label: 'common.file.type.appAvatar',
label: 'appAvatar',
unique: true
},
[MongoImageTypeEnum.pluginAvatar]: {
label: 'common.file.type.pluginAvatar',
label: 'pluginAvatar',
unique: true
},
[MongoImageTypeEnum.datasetAvatar]: {
label: 'common.file.type.datasetAvatar',
label: 'datasetAvatar',
unique: true
},
[MongoImageTypeEnum.userAvatar]: {
label: 'common.file.type.userAvatar',
label: 'userAvatar',
unique: true
},
[MongoImageTypeEnum.teamAvatar]: {
label: 'common.file.type.teamAvatar',
label: 'teamAvatar',
unique: true
},
[MongoImageTypeEnum.chatImage]: {
label: 'common.file.type.chatImage',
label: 'chatImage',
unique: false
},
[MongoImageTypeEnum.collectionImage]: {
label: 'common.file.type.collectionImage',
label: 'collectionImage',
unique: false
}
};

View File

@@ -9,6 +9,9 @@ type SplitProps = {
overlapRatio?: number;
customReg?: string[];
};
export type TextSplitProps = Omit<SplitProps, 'text' | 'chunkLen'> & {
chunkLen?: number;
};
type SplitResponse = {
chunks: string[];
@@ -49,6 +52,7 @@ const strIsMdTable = (str: string) => {
return false;
}
}
return true;
};
const markdownTableSplit = (props: SplitProps): SplitResponse => {
@@ -77,6 +81,10 @@ ${mdSplitString}
chunk += `${splitText2Lines[i]}\n`;
}
if (chunk) {
chunks.push(chunk);
}
return {
chunks,
chars: chunks.reduce((sum, chunk) => sum + chunk.length, 0)

View File

@@ -6,6 +6,42 @@ export const formatTime2YMDHM = (time?: Date) =>
export const formatTime2YMD = (time?: Date) => (time ? dayjs(time).format('YYYY-MM-DD') : '');
export const formatTime2HM = (time: Date = new Date()) => dayjs(time).format('HH:mm');
/**
* 格式化时间成聊天格式
*/
/**
 * Format a timestamp for display in the chat list (labels are Chinese).
 * Buckets, checked in order: "just now" (< 60s), today (HH:mm),
 * yesterday, the day before yesterday, this year (MM/DD), older (YYYY/M/D).
 */
export const formatTimeToChatTime = (time: Date) => {
  const current = dayjs();
  const moment = dayjs(time);

  // Less than a minute ago -> "刚刚" (just now)
  if (current.diff(moment, 'second') < 60) {
    return '刚刚';
  }
  // Same calendar day -> clock time only
  if (current.isSame(moment, 'day')) {
    return moment.format('HH:mm');
  }
  // Yesterday -> "昨天"
  if (current.subtract(1, 'day').isSame(moment, 'day')) {
    return '昨天';
  }
  // The day before yesterday -> "前天"
  if (current.subtract(2, 'day').isSame(moment, 'day')) {
    return '前天';
  }
  // Same calendar year -> month/day
  if (current.isSame(moment, 'year')) {
    return moment.format('MM/DD');
  }
  // Anything older -> full date
  return moment.format('YYYY/M/D');
};
/* cron time parse */
export const cronParser2Fields = (cronString: string) => {
try {

View File

@@ -64,4 +64,22 @@ export const getNanoid = (size = 12) => {
return `${firstChar}${randomsStr}`;
};
/**
 * Escape every regex metacharacter in `text` so the result can be embedded
 * in a RegExp and match the original text literally.
 */
export const replaceRegChars = (text: string): string => {
  return text.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
};
/**
 * Extract the first balanced JSON object literal from a string.
 * First strips literal "\n" sequences, stray backslashes and all spaces,
 * then matches the first `{...}` group (the regex supports up to three
 * levels of brace nesting). Returns '' when no object is found.
 */
export const sliceJsonStr = (str: string) => {
  const compact = str.replace(/(\\n|\\)/g, '').replace(/ /g, '');

  // Balanced-brace matcher: outer object with up to two nested levels.
  const candidates = compact.match(/{(?:[^{}]|{(?:[^{}]|{[^{}]*})*})*}/g);

  return candidates ? candidates[0] : '';
};

View File

@@ -66,6 +66,8 @@ export type SystemEnvType = {
vectorMaxProcess: number;
qaMaxProcess: number;
pgHNSWEfSearch: number;
tokenWorkers: number; // token count max worker
oneapiUrl?: string;
chatApiKey?: string;
};

View File

@@ -24,22 +24,26 @@ A2:
`
};
export const Prompt_ExtractJson = `你可以从 <对话记录></对话记录> 中提取指定 JSON 信息,你仅需返回 JSON 字符串,无需回答问题。
export const Prompt_ExtractJson = `你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。
<提取要求>
{{description}}
</提取要求>
<字段说明>
1. 下面的 JSON 字符串均按照 JSON Schema 的规则描述
2. key 代表字段名description 代表字段的描述enum 是可选值,代表可选的 value。
3. 如果没有可提取的内容,忽略该字段。
4. 本次需提取的JSON Schema{{json}}
</字段说明>
<提取规则>
- 本次需提取的 json 字符串,需符合 JsonSchema 的规则。
- type 代表数据类型; key 代表字段名; description 代表字段的描述; enum 是枚举值,代表可选的 value。
- 如果没有可提取的内容,忽略该字段。
</提取规则>
<JsonSchema>
{{json}}
</JsonSchema>
<对话记录>
{{text}}
</对话记录>
`;
提取的 json 字符串:`;
export const Prompt_CQJson = `请帮我执行一个“问题分类”任务,将问题分类为以下几种类型之一:

View File

@@ -1,4 +1,4 @@
import { AppWhisperConfigType } from './type';
import { AppTTSConfigType, AppWhisperConfigType } from './type';
export enum AppTypeEnum {
simple = 'simple',
@@ -13,8 +13,16 @@ export const AppTypeMap = {
}
};
export const defaultTTSConfig: AppTTSConfigType = { type: 'web' };
export const defaultWhisperConfig: AppWhisperConfigType = {
open: false,
autoSend: false,
autoTTSResponse: false
};
export const defaultChatInputGuideConfig = {
open: false,
textList: [],
customUrl: ''
};

View File

@@ -8,7 +8,7 @@ import { DatasetSearchModeEnum } from '../dataset/constants';
import { TeamTagSchema as TeamTagsSchemaType } from '@fastgpt/global/support/user/team/type.d';
import { StoreEdgeItemType } from '../workflow/type/edge';
export interface AppSchema {
export type AppSchema = {
_id: string;
teamId: string;
tmbId: string;
@@ -23,13 +23,14 @@ export interface AppSchema {
edges: StoreEdgeItemType[];
// App system config
chatConfig: AppChatConfigType;
scheduledTriggerConfig?: AppScheduledTriggerConfigType | null;
scheduledTriggerNextTime?: Date;
permission: `${PermissionTypeEnum}`;
inited?: boolean;
teamTags: string[];
}
};
export type AppListItemType = {
_id: string;
@@ -66,32 +67,19 @@ export type AppSimpleEditFormType = {
datasetSearchExtensionBg?: string;
};
selectedTools: FlowNodeTemplateType[];
userGuide: {
welcomeText: string;
variables: {
id: string;
key: string;
label: string;
type: `${VariableInputEnum}`;
required: boolean;
maxLen: number;
enums: {
value: string;
}[];
}[];
questionGuide: boolean;
tts: {
type: 'none' | 'web' | 'model';
model?: string | undefined;
voice?: string | undefined;
speed?: number | undefined;
};
whisper: AppWhisperConfigType;
scheduleTrigger: AppScheduledTriggerConfigType | null;
};
chatConfig: AppChatConfigType;
};
/* app function config */
/* app chat config type */
export type AppChatConfigType = {
welcomeText?: string;
variables?: VariableItemType[];
questionGuide?: boolean;
ttsConfig?: AppTTSConfigType;
whisperConfig?: AppWhisperConfigType;
scheduledTriggerConfig?: AppScheduledTriggerConfigType;
chatInputGuide?: ChatInputGuideConfigType;
};
export type SettingAIDataType = {
model: string;
temperature: number;
@@ -123,6 +111,11 @@ export type AppWhisperConfigType = {
autoSend: boolean;
autoTTSResponse: boolean;
};
// question guide text
export type ChatInputGuideConfigType = {
open: boolean;
customUrl: string;
};
// interval timer
export type AppScheduledTriggerConfigType = {
cronString: string;

View File

@@ -1,49 +1,42 @@
import type { AppSimpleEditFormType } from '../app/type';
import type { AppChatConfigType, AppSimpleEditFormType } from '../app/type';
import { FlowNodeTypeEnum } from '../workflow/node/constant';
import { NodeInputKeyEnum, FlowNodeTemplateTypeEnum } from '../workflow/constants';
import type { FlowNodeInputItemType } from '../workflow/type/io.d';
import { getGuideModule, splitGuideModule } from '../workflow/utils';
import { getAppChatConfig } from '../workflow/utils';
import { StoreNodeItemType } from '../workflow/type';
import { DatasetSearchModeEnum } from '../dataset/constants';
import { defaultWhisperConfig } from './constants';
export const getDefaultAppForm = (): AppSimpleEditFormType => {
return {
aiSettings: {
model: 'gpt-3.5-turbo',
systemPrompt: '',
temperature: 0,
isResponseAnswerText: true,
maxHistories: 6,
maxToken: 4000
},
dataset: {
datasets: [],
similarity: 0.4,
limit: 1500,
searchMode: DatasetSearchModeEnum.embedding,
usingReRank: false,
datasetSearchUsingExtensionQuery: true,
datasetSearchExtensionBg: ''
},
selectedTools: [],
userGuide: {
welcomeText: '',
variables: [],
questionGuide: false,
tts: {
type: 'web'
},
whisper: defaultWhisperConfig,
scheduleTrigger: null
}
};
};
/** Build a fresh simple-mode app edit form populated with default values. */
export const getDefaultAppForm = (): AppSimpleEditFormType => {
  return {
    aiSettings: {
      model: 'gpt-3.5-turbo',
      systemPrompt: '',
      temperature: 0,
      isResponseAnswerText: true,
      maxHistories: 6,
      maxToken: 4000
    },
    dataset: {
      datasets: [],
      similarity: 0.4,
      limit: 1500,
      searchMode: DatasetSearchModeEnum.embedding,
      usingReRank: false,
      datasetSearchUsingExtensionQuery: true,
      datasetSearchExtensionBg: ''
    },
    selectedTools: [],
    chatConfig: {}
  };
};
/* format app nodes to edit form */
export const appWorkflow2Form = ({ nodes }: { nodes: StoreNodeItemType[] }) => {
export const appWorkflow2Form = ({
nodes,
chatConfig
}: {
nodes: StoreNodeItemType[];
chatConfig: AppChatConfigType;
}) => {
const defaultAppForm = getDefaultAppForm();
const findInputValueByKey = (inputs: FlowNodeInputItemType[], key: string) => {
return inputs.find((item) => item.key === key)?.value;
};
@@ -102,24 +95,6 @@ export const appWorkflow2Form = ({ nodes }: { nodes: StoreNodeItemType[] }) => {
node.inputs,
NodeInputKeyEnum.datasetSearchExtensionBg
);
} else if (node.flowNodeType === FlowNodeTypeEnum.systemConfig) {
const {
welcomeText,
variableNodes,
questionGuide,
ttsConfig,
whisperConfig,
scheduledTriggerConfig
} = splitGuideModule(getGuideModule(nodes));
defaultAppForm.userGuide = {
welcomeText: welcomeText,
variables: variableNodes,
questionGuide: questionGuide,
tts: ttsConfig,
whisper: whisperConfig,
scheduleTrigger: scheduledTriggerConfig
};
} else if (node.flowNodeType === FlowNodeTypeEnum.pluginModule) {
if (!node.pluginId) return;
@@ -131,10 +106,17 @@ export const appWorkflow2Form = ({ nodes }: { nodes: StoreNodeItemType[] }) => {
intro: node.intro || '',
flowNodeType: node.flowNodeType,
showStatus: node.showStatus,
version: '481',
inputs: node.inputs,
outputs: node.outputs,
templateType: FlowNodeTemplateTypeEnum.other
});
} else if (node.flowNodeType === FlowNodeTypeEnum.systemConfig) {
defaultAppForm.chatConfig = getAppChatConfig({
chatConfig,
systemConfigNode: node,
isPublicFetch: true
});
}
});

View File

@@ -1,5 +1,6 @@
import { StoreNodeItemType } from '../workflow/type';
import { StoreEdgeItemType } from '../workflow/type/edge';
import { AppChatConfigType } from './type';
export type AppVersionSchemaType = {
_id: string;
@@ -7,4 +8,5 @@ export type AppVersionSchemaType = {
time: Date;
nodes: StoreNodeItemType[];
edges: StoreEdgeItemType[];
chatConfig: AppChatConfigType;
};

View File

@@ -0,0 +1,5 @@
export type ChatInputGuideSchemaType = {
_id: string;
appId: string;
text: string;
};

View File

@@ -10,7 +10,7 @@ import {
import { FlowNodeTypeEnum } from '../workflow/node/constant';
import { NodeOutputKeyEnum } from '../workflow/constants';
import { DispatchNodeResponseKeyEnum } from '../workflow/runtime/constants';
import { AppSchema, VariableItemType } from '../app/type';
import { AppChatConfigType, AppSchema, VariableItemType } from '../app/type';
import type { AppSchema as AppType } from '@fastgpt/global/core/app/type.d';
import { DatasetSearchModeEnum } from '../dataset/constants';
import { ChatBoxInputType } from '../../../../projects/app/src/components/ChatBox/type';
@@ -139,7 +139,7 @@ export type ChatHistoryItemType = HistoryItemType & {
/* ------- response data ------------ */
export type ChatHistoryItemResType = DispatchNodeResponseType & {
nodeId: string;
moduleType: `${FlowNodeTypeEnum}`;
moduleType: FlowNodeTypeEnum;
moduleName: string;
};

View File

@@ -11,31 +11,42 @@ export type DatasetUpdateBody = {
intro?: string;
permission?: DatasetSchemaType['permission'];
agentModel?: LLMModelItemType;
websiteConfig?: DatasetSchemaType['websiteConfig'];
status?: DatasetSchemaType['status'];
websiteConfig?: DatasetSchemaType['websiteConfig'];
externalReadUrl?: DatasetSchemaType['externalReadUrl'];
};
/* ================= collection ===================== */
export type DatasetCollectionChunkMetadataType = {
parentId?: string;
trainingType?: `${TrainingModeEnum}`;
trainingType?: TrainingModeEnum;
chunkSize?: number;
chunkSplitter?: string;
qaPrompt?: string;
metadata?: Record<string, any>;
};
// create collection params
export type CreateDatasetCollectionParams = DatasetCollectionChunkMetadataType & {
datasetId: string;
name: string;
type: `${DatasetCollectionTypeEnum}`;
type: DatasetCollectionTypeEnum;
tags?: string[];
fileId?: string;
rawLink?: string;
externalFileId?: string;
externalFileUrl?: string;
rawTextLength?: number;
hashRawText?: string;
};
export type ApiCreateDatasetCollectionParams = DatasetCollectionChunkMetadataType & {
datasetId: string;
tags?: string[];
};
export type TextCreateDatasetCollectionParams = ApiCreateDatasetCollectionParams & {
name: string;
@@ -56,6 +67,11 @@ export type CsvTableCreateDatasetCollectionParams = {
parentId?: string;
fileId: string;
};
export type ExternalFileCreateDatasetCollectionParams = ApiCreateDatasetCollectionParams & {
externalFileId?: string;
externalFileUrl: string;
filename?: string;
};
/* ================= data ===================== */
export type PgSearchRawType = {
@@ -78,7 +94,7 @@ export type PostWebsiteSyncParams = {
export type PushDatasetDataProps = {
collectionId: string;
data: PushDatasetDataChunkProps[];
trainingMode: `${TrainingModeEnum}`;
trainingMode: TrainingModeEnum;
prompt?: string;
billId?: string;
};

View File

@@ -0,0 +1,6 @@
/* sourceId = prefix-id; id=fileId;link url;externalFileId */
export enum CollectionSourcePrefixEnum {
local = 'local',
link = 'link',
external = 'external'
}

View File

@@ -0,0 +1,14 @@
import { CollectionWithDatasetType, DatasetCollectionSchemaType } from '../type';
/**
 * Resolve display source info for a dataset collection.
 * sourceId is the first truthy value among fileId / rawLink /
 * externalFileId / externalFileUrl; sourceName falls back to '' when the
 * collection (or its name) is missing.
 */
export const getCollectionSourceData = (
  collection?: CollectionWithDatasetType | DatasetCollectionSchemaType
) => {
  const sourceId =
    collection?.fileId ||
    collection?.rawLink ||
    collection?.externalFileId ||
    collection?.externalFileUrl;
  const sourceName = collection?.name || '';

  return { sourceId, sourceName };
};

View File

@@ -2,23 +2,29 @@
export enum DatasetTypeEnum {
folder = 'folder',
dataset = 'dataset',
websiteDataset = 'websiteDataset' // depp link
websiteDataset = 'websiteDataset', // depp link
externalFile = 'externalFile'
}
export const DatasetTypeMap = {
[DatasetTypeEnum.folder]: {
icon: 'common/folderFill',
label: 'core.dataset.Folder Dataset',
label: 'Folder Dataset',
collectionLabel: 'common.Folder'
},
[DatasetTypeEnum.dataset]: {
icon: 'core/dataset/commonDataset',
label: 'core.dataset.Common Dataset',
label: 'Common Dataset',
collectionLabel: 'common.File'
},
[DatasetTypeEnum.websiteDataset]: {
icon: 'core/dataset/websiteDataset',
label: 'core.dataset.Website Dataset',
label: 'Website Dataset',
collectionLabel: 'common.Website'
},
[DatasetTypeEnum.externalFile]: {
icon: 'core/dataset/externalDataset',
label: 'External File',
collectionLabel: 'common.File'
}
};
@@ -38,9 +44,11 @@ export const DatasetStatusMap = {
/* ------------ collection -------------- */
export enum DatasetCollectionTypeEnum {
folder = 'folder',
virtual = 'virtual',
file = 'file',
link = 'link', // one link
virtual = 'virtual'
externalFile = 'externalFile'
}
export const DatasetCollectionTypeMap = {
[DatasetCollectionTypeEnum.folder]: {
@@ -49,6 +57,9 @@ export const DatasetCollectionTypeMap = {
[DatasetCollectionTypeEnum.file]: {
name: 'core.dataset.file'
},
[DatasetCollectionTypeEnum.externalFile]: {
name: 'core.dataset.externalFile'
},
[DatasetCollectionTypeEnum.link]: {
name: 'core.dataset.link'
},
@@ -77,7 +88,8 @@ export enum ImportDataSourceEnum {
fileLocal = 'fileLocal',
fileLink = 'fileLink',
fileCustom = 'fileCustom',
csvTable = 'csvTable'
csvTable = 'csvTable',
externalFile = 'externalFile'
}
export enum TrainingModeEnum {
@@ -163,3 +175,10 @@ export const SearchScoreTypeMap = {
export const CustomCollectionIcon = 'common/linkBlue';
export const LinkCollectionIcon = 'common/linkBlue';
/* source prefix */
export enum DatasetSourceReadTypeEnum {
fileLocal = 'fileLocal',
link = 'link',
externalFile = 'externalFile'
}

View File

@@ -0,0 +1,14 @@
import { DatasetSourceReadTypeEnum, ImportDataSourceEnum } from './constants';
/** Map an import data source to the read type used when fetching its raw text. */
export const importType2ReadType = (type: ImportDataSourceEnum) => {
  switch (type) {
    // Local files and CSV tables are both read from uploaded files.
    case ImportDataSourceEnum.csvTable:
    case ImportDataSourceEnum.fileLocal:
      return DatasetSourceReadTypeEnum.fileLocal;
    case ImportDataSourceEnum.externalFile:
      return DatasetSourceReadTypeEnum.externalFile;
    // fileLink and any unrecognized source fall back to link reading.
    case ImportDataSourceEnum.fileLink:
    default:
      return DatasetSourceReadTypeEnum.link;
  }
};

View File

@@ -22,13 +22,16 @@ export type DatasetSchemaType = {
vectorModel: string;
agentModel: string;
intro: string;
type: `${DatasetTypeEnum}`;
type: DatasetTypeEnum;
status: `${DatasetStatusEnum}`;
permission: `${PermissionTypeEnum}`;
// metadata
websiteConfig?: {
url: string;
selector: string;
};
externalReadUrl?: string;
};
export type DatasetCollectionSchemaType = {
@@ -38,20 +41,24 @@ export type DatasetCollectionSchemaType = {
datasetId: string;
parentId?: string;
name: string;
type: `${DatasetCollectionTypeEnum}`;
type: DatasetCollectionTypeEnum;
createTime: Date;
updateTime: Date;
trainingType: `${TrainingModeEnum}`;
trainingType: TrainingModeEnum;
chunkSize: number;
chunkSplitter?: string;
qaPrompt?: string;
fileId?: string;
rawLink?: string;
tags?: string[];
fileId?: string; // local file id
rawLink?: string; // link url
externalFileId?: string; //external file id
rawTextLength?: number;
hashRawText?: string;
externalFileUrl?: string; // external import url
metadata?: {
webPageSelector?: string;
relatedImgId?: string; // The id of the associated image collections
@@ -80,6 +87,7 @@ export type DatasetDataSchemaType = {
a: string; // answer or custom content
fullTextToken: string;
indexes: DatasetDataIndexItemType[];
rebuilding?: boolean;
};
export type DatasetTrainingSchemaType = {
@@ -92,9 +100,10 @@ export type DatasetTrainingSchemaType = {
billId: string;
expireAt: Date;
lockTime: Date;
mode: `${TrainingModeEnum}`;
mode: TrainingModeEnum;
model: string;
prompt: string;
dataId?: string;
q: string;
a: string;
chunkIndex: number;
@@ -110,13 +119,19 @@ export type DatasetDataWithCollectionType = Omit<DatasetDataSchemaType, 'collect
};
/* ================= dataset ===================== */
export type DatasetSimpleItemType = {
_id: string;
avatar: string;
name: string;
vectorModel: VectorModelItemType;
};
export type DatasetListItemType = {
_id: string;
parentId: string;
avatar: string;
name: string;
intro: string;
type: `${DatasetTypeEnum}`;
type: DatasetTypeEnum;
isOwner: boolean;
canWrite: boolean;
permission: `${PermissionTypeEnum}`;

View File

@@ -3,7 +3,7 @@ import { getFileIcon } from '../../common/file/icon';
import { strIsLink } from '../../common/string/tools';
export function getCollectionIcon(
type: `${DatasetCollectionTypeEnum}` = DatasetCollectionTypeEnum.file,
type: DatasetCollectionTypeEnum = DatasetCollectionTypeEnum.file,
name = ''
) {
if (type === DatasetCollectionTypeEnum.folder) {
@@ -24,13 +24,13 @@ export function getSourceNameIcon({
sourceName: string;
sourceId?: string;
}) {
if (strIsLink(sourceId)) {
return 'common/linkBlue';
}
const fileIcon = getFileIcon(sourceName, '');
const fileIcon = getFileIcon(decodeURIComponent(sourceName), '');
if (fileIcon) {
return fileIcon;
}
if (strIsLink(sourceId)) {
return 'common/linkBlue';
}
return 'file/fill/manual';
}
@@ -46,7 +46,7 @@ export function getDefaultIndex(props?: { q?: string; a?: string; dataId?: strin
};
}
export const predictDataLimitLength = (mode: `${TrainingModeEnum}`, data: any[]) => {
export const predictDataLimitLength = (mode: TrainingModeEnum, data: any[]) => {
if (mode === TrainingModeEnum.qa) return data.length * 20;
if (mode === TrainingModeEnum.auto) return data.length * 5;
return data.length;

View File

@@ -52,13 +52,35 @@ export const str2OpenApiSchema = async (yamlStr = ''): Promise<OpenApiJsonSchema
})
.flat()
.filter(Boolean) as OpenApiJsonSchema['pathData'];
return { pathData, serverPath };
} catch (err) {
throw new Error('Invalid Schema');
}
};
/**
 * Translate an OpenAPI schema type into a workflow IO value type.
 * - integer           -> number
 * - array of known T  -> the matching array* enum value
 * - anything else     -> passed through unchanged (cast to the enum)
 */
export const getType = (schema: { type: string; items?: { type: string } }) => {
  // Array element type -> array-valued workflow type.
  const arrayTypeOf: { [key: string]: WorkflowIOValueTypeEnum } = {
    string: WorkflowIOValueTypeEnum.arrayString,
    number: WorkflowIOValueTypeEnum.arrayNumber,
    integer: WorkflowIOValueTypeEnum.arrayNumber,
    boolean: WorkflowIOValueTypeEnum.arrayBoolean,
    object: WorkflowIOValueTypeEnum.arrayObject
  };

  if (schema?.type === 'integer') {
    return WorkflowIOValueTypeEnum.number;
  }

  if (schema?.type === 'array' && schema?.items) {
    const mapped = arrayTypeOf[schema.items.type];
    if (mapped) return mapped;
  }

  // Fallback: assume the raw schema type is already a valid enum value.
  return schema?.type as WorkflowIOValueTypeEnum;
};
export const httpApiSchema2Plugins = async ({
parentId,
apiSchemaStr = '',
@@ -87,7 +109,7 @@ export const httpApiSchema2Plugins = async ({
...(item.params?.map((param: any) => {
return {
key: param.name,
valueType: param.schema.type,
valueType: getType(param.schema),
label: param.name,
renderTypeList: [FlowNodeInputTypeEnum.reference],
required: param.required,
@@ -109,7 +131,7 @@ export const httpApiSchema2Plugins = async ({
const prop = properties[key];
return {
key,
valueType: prop.type,
valueType: getType(prop),
label: key,
renderTypeList: [FlowNodeInputTypeEnum.reference],
required: false,
@@ -136,7 +158,7 @@ export const httpApiSchema2Plugins = async ({
return {
id,
key: param.name,
valueType: param.schema.type,
valueType: getType(param.schema),
label: param.name,
type: FlowNodeOutputTypeEnum.source
};
@@ -147,7 +169,7 @@ export const httpApiSchema2Plugins = async ({
return {
id,
key,
valueType: properties[key].type,
valueType: getType(properties[key]),
label: key,
type: FlowNodeOutputTypeEnum.source,
edit: true
@@ -159,7 +181,7 @@ export const httpApiSchema2Plugins = async ({
...(item.params?.map((param: any) => {
return {
key: param.name,
valueType: param.schema.type,
valueType: getType(param.schema),
label: param.name,
renderTypeList: [FlowNodeInputTypeEnum.reference],
canEdit: true,
@@ -173,7 +195,7 @@ export const httpApiSchema2Plugins = async ({
...(propsKeys?.map((key) => {
return {
key,
valueType: properties[key].type,
valueType: getType(properties[key]),
label: key,
renderTypeList: [FlowNodeInputTypeEnum.reference],
canEdit: true,
@@ -197,7 +219,7 @@ export const httpApiSchema2Plugins = async ({
if (param.in === 'header') {
httpNodeHeaders.push({
key: param.name,
type: param.schema?.type || WorkflowIOValueTypeEnum.string,
type: getType(param.schema) || WorkflowIOValueTypeEnum.string,
value: `{{${param.name}}}`
});
} else if (param.in === 'body') {
@@ -209,7 +231,7 @@ export const httpApiSchema2Plugins = async ({
} else if (param.in === 'query') {
httpNodeParams.push({
key: param.name,
type: param.schema?.type || WorkflowIOValueTypeEnum.string,
type: getType(param.schema) || WorkflowIOValueTypeEnum.string,
value: `{{${param.name}}}`
});
}
@@ -260,6 +282,7 @@ export const httpApiSchema2Plugins = async ({
x: 616.4226348688949,
y: -165.05298493910115
},
version: PluginInputModule.version,
inputs: pluginInputs,
outputs: pluginOutputs
},
@@ -274,6 +297,7 @@ export const httpApiSchema2Plugins = async ({
x: 1607.7142331269126,
y: -151.8669210746189
},
version: PluginOutputModule.version,
inputs: [
{
key: pluginOutputKey,
@@ -312,6 +336,7 @@ export const httpApiSchema2Plugins = async ({
x: 1042.549746602742,
y: -447.77496332641647
},
version: HttpModule468.version,
inputs: [
{
key: NodeInputKeyEnum.addInputParam,

View File

@@ -23,6 +23,7 @@ export type PluginItemSchema = {
customHeaders?: string;
};
version?: 'v1' | 'v2';
nodeVersion?: string;
};
/* plugin template */
@@ -32,6 +33,7 @@ export type PluginTemplateType = PluginRuntimeType & {
source: `${PluginSourceEnum}`;
templateType: FlowNodeTemplateType['templateType'];
intro: string;
nodeVersion: string;
};
export type PluginRuntimeType = {

View File

@@ -1,7 +1,7 @@
import { VectorModelItemType } from '../ai/model.d';
import { NodeInputKeyEnum } from './constants';
export type SelectedDatasetType = { datasetId: string; vectorModel: VectorModelItemType }[];
export type SelectedDatasetType = { datasetId: string }[];
export type HttpBodyType<T = Record<string, any>> = {
[NodeInputKeyEnum.addInputParam]: Record<string, any>;

View File

@@ -45,6 +45,7 @@ export enum NodeInputKeyEnum {
whisper = 'whisper',
variables = 'variables',
scheduleTrigger = 'scheduleTrigger',
chatInputGuide = 'chatInputGuide',
// entry
userChatInput = 'userChatInput',
@@ -107,7 +108,11 @@ export enum NodeInputKeyEnum {
ifElseList = 'ifElseList',
// variable update
updateList = 'updateList'
updateList = 'updateList',
// code
code = 'code',
codeType = 'codeType' // js|py
}
export enum NodeOutputKeyEnum {
@@ -117,8 +122,10 @@ export enum NodeOutputKeyEnum {
answerText = 'answerText', // module answer. the value will be show and save to history
success = 'success',
failed = 'failed',
error = 'error',
text = 'system_text',
addOutputParam = 'system_addOutputParam',
rawResponse = 'system_rawResponse',
// dataset
datasetQuoteQA = 'quoteQA',

View File

@@ -113,7 +113,8 @@ export enum FlowNodeTypeEnum {
stopTool = 'stopTool',
lafModule = 'lafModule',
ifElseNode = 'ifElseNode',
variableUpdate = 'variableUpdate'
variableUpdate = 'variableUpdate',
code = 'code'
}
export const EDGE_TYPE = 'default';

View File

@@ -31,6 +31,9 @@ export type DispatchNodeResponseType = {
runningTime?: number;
query?: string;
textOutput?: string;
error?: Record<string, any>;
customInputs?: Record<string, any>;
customOutputs?: Record<string, any>;
// bill
tokens?: number;

View File

@@ -1,5 +1,5 @@
import { ChatCompletionRequestMessageRoleEnum } from '../../ai/constants';
import { NodeOutputKeyEnum } from '../constants';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '../constants';
import { FlowNodeTypeEnum } from '../node/constant';
import { StoreNodeItemType } from '../type';
import { StoreEdgeItemType } from '../type/edge';
@@ -8,6 +8,23 @@ import { VARIABLE_NODE_ID } from '../constants';
import { isReferenceValue } from '../utils';
import { ReferenceValueProps } from '../type/io';
/**
 * Derive the maximum chat-history length needed by a workflow.
 *
 * Scans every node input for a history-related setting (`history` /
 * `historyMaxAmount`) carrying a numeric value and takes the largest one,
 * falling back to 10 when none is configured.
 *
 * @param nodes - All stored workflow nodes.
 * @returns The largest configured amount doubled (each round of dialog
 *          produces a user + assistant message pair).
 */
export const getMaxHistoryLimitFromNodes = (nodes: StoreNodeItemType[]): number => {
  const historyKeys: string[] = [NodeInputKeyEnum.history, NodeInputKeyEnum.historyMaxAmount];

  let maxAmount = 10;
  for (const node of nodes) {
    for (const input of node.inputs) {
      if (historyKeys.includes(input.key) && typeof input.value === 'number') {
        maxAmount = Math.max(maxAmount, input.value);
      }
    }
  }

  return maxAmount * 2;
};
export const initWorkflowEdgeStatus = (edges: StoreEdgeItemType[]): RuntimeEdgeItemType[] => {
return (
edges?.map((edge) => ({

View File

@@ -22,6 +22,7 @@ import type { FlowNodeTemplateType } from '../type';
import { LafModule } from './system/laf';
import { IfElseNode } from './system/ifElse/index';
import { VariableUpdateNode } from './system/variableUpdate';
import { CodeNode } from './system/sandbox';
/* app flow module templates */
export const appSystemModuleTemplates: FlowNodeTemplateType[] = [
@@ -40,7 +41,8 @@ export const appSystemModuleTemplates: FlowNodeTemplateType[] = [
AiQueryExtension,
LafModule,
IfElseNode,
VariableUpdateNode
VariableUpdateNode,
CodeNode
];
/* plugin flow module templates */
export const pluginSystemModuleTemplates: FlowNodeTemplateType[] = [
@@ -59,7 +61,8 @@ export const pluginSystemModuleTemplates: FlowNodeTemplateType[] = [
AiQueryExtension,
LafModule,
IfElseNode,
VariableUpdateNode
VariableUpdateNode,
CodeNode
];
/* all module */
@@ -84,5 +87,6 @@ export const moduleTemplatesFlat: FlowNodeTemplateType[] = [
AiQueryExtension,
LafModule,
IfElseNode,
VariableUpdateNode
VariableUpdateNode,
CodeNode
];

View File

@@ -31,6 +31,7 @@ export const AiChatModule: FlowNodeTemplateType = {
intro: 'AI 大模型对话',
showStatus: true,
isTool: true,
version: '481',
inputs: [
Input_Template_SettingAiModel,
// --- settings modal
@@ -88,6 +89,7 @@ export const AiChatModule: FlowNodeTemplateType = {
{
id: NodeOutputKeyEnum.history,
key: NodeOutputKeyEnum.history,
required: true,
label: 'core.module.output.label.New context',
description: 'core.module.output.description.New context',
valueType: WorkflowIOValueTypeEnum.chatHistory,
@@ -96,6 +98,7 @@ export const AiChatModule: FlowNodeTemplateType = {
{
id: NodeOutputKeyEnum.answerText,
key: NodeOutputKeyEnum.answerText,
required: true,
label: 'core.module.output.label.Ai response content',
description: 'core.module.output.description.Ai response content',
valueType: WorkflowIOValueTypeEnum.string,

View File

@@ -17,11 +17,14 @@ export const AssignedAnswerModule: FlowNodeTemplateType = {
name: '指定回复',
intro:
'该模块可以直接回复一段指定的内容。常用于引导、提示。非字符串内容传入时,会转成字符串进行输出。',
version: '481',
isTool: true,
inputs: [
{
key: NodeInputKeyEnum.answerText,
renderTypeList: [FlowNodeInputTypeEnum.textarea, FlowNodeInputTypeEnum.reference],
valueType: WorkflowIOValueTypeEnum.any,
required: true,
label: 'core.module.input.label.Response content',
description: 'core.module.input.description.Response content',
placeholder: 'core.module.input.description.Response content'

View File

@@ -29,6 +29,7 @@ export const ClassifyQuestionModule: FlowNodeTemplateType = {
name: '问题分类',
intro: `根据用户的历史记录和当前问题判断该次提问的类型。可以添加多组问题类型,下面是一个模板例子:\n类型1: 打招呼\n类型2: 关于商品“使用”问题\n类型3: 关于商品“购买”问题\n类型4: 其他问题`,
showStatus: true,
version: '481',
inputs: [
{
...Input_Template_SelectAIModel,
@@ -67,6 +68,7 @@ export const ClassifyQuestionModule: FlowNodeTemplateType = {
{
id: NodeOutputKeyEnum.cqResult,
key: NodeOutputKeyEnum.cqResult,
required: true,
label: '分类结果',
valueType: WorkflowIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.static

View File

@@ -25,6 +25,7 @@ export const ContextExtractModule: FlowNodeTemplateType = {
intro: '可从文本中提取指定的数据例如sql语句、搜索关键词、代码等',
showStatus: true,
isTool: true,
version: '481',
inputs: [
{
...Input_Template_SelectAIModel,
@@ -55,29 +56,24 @@ export const ContextExtractModule: FlowNodeTemplateType = {
label: '',
valueType: WorkflowIOValueTypeEnum.any,
description: "由 '描述' 和 'key' 组成一个目标字段,可提取多个目标字段",
value: [] // {desc: string; key: string; required: boolean; enum: string[]}[]
value: [] // {valueType: string; desc: string; key: string; required: boolean; enum: string[]}[]
}
],
outputs: [
// {
// id: NodeOutputKeyEnum.success,
// key: NodeOutputKeyEnum.success,
// label: '字段完全提取',
// valueType: WorkflowIOValueTypeEnum.boolean,
// type: FlowNodeOutputTypeEnum.source
// },
// {
// id: NodeOutputKeyEnum.failed,
// key: NodeOutputKeyEnum.failed,
// label: '提取字段缺失',
// description: '存在一个或多个字段未提取成功。尽管使用了默认值也算缺失。',
// valueType: WorkflowIOValueTypeEnum.boolean,
// type: FlowNodeOutputTypeEnum.source
// },
{
id: NodeOutputKeyEnum.success,
key: NodeOutputKeyEnum.success,
label: '字段完全提取',
required: true,
description: '提取字段全部填充时返回 true (模型提取或使用默认值均属于成功)',
valueType: WorkflowIOValueTypeEnum.boolean,
type: FlowNodeOutputTypeEnum.static
},
{
id: NodeOutputKeyEnum.contextExtractFields,
key: NodeOutputKeyEnum.contextExtractFields,
label: '完整提取结果',
required: true,
description: '一个 JSON 字符串,例如:{"name:":"YY","Time":"2023/7/2 18:00"}',
valueType: WorkflowIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.static

View File

@@ -42,6 +42,7 @@ export const DatasetConcatModule: FlowNodeTemplateType = {
name: '知识库搜索引用合并',
intro: '可以将多个知识库搜索结果进行合并输出。使用 RRF 的合并方式进行最终排序输出。',
showStatus: false,
version: '481',
inputs: [
{
key: NodeInputKeyEnum.datasetMaxTokens,

View File

@@ -28,6 +28,7 @@ export const DatasetSearchModule: FlowNodeTemplateType = {
intro: Dataset_SEARCH_DESC,
showStatus: true,
isTool: true,
version: '481',
inputs: [
{
key: NodeInputKeyEnum.datasetSelectList,
@@ -35,7 +36,6 @@ export const DatasetSearchModule: FlowNodeTemplateType = {
label: 'core.module.input.label.Select dataset',
value: [],
valueType: WorkflowIOValueTypeEnum.selectDataset,
list: [],
required: true
},
{

View File

@@ -12,6 +12,7 @@ export const EmptyNode: FlowNodeTemplateType = {
avatar: '',
name: '',
intro: '',
version: '481',
inputs: [],
outputs: []
};

View File

@@ -1,32 +0,0 @@
import { FlowNodeTemplateTypeEnum, WorkflowIOValueTypeEnum } from '../../constants';
import { getHandleConfig } from '../utils';
import { FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from '../../node/constant';
import { VariableItemType } from '../../../app/type';
import { FlowNodeTemplateType } from '../../type';
/**
 * Build a pseudo flow-node template that exposes the app's global variables
 * as static outputs (one string-typed output per variable), so other nodes
 * can reference them like ordinary node outputs.
 *
 * @param id        - Node id to assign to the generated template.
 * @param variables - Global variable definitions to expose.
 */
export const getGlobalVariableNode = ({
  id,
  variables
}: {
  id: string;
  variables: VariableItemType[];
}): FlowNodeTemplateType => {
  // One static output per global variable, keyed by the variable key.
  const outputs = variables.map(({ key, label }) => ({
    id: key,
    key,
    valueType: WorkflowIOValueTypeEnum.string,
    type: FlowNodeOutputTypeEnum.static,
    label
  }));

  return {
    id,
    templateType: FlowNodeTemplateTypeEnum.other,
    flowNodeType: FlowNodeTypeEnum.systemConfig,
    sourceHandle: getHandleConfig(true, true, true, true),
    targetHandle: getHandleConfig(true, true, true, true),
    avatar: '/imgs/workflow/variable.png',
    name: '全局变量',
    intro: '',
    inputs: [],
    outputs
  };
};

View File

@@ -25,6 +25,7 @@ export const HttpModule468: FlowNodeTemplateType = {
intro: '可以发出一个 HTTP 请求,实现更为复杂的操作(联网搜索、数据库查询等)',
showStatus: true,
isTool: true,
version: '481',
inputs: [
{
...Input_Template_DynamicInput,
@@ -80,10 +81,19 @@ export const HttpModule468: FlowNodeTemplateType = {
],
outputs: [
Output_Template_AddOutput,
{
id: NodeOutputKeyEnum.error,
key: NodeOutputKeyEnum.error,
label: '请求错误',
description: 'HTTP请求错误信息成功时返回空',
valueType: WorkflowIOValueTypeEnum.object,
type: FlowNodeOutputTypeEnum.static
},
{
id: NodeOutputKeyEnum.httpRawResponse,
key: NodeOutputKeyEnum.httpRawResponse,
label: '原始响应',
required: true,
description: 'HTTP请求的原始响应。只能接受字符串或JSON类型响应数据。',
valueType: WorkflowIOValueTypeEnum.any,
type: FlowNodeOutputTypeEnum.static

View File

@@ -22,6 +22,7 @@ export const IfElseNode: FlowNodeTemplateType = {
name: '判断器',
intro: '根据一定的条件,执行不同的分支。',
showStatus: true,
version: '481',
inputs: [
{
key: NodeInputKeyEnum.ifElseList,

View File

@@ -25,6 +25,7 @@ export const LafModule: FlowNodeTemplateType = {
intro: '可以调用Laf账号下的云函数。',
showStatus: true,
isTool: true,
version: '481',
inputs: [
{
...Input_Template_DynamicInput,

View File

@@ -15,6 +15,7 @@ export const PluginInputModule: FlowNodeTemplateType = {
name: '自定义插件输入',
intro: '自定义配置外部输入,使用插件时,仅暴露自定义配置的输入',
showStatus: false,
version: '481',
inputs: [],
outputs: []
};

View File

@@ -15,6 +15,7 @@ export const PluginOutputModule: FlowNodeTemplateType = {
name: '自定义插件输出',
intro: '自定义配置外部输出,使用插件时,仅暴露自定义配置的输出',
showStatus: false,
version: '481',
inputs: [],
outputs: []
};

View File

@@ -29,6 +29,7 @@ export const AiQueryExtension: FlowNodeTemplateType = {
intro:
'使用问题优化功能,可以提高知识库连续对话时搜索的精度。使用该功能后,会先利用 AI 根据上下文构建一个或多个新的检索词,这些检索词更利于进行知识库搜索。该模块已内置在知识库搜索模块中,如果您仅进行一次知识库搜索,可直接使用知识库内置的补全功能。',
showStatus: true,
version: '481',
inputs: [
{
...Input_Template_SelectAIModel,

View File

@@ -23,6 +23,8 @@ export const RunAppModule: FlowNodeTemplateType = {
name: '应用调用',
intro: '可以选择一个其他应用进行调用',
showStatus: true,
version: '481',
isTool: true,
inputs: [
{
key: NodeInputKeyEnum.runAppSelectApp,
@@ -42,6 +44,7 @@ export const RunAppModule: FlowNodeTemplateType = {
label: '新的上下文',
description: '将该应用回复内容拼接到历史记录中,作为新的上下文返回',
valueType: WorkflowIOValueTypeEnum.chatHistory,
required: true,
type: FlowNodeOutputTypeEnum.static
},
{

View File

@@ -13,6 +13,7 @@ export const RunPluginModule: FlowNodeTemplateType = {
name: '',
showStatus: false,
isTool: true,
version: '481',
inputs: [], // [{key:'pluginId'},...]
outputs: []
};

View File

@@ -0,0 +1,7 @@
// Default JavaScript snippet pre-filled into a new "code run" node.
// The sandbox calls `main` with the node's dynamic inputs as a single
// destructured object; the returned object's keys become node outputs.
export const JS_TEMPLATE = `function main({data1, data2}){
    return {
        result: data1,
        data2
    }
}`;

View File

@@ -0,0 +1,72 @@
import {
FlowNodeTemplateTypeEnum,
NodeInputKeyEnum,
NodeOutputKeyEnum,
WorkflowIOValueTypeEnum
} from '../../../constants';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../../node/constant';
import { FlowNodeTemplateType } from '../../../type';
import { getHandleConfig } from '../../utils';
import { Input_Template_DynamicInput } from '../../input';
import { Output_Template_AddOutput } from '../../output';
import { JS_TEMPLATE } from './constants';
// Workflow template for the "code run" (sandbox) node: executes a small
// user-authored script with the node's dynamic inputs as arguments and maps
// the returned object onto dynamic outputs.
export const CodeNode: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.code,
  templateType: FlowNodeTemplateTypeEnum.tools,
  flowNodeType: FlowNodeTypeEnum.code,
  // Handles enabled on all four sides (top, right, bottom, left).
  sourceHandle: getHandleConfig(true, true, true, true),
  targetHandle: getHandleConfig(true, true, true, true),
  avatar: '/imgs/workflow/code.svg',
  name: '代码运行',
  intro: '执行一段简单的脚本代码,通常用于进行复杂的数据处理。',
  showStatus: true,
  version: '482',
  inputs: [
    {
      // User-defined dynamic inputs; each becomes a parameter of the script.
      ...Input_Template_DynamicInput,
      description: '这些变量会作为代码的运行的输入参数',
      editField: {
        key: true,
        valueType: true
      }
    },
    {
      // Script language switch; hidden from the node UI. NodeInputKeyEnum
      // documents the intended values as 'js' | 'py' — only 'js' is set here.
      key: NodeInputKeyEnum.codeType,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      value: 'js'
    },
    {
      // The script source itself, rendered by a custom code-editor component.
      key: NodeInputKeyEnum.code,
      renderTypeList: [FlowNodeInputTypeEnum.custom],
      label: '',
      value: JS_TEMPLATE
    }
  ],
  outputs: [
    {
      // Dynamic outputs populated from the keys of the script's return object.
      ...Output_Template_AddOutput,
      description: '将代码中 return 的对象作为输出,传递给后续的节点'
    },
    {
      // Entire returned object, untouched.
      id: NodeOutputKeyEnum.rawResponse,
      key: NodeOutputKeyEnum.rawResponse,
      label: '完整响应数据',
      valueType: WorkflowIOValueTypeEnum.object,
      type: FlowNodeOutputTypeEnum.static
    },
    {
      // Execution error details; empty on success (mirrors the HTTP node's
      // error output).
      id: NodeOutputKeyEnum.error,
      key: NodeOutputKeyEnum.error,
      label: '运行错误',
      description: '代码运行错误信息,成功时返回空',
      valueType: WorkflowIOValueTypeEnum.object,
      type: FlowNodeOutputTypeEnum.static
    }
  ]
};

View File

@@ -13,6 +13,7 @@ export const StopToolNode: FlowNodeTemplateType = {
name: '工具调用终止',
intro:
'该模块需配置工具调用使用。当该模块被执行时本次工具调用将会强制结束并且不再调用AI针对工具调用结果回答问题。',
version: '481',
inputs: [],
outputs: []
};

View File

@@ -1,10 +1,6 @@
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '../../node/constant';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowNodeTemplateType } from '../../type/index.d';
import {
WorkflowIOValueTypeEnum,
NodeInputKeyEnum,
FlowNodeTemplateTypeEnum
} from '../../constants';
import { FlowNodeTemplateTypeEnum, WorkflowIOValueTypeEnum } from '../../constants';
import { getHandleConfig } from '../utils';
export const SystemConfigNode: FlowNodeTemplateType = {
@@ -18,44 +14,7 @@ export const SystemConfigNode: FlowNodeTemplateType = {
intro: '可以配置应用的系统参数。',
unique: true,
forbidDelete: true,
inputs: [
{
key: NodeInputKeyEnum.welcomeText,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
valueType: WorkflowIOValueTypeEnum.string,
label: 'core.app.Welcome Text'
},
{
key: NodeInputKeyEnum.variables,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
valueType: WorkflowIOValueTypeEnum.any,
label: 'core.module.Variable',
value: []
},
{
key: NodeInputKeyEnum.questionGuide,
valueType: WorkflowIOValueTypeEnum.boolean,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: ''
},
{
key: NodeInputKeyEnum.tts,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
valueType: WorkflowIOValueTypeEnum.any,
label: ''
},
{
key: NodeInputKeyEnum.whisper,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
valueType: WorkflowIOValueTypeEnum.any,
label: ''
},
{
key: NodeInputKeyEnum.scheduleTrigger,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
valueType: WorkflowIOValueTypeEnum.any,
label: ''
}
],
version: '481',
inputs: [],
outputs: []
};

View File

@@ -30,6 +30,7 @@ export const ToolModule: FlowNodeTemplateType = {
name: '工具调用(实验)',
intro: '通过AI模型自动选择一个或多个功能块进行调用也可以对插件进行调用。',
showStatus: true,
version: '481',
inputs: [
{
...Input_Template_SettingAiModel,

View File

@@ -16,8 +16,9 @@ export const VariableUpdateNode: FlowNodeTemplateType = {
avatar: '/imgs/workflow/variable.png',
name: '变量更新',
intro: '可以更新指定节点的输出值或更新全局变量',
showStatus: true,
showStatus: false,
isTool: false,
version: '481',
inputs: [
{
key: NodeInputKeyEnum.updateList,

View File

@@ -19,6 +19,7 @@ export const WorkflowStart: FlowNodeTemplateType = {
intro: '',
forbidDelete: true,
unique: true,
version: '481',
inputs: [{ ...Input_Template_UserChatInput, toolDescription: '用户问题' }],
outputs: [
{

View File

@@ -1,3 +1 @@
export const chatNodeSystemPromptTip = 'core.app.tip.chatNodeSystemPromptTip';
export const welcomeTextTip = 'core.app.tip.welcomeTextTip';
export const variableTip = 'core.app.tip.variableTip';

View File

@@ -22,12 +22,13 @@ import { RuntimeEdgeItemType, StoreEdgeItemType } from './edge';
import { NextApiResponse } from 'next';
export type FlowNodeCommonType = {
flowNodeType: `${FlowNodeTypeEnum}`; // render node card
flowNodeType: FlowNodeTypeEnum; // render node card
avatar?: string;
name: string;
intro?: string; // template list intro
showStatus?: boolean; // chatting response step status
version: string;
// data
inputs: FlowNodeInputItemType[];
@@ -63,6 +64,7 @@ export type FlowNodeTemplateType = FlowNodeCommonType & {
// action
forbidDelete?: boolean; // forbid delete
unique?: boolean;
nodeVersion?: string;
};
export type FlowNodeItemType = FlowNodeTemplateType & {
nodeId: string;
@@ -113,6 +115,7 @@ export type ClassifyQuestionAgentItemType = {
key: string;
};
export type ContextExtractAgentItemType = {
valueType: 'string' | 'number' | 'boolean';
desc: string;
key: string;
required: boolean;

View File

@@ -1,4 +1,4 @@
import { FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from './node/constant';
import { FlowNodeInputTypeEnum, FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from './node/constant';
import {
WorkflowIOValueTypeEnum,
NodeInputKeyEnum,
@@ -11,10 +11,16 @@ import type {
VariableItemType,
AppTTSConfigType,
AppWhisperConfigType,
AppScheduledTriggerConfigType
AppScheduledTriggerConfigType,
ChatInputGuideConfigType,
AppChatConfigType
} from '../app/type';
import { EditorVariablePickerType } from '../../../web/components/common/Textarea/PromptEditor/type';
import { defaultWhisperConfig } from '../app/constants';
import {
defaultChatInputGuideConfig,
defaultTTSConfig,
defaultWhisperConfig
} from '../app/constants';
import { IfElseResultEnum } from './template/system/ifElse/constant';
export const getHandleId = (nodeId: string, type: 'source' | 'target', key: string) => {
@@ -22,15 +28,9 @@ export const getHandleId = (nodeId: string, type: 'source' | 'target', key: stri
};
export const checkInputIsReference = (input: FlowNodeInputItemType) => {
const value = input.value;
if (
Array.isArray(value) &&
value.length === 2 &&
typeof value[0] === 'string' &&
typeof value[1] === 'string'
) {
if (input.renderTypeList?.[input?.selectedTypeIndex || 0] === FlowNodeInputTypeEnum.reference)
return true;
}
return false;
};
@@ -46,63 +46,78 @@ export const splitGuideModule = (guideModules?: StoreNodeItemType) => {
const welcomeText: string =
guideModules?.inputs?.find((item) => item.key === NodeInputKeyEnum.welcomeText)?.value || '';
const variableNodes: VariableItemType[] =
const variables: VariableItemType[] =
guideModules?.inputs.find((item) => item.key === NodeInputKeyEnum.variables)?.value || [];
const questionGuide: boolean =
!!guideModules?.inputs?.find((item) => item.key === NodeInputKeyEnum.questionGuide)?.value ||
false;
const ttsConfig: AppTTSConfigType = guideModules?.inputs?.find(
(item) => item.key === NodeInputKeyEnum.tts
)?.value || { type: 'web' };
const ttsConfig: AppTTSConfigType =
guideModules?.inputs?.find((item) => item.key === NodeInputKeyEnum.tts)?.value ||
defaultTTSConfig;
const whisperConfig: AppWhisperConfigType =
guideModules?.inputs?.find((item) => item.key === NodeInputKeyEnum.whisper)?.value ||
defaultWhisperConfig;
const scheduledTriggerConfig: AppScheduledTriggerConfigType | null =
guideModules?.inputs?.find((item) => item.key === NodeInputKeyEnum.scheduleTrigger)?.value ??
null;
const scheduledTriggerConfig: AppScheduledTriggerConfigType = guideModules?.inputs?.find(
(item) => item.key === NodeInputKeyEnum.scheduleTrigger
)?.value;
const chatInputGuide: ChatInputGuideConfigType =
guideModules?.inputs?.find((item) => item.key === NodeInputKeyEnum.chatInputGuide)?.value ||
defaultChatInputGuideConfig;
return {
welcomeText,
variableNodes,
variables,
questionGuide,
ttsConfig,
whisperConfig,
scheduledTriggerConfig
scheduledTriggerConfig,
chatInputGuide
};
};
export const replaceAppChatConfig = ({
node,
variableList,
welcomeText
export const getAppChatConfig = ({
chatConfig,
systemConfigNode,
storeVariables,
storeWelcomeText,
isPublicFetch = false
}: {
node?: StoreNodeItemType;
variableList?: VariableItemType[];
welcomeText?: string;
}): StoreNodeItemType | undefined => {
if (!node) return;
return {
...node,
inputs: node.inputs.map((input) => {
if (input.key === NodeInputKeyEnum.variables && variableList) {
return {
...input,
value: variableList
};
}
if (input.key === NodeInputKeyEnum.welcomeText && welcomeText) {
return {
...input,
value: welcomeText
};
}
chatConfig?: AppChatConfigType;
systemConfigNode?: StoreNodeItemType;
storeVariables?: VariableItemType[];
storeWelcomeText?: string;
isPublicFetch: boolean;
}): AppChatConfigType => {
const {
welcomeText,
variables,
questionGuide,
ttsConfig,
whisperConfig,
scheduledTriggerConfig,
chatInputGuide
} = splitGuideModule(systemConfigNode);
return input;
})
const config: AppChatConfigType = {
questionGuide,
ttsConfig,
whisperConfig,
scheduledTriggerConfig,
chatInputGuide,
...chatConfig,
variables: storeVariables ?? chatConfig?.variables ?? variables,
welcomeText: storeWelcomeText ?? chatConfig?.welcomeText ?? welcomeText
};
if (!isPublicFetch) {
config.scheduledTriggerConfig = undefined;
}
return config;
};
export const getOrInitModuleInputValue = (input: FlowNodeInputItemType) => {
@@ -160,7 +175,7 @@ export const pluginData2FlowNodeIO = (
};
export const formatEditorVariablePickerIcon = (
variables: { key: string; label: string; type?: `${VariableInputEnum}` }[]
variables: { key: string; label: string; type?: `${VariableInputEnum}`; required?: boolean }[]
): EditorVariablePickerType[] => {
return variables.map((item) => ({
...item,

View File

@@ -10,7 +10,7 @@
"js-yaml": "^4.1.0",
"jschardet": "3.1.1",
"nanoid": "^4.0.1",
"next": "13.5.2",
"next": "14.2.3",
"openai": "4.28.0",
"openapi-types": "^12.1.3",
"timezones-list": "^3.0.2"

View File

@@ -20,3 +20,9 @@ export const PermissionTypeMap = {
label: 'permission.Public'
}
};
export enum ResourceTypeEnum {
team = 'team',
app = 'app',
dataset = 'dataset'
}

View File

@@ -1,5 +1,7 @@
import { AuthUserTypeEnum } from './constant';
export type PermissionValueType = number;
export type AuthResponseType = {
teamId: string;
tmbId: string;
@@ -9,3 +11,10 @@ export type AuthResponseType = {
appId?: string;
apikey?: string;
};
export type ResourcePermissionType = {
teamId: string;
tmbId: string;
resourceType: ResourceType;
permission: PermissionValueType;
};

View File

@@ -1,6 +1,6 @@
export const TeamCollectionName = 'teams';
export const TeamMemberCollectionName = 'team.members';
export const TeamTagsCollectionName = 'team.tags';
export const TeamMemberCollectionName = 'team_members';
export const TeamTagsCollectionName = 'team_tags';
export enum TeamMemberRoleEnum {
owner = 'owner',

View File

@@ -1,3 +1,4 @@
import { PermissionValueType } from 'support/permission/type';
import { TeamMemberRoleEnum } from './constant';
import { LafAccountType, TeamMemberSchema } from './type';
@@ -13,7 +14,6 @@ export type CreateTeamProps = {
lafAccount?: LafAccountType;
};
export type UpdateTeamProps = {
teamId: string;
name?: string;
avatar?: string;
teamDomain?: string;
@@ -44,3 +44,9 @@ export type InviteMemberResponse = Record<
'invite' | 'inValid' | 'inTeam',
{ username: string; userId: string }[]
>;
export type UpdateTeamMemberPermissionProps = {
teamId: string;
memberIds: string[];
permission: PermissionValueType;
};

View File

@@ -1,6 +1,7 @@
import type { UserModelSchema } from '../type';
import type { TeamMemberRoleEnum, TeamMemberStatusEnum } from './constant';
import { LafAccountType } from './type';
import { PermissionValueType, ResourcePermissionType } from '../../permission/type';
export type TeamSchema = {
_id: string;
@@ -15,6 +16,7 @@ export type TeamSchema = {
lastWebsiteSyncTime: Date;
};
lafAccount: LafAccountType;
defaultPermission: PermissionValueType;
};
export type tagsType = {
label: string;
@@ -61,6 +63,7 @@ export type TeamItemType = {
status: `${TeamMemberStatusEnum}`;
canWrite: boolean;
lafAccount?: LafAccountType;
defaultPermission: PermissionValueType;
};
export type TeamMemberItemType = {
@@ -69,8 +72,10 @@ export type TeamMemberItemType = {
teamId: string;
memberName: string;
avatar: string;
// TODO: this should be deprecated.
role: `${TeamMemberRoleEnum}`;
status: `${TeamMemberStatusEnum}`;
permission: PermissionValueType;
};
export type TeamTagItemType = {

View File

@@ -11,6 +11,7 @@ export type BillSchemaType = {
status: 'SUCCESS' | 'REFUND' | 'NOTPAY' | 'CLOSED';
type: `${BillTypeEnum}`;
price: number;
hasInvoice: boolean;
metadata: {
payWay: `${BillPayWayEnum}`;
subMode?: `${SubModeEnum}`;
@@ -20,7 +21,6 @@ export type BillSchemaType = {
extraPoints?: number;
invoice: boolean;
};
username: string;
};
export type ChatNodeUsageType = {

View File

@@ -2,7 +2,7 @@ import { connectionMongo, type Model } from '../../mongo';
const { Schema, model, models } = connectionMongo;
import { RawTextBufferSchemaType } from './type';
export const collectionName = 'buffer.rawText';
export const collectionName = 'buffer_rawtexts';
const RawTextBufferSchema = new Schema({
sourceId: {

View File

@@ -2,7 +2,7 @@ import { connectionMongo, type Model } from '../../../common/mongo';
const { Schema, model, models } = connectionMongo;
import { TTSBufferSchemaType } from './type.d';
export const collectionName = 'buffer.tts';
export const collectionName = 'buffer_tts';
const TTSBufferSchema = new Schema({
bufferId: {

View File

@@ -7,7 +7,7 @@ import { MongoFileSchema } from './schema';
import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoRawTextBuffer } from '../../buffer/rawText/schema';
import { readFileRawContent } from '../read/utils';
import { readRawContentByFileBuffer } from '../read/utils';
import { PassThrough } from 'stream';
export function getGFSCollection(bucket: `${BucketNameEnum}`) {
@@ -151,12 +151,12 @@ export const readFileContentFromMongo = async ({
teamId,
bucketName,
fileId,
csvFormat = false
isQAImport = false
}: {
teamId: string;
bucketName: `${BucketNameEnum}`;
fileId: string;
csvFormat?: boolean;
isQAImport?: boolean;
}): Promise<{
rawText: string;
filename: string;
@@ -196,9 +196,9 @@ export const readFileContentFromMongo = async ({
});
})();
const { rawText } = await readFileRawContent({
const { rawText } = await readRawContentByFileBuffer({
extension,
csvFormat,
isQAImport,
teamId,
buffer: fileBuffers,
encoding,

View File

@@ -1,9 +1,11 @@
import { markdownProcess, simpleMarkdownText } from '@fastgpt/global/common/string/markdown';
import { markdownProcess } from '@fastgpt/global/common/string/markdown';
import { uploadMongoImg } from '../image/controller';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import { addHours } from 'date-fns';
import { WorkerNameEnum, runWorker } from '../../../worker/utils';
import fs from 'fs';
import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
import { ReadFileResponse } from '../../../worker/file/type';
export const initMarkdownText = ({
@@ -27,42 +29,71 @@ export const initMarkdownText = ({
})
});
export const readFileRawContent = async ({
export type readRawTextByLocalFileParams = {
teamId: string;
path: string;
metadata?: Record<string, any>;
};
export const readRawTextByLocalFile = async (params: readRawTextByLocalFileParams) => {
const { path } = params;
const extension = path?.split('.')?.pop()?.toLowerCase() || '';
const buffer = fs.readFileSync(path);
const encoding = detectFileEncoding(buffer);
const { rawText } = await readRawContentByFileBuffer({
extension,
isQAImport: false,
teamId: params.teamId,
encoding,
buffer,
metadata: params.metadata
});
return {
rawText
};
};
export const readRawContentByFileBuffer = async ({
extension,
csvFormat,
isQAImport,
teamId,
buffer,
encoding,
metadata
}: {
csvFormat?: boolean;
isQAImport?: boolean;
extension: string;
teamId: string;
buffer: Buffer;
encoding: string;
metadata?: Record<string, any>;
}) => {
const result = await runWorker<ReadFileResponse>(WorkerNameEnum.readFile, {
let { rawText, formatText } = await runWorker<ReadFileResponse>(WorkerNameEnum.readFile, {
extension,
csvFormat,
encoding,
buffer
});
// markdown data format
if (['md', 'html', 'docx'].includes(extension)) {
result.rawText = await initMarkdownText({
rawText = await initMarkdownText({
teamId: teamId,
md: result.rawText,
md: rawText,
metadata: metadata
});
}
return result;
};
if (['csv', 'xlsx'].includes(extension)) {
// qa data
if (isQAImport) {
rawText = rawText || '';
} else {
rawText = formatText || '';
}
}
export const htmlToMarkdown = async (html?: string | null) => {
const md = await runWorker<string>(WorkerNameEnum.htmlStr2Md, { html: html || '' });
return simpleMarkdownText(md);
return { rawText };
};

View File

@@ -0,0 +1,44 @@
import { jsonRes } from '../response';
import type { NextApiResponse } from 'next';
import { withNextCors } from './cors';
import { ApiRequestProps } from '../../type/next';
import { addLog } from '../system/log';
// Shape of one API route handler. Handlers may be chained; a handler's
// resolved return value is what the wrapper ultimately serializes as the
// JSON `data` payload.
export type NextApiHandler<T = any> = (
  req: ApiRequestProps,
  res: NextApiResponse<T>
) => unknown | Promise<unknown>;
/**
 * Factory for Next.js API route entry points.
 *
 * Usage: `export default NextEntry({ beforeCallback: [...] })(handlerA, handlerB)`.
 * Each request runs CORS setup plus `beforeCallback`, then the handlers in
 * order; the LAST handler's resolved value is wrapped as `{ code: 200, data }`
 * unless a handler already produced a non-JSON response or finished the
 * response itself. Any thrown error is returned as `{ code: 500, error }`.
 *
 * NOTE(review): `beforeCallback` promises are created once when `NextEntry`
 * is called, so from the second request on `Promise.all` awaits
 * already-settled promises — confirm this one-shot init is intended.
 */
export const NextEntry = ({ beforeCallback = [] }: { beforeCallback?: Promise<any>[] }) => {
  return (...args: NextApiHandler[]): NextApiHandler => {
    return async function api(req: ApiRequestProps, res: NextApiResponse) {
      const start = Date.now();
      addLog.info(`Request start ${req.url}`);

      try {
        // CORS headers must be set before any handler can write a response.
        await Promise.all([withNextCors(req, res), ...beforeCallback]);

        let response = null;
        for (const handler of args) {
          response = await handler(req, res);
        }

        const contentType = res.getHeader('Content-Type');
        addLog.info(`Request finish ${req.url}, time: ${Date.now() - start}ms`);

        // Only auto-wrap when the handlers neither chose another content type
        // nor already completed the response (e.g. streaming endpoints).
        if ((!contentType || contentType === 'application/json') && !res.writableFinished) {
          return jsonRes(res, {
            code: 200,
            data: response
          });
        }
      } catch (error) {
        // Fix: failures were silently converted to a 500 response with no
        // server-side trace, while start/finish of every request IS logged.
        addLog.error(`Request error ${req.url}`, error);

        return jsonRes(res, {
          code: 500,
          error,
          url: req.url
        });
      }
    };
  };
};

View File

@@ -39,6 +39,10 @@ export async function connectMongo({
global.mongodb?.disconnect();
global.mongodb = undefined;
});
mongoose.connection.on('disconnected', () => {
console.log('mongo disconnected');
global.mongodb = undefined;
});
console.log('mongo connected');

View File

@@ -8,14 +8,12 @@ export const mongoSessionRun = async <T = unknown>(fn: (session: ClientSession)
const result = await fn(session);
await session.commitTransaction();
session.endSession();
await session.endSession();
return result as T;
} catch (error) {
console.log(error);
await session.abortTransaction();
session.endSession();
await session.endSession();
return Promise.reject(error);
}
};

View File

@@ -1,7 +1,7 @@
import { UrlFetchParams, UrlFetchResponse } from '@fastgpt/global/common/file/api';
import * as cheerio from 'cheerio';
import axios from 'axios';
import { htmlToMarkdown } from '../file/read/utils';
import { htmlToMarkdown } from './utils';
export const cheerioToHtml = ({
fetchUrl,
@@ -77,9 +77,8 @@ export const urlsFetch = async ({
$,
selector
});
console.log('html====', html);
const md = await htmlToMarkdown(html);
console.log('html====', md);
return {
url,

View File

@@ -12,27 +12,34 @@ import { getNanoid } from '@fastgpt/global/common/string/tools';
import { addLog } from '../../system/log';
export const getTiktokenWorker = () => {
if (global.tiktokenWorker) {
return global.tiktokenWorker;
const maxWorkers = global.systemEnv?.tokenWorkers || 20;
if (!global.tiktokenWorkers) {
global.tiktokenWorkers = [];
}
if (global.tiktokenWorkers.length >= maxWorkers) {
return global.tiktokenWorkers[Math.floor(Math.random() * global.tiktokenWorkers.length)];
}
const worker = getWorker(WorkerNameEnum.countGptMessagesTokens);
const i = global.tiktokenWorkers.push({
index: global.tiktokenWorkers.length,
worker,
callbackMap: {}
});
worker.on('message', ({ id, data }: { id: string; data: number }) => {
const callback = global.tiktokenWorker?.callbackMap?.[id];
const callback = global.tiktokenWorkers[i - 1]?.callbackMap?.[id];
if (callback) {
callback?.(data);
delete global.tiktokenWorker.callbackMap[id];
delete global.tiktokenWorkers[i - 1].callbackMap[id];
}
});
global.tiktokenWorker = {
worker,
callbackMap: {}
};
return global.tiktokenWorker;
return global.tiktokenWorkers[i - 1];
};
export const countGptMessagesTokens = (
@@ -40,32 +47,46 @@ export const countGptMessagesTokens = (
tools?: ChatCompletionTool[],
functionCall?: ChatCompletionCreateParams.Function[]
) => {
return new Promise<number>((resolve) => {
const start = Date.now();
return new Promise<number>(async (resolve) => {
try {
const start = Date.now();
const { worker, callbackMap } = getTiktokenWorker();
const id = getNanoid();
const { worker, callbackMap } = getTiktokenWorker();
const timer = setTimeout(() => {
const id = getNanoid();
const timer = setTimeout(() => {
console.log('Count token Time out');
resolve(
messages.reduce((sum, item) => {
if (item.content) {
return sum + item.content.length * 0.5;
}
return sum;
}, 0)
);
delete callbackMap[id];
}, 60000);
callbackMap[id] = (data) => {
// 检测是否有内存泄漏
addLog.info(`Count token time: ${Date.now() - start}, token: ${data}`);
// console.log(process.memoryUsage());
resolve(data);
clearTimeout(timer);
};
// 可以进一步优化(传递100w token数据,实际需要300ms,较慢)
worker.postMessage({
id,
messages,
tools,
functionCall
});
} catch (error) {
resolve(0);
delete callbackMap[id];
}, 300);
callbackMap[id] = (data) => {
resolve(data);
clearTimeout(timer);
// 检测是否有内存泄漏
// addLog.info(`Count token time: ${Date.now() - start}, token: ${data}`);
// console.log(process.memoryUsage());
};
worker.postMessage({
id,
messages,
tools,
functionCall
});
}
});
};

View File

@@ -0,0 +1,8 @@
import { simpleMarkdownText } from '@fastgpt/global/common/string/markdown';
import { WorkerNameEnum, runWorker } from '../../worker/utils';
/**
 * Convert an HTML string to simplified markdown.
 * Delegates the heavy conversion to the htmlStr2Md worker thread,
 * then normalizes the resulting markdown text.
 * A nullish input is treated as an empty document.
 */
export const htmlToMarkdown = async (html?: string | null) => {
  const rawMarkdown = await runWorker<string>(WorkerNameEnum.htmlStr2Md, {
    html: html || ''
  });
  return simpleMarkdownText(rawMarkdown);
};

View File

@@ -20,3 +20,17 @@ export const initFastGPTConfig = (config?: FastGPTConfigFileType) => {
global.whisperModel = config.whisperModel;
global.reRankModels = config.reRankModels;
};
/**
 * Register process-level crash handlers at system startup.
 * Both handlers only log the failure — the process is deliberately
 * kept alive (the exit call is left commented out).
 */
export const systemStartCb = () => {
  const logUncaught = (err: Error) => {
    console.error('Uncaught Exception:', err);
    // process.exit(1); // exit the process
  };
  const logUnhandledRejection = (reason: unknown, promise: Promise<unknown>) => {
    console.error('Unhandled Rejection at:', promise, 'reason:', reason);
    // process.exit(1); // exit the process
  };

  process.on('uncaughtException', logUncaught);
  process.on('unhandledRejection', logUnhandledRejection);
};
export const surrenderProcess = () => new Promise((resolve) => setImmediate(resolve));

Some files were not shown because too many files have changed in this diff Show More