Compare commits: v4.9.4 ... v4.9.6-alp (24 commits)
| SHA1 |
|---|
| 952412f648 |
| ab799e13cd |
| ba422b73b3 |
| c7c79b400a |
| 47f674666b |
| 0c9e56c1ee |
| 97a6c6749a |
| 565b3e4319 |
| efad4c101f |
| bed68718e8 |
| 7a9cf4ce9e |
| b943d05d5a |
| 2af3cd83f2 |
| ea74c669ee |
| 38597d8f04 |
| 88ed019717 |
| 16a22bc76a |
| b51a87f5b7 |
| bc1ca66b66 |
| c9e12bb608 |
| 4e7fa29087 |
| ec3bcfa124 |
| 199f454b6b |
| 80f41dd2a9 |
### .github/workflows/docs-deploy-cf.yml (2 changes)

```diff
@@ -18,7 +18,7 @@ jobs:
       url: ${{ steps.vercel-action.outputs.preview-url }}

     # The type of runner that the job will run on
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04

     permissions:
       contents: write
```
### .github/workflows/docs-deploy-kubeconfig.yml (2 changes)

```diff
@@ -73,7 +73,7 @@ jobs:

   update-docs-image:
     needs: build-fastgpt-docs-images
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     if: github.repository == 'labring/FastGPT'
     steps:
       - name: Checkout code
```
### .github/workflows/docs-preview.yml (2 changes)

```diff
@@ -22,7 +22,7 @@ jobs:
       url: ${{ steps.vercel-action.outputs.preview-url }}

     # The type of runner that the job will run on
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04

     # Job outputs
     outputs:
@@ -14,7 +14,7 @@ jobs:
       contents: read
       attestations: write
       id-token: write
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     if: github.repository != 'labring/FastGPT'
     steps:
       - name: Checkout
```
### .github/workflows/fastgpt-build-image.yml (292 changes)

```diff
@@ -1,12 +1,14 @@
 name: Build FastGPT images

 on:
   workflow_dispatch:
   push:
     paths:
-      - 'projects/app/**'
-      - 'packages/**'
+      - "projects/app/**"
+      - "packages/**"
     tags:
-      - 'v*'
+      - "v*"

 jobs:
   build-fastgpt-images:
     permissions:
@@ -14,260 +16,156 @@ jobs:
       contents: read
       attestations: write
       id-token: write
-    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        sub_routes:
+          - repo: fastgpt
+            base_url: ""
+          - repo: fastgpt-sub-route
+            base_url: "/fastai"
+          - repo: fastgpt-sub-route-gchat
+            base_url: "/gchat"
+        archs:
+          - arch: amd64
+          - arch: arm64
+            runs-on: ubuntu-24.04-arm
+    runs-on: ${{ matrix.archs.runs-on || 'ubuntu-24.04' }}
     steps:
       # install env
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           fetch-depth: 1
-      - name: Install Dependencies
-        run: |
-          sudo apt update && sudo apt install -y nodejs npm
-      - name: Set up QEMU (optional)
-        uses: docker/setup-qemu-action@v2

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
         with:
           driver-opts: network=host

       - name: Cache Docker layers
         uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-${{ github.sha }}
+          key: ${{ runner.os }}-${{ matrix.sub_routes.repo }}-buildx-${{ github.sha }}
           restore-keys: |
-            ${{ runner.os }}-buildx-
+            ${{ runner.os }}-${{ matrix.sub_routes.repo }}-buildx-

       # login docker
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Login to Ali Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: registry.cn-hangzhou.aliyuncs.com
           username: ${{ secrets.ALI_HUB_USERNAME }}
           password: ${{ secrets.ALI_HUB_PASSWORD }}
       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_HUB_NAME }}
           password: ${{ secrets.DOCKER_HUB_PASSWORD }}

-      # Set tag
-      - name: Set image name and tag
-        run: |
-          if [[ "${{ github.ref_name }}" == "main" ]]; then
-            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt:latest" >> $GITHUB_ENV
-            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt:latest" >> $GITHUB_ENV
-            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt:latest" >> $GITHUB_ENV
-            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt:latest" >> $GITHUB_ENV
-          else
-            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt:latest" >> $GITHUB_ENV
-            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt:latest" >> $GITHUB_ENV
-          fi
+      - name: Build for ${{ matrix.archs.arch }}
+        id: build
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: projects/app/Dockerfile
+          platforms: linux/${{ matrix.archs.arch }}
+          build-args: |
+            ${{ matrix.sub_routes.base_url && format('base_url={0}', matrix.sub_routes.base_url) || '' }}
+          labels: |
+            org.opencontainers.image.source=https://github.com/${{ github.repository }}
+            org.opencontainers.image.description=${{ matrix.sub_routes.repo }} image
+          outputs: type=image,"name=ghcr.io/${{ github.repository_owner }}/${{ matrix.sub_routes.repo }},${{ secrets.ALI_IMAGE_NAME }}/${{ matrix.sub_routes.repo }},${{ secrets.DOCKER_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}",push-by-digest=true,push=true
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache

-      - name: Build and publish image for main branch or tag push event
-        env:
-          DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }}
+      - name: Export digest
         run: |
-          docker buildx build \
-          -f projects/app/Dockerfile \
-          --platform linux/amd64,linux/arm64 \
-          --label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/FastGPT" \
-          --label "org.opencontainers.image.description=fastgpt image" \
-          --push \
-          --cache-from=type=local,src=/tmp/.buildx-cache \
-          --cache-to=type=local,dest=/tmp/.buildx-cache \
-          -t ${Git_Tag} \
-          -t ${Git_Latest} \
-          -t ${Ali_Tag} \
-          -t ${Ali_Latest} \
-          -t ${Docker_Hub_Tag} \
-          -t ${Docker_Hub_Latest} \
-          .
-  build-fastgpt-images-sub-route:
+          mkdir -p ${{ runner.temp }}/digests/${{ matrix.sub_routes.repo }}
+          digest="${{ steps.build.outputs.digest }}"
+          touch "${{ runner.temp }}/digests/${{ matrix.sub_routes.repo }}/${digest#sha256:}"

+      - name: Upload digest
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-${{ matrix.sub_routes.repo }}-${{ github.sha }}-${{ matrix.archs.arch }}
+          path: ${{ runner.temp }}/digests/${{ matrix.sub_routes.repo }}/*
+          if-no-files-found: error
+          retention-days: 1

+  release-fastgpt-images:
     permissions:
       packages: write
       contents: read
       attestations: write
       id-token: write
-    runs-on: ubuntu-20.04
+    needs: build-fastgpt-images
+    strategy:
+      matrix:
+        sub_routes:
+          - repo: fastgpt
+          - repo: fastgpt-sub-route
+          - repo: fastgpt-sub-route-gchat
+    runs-on: ubuntu-24.04
     steps:
-      # install env
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 1
-      - name: Install Dependencies
-        run: |
-          sudo apt update && sudo apt install -y nodejs npm
-      - name: Set up QEMU (optional)
-        uses: docker/setup-qemu-action@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-        with:
-          driver-opts: network=host
-      - name: Cache Docker layers
-        uses: actions/cache@v4
-        with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-${{ github.sha }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-

       # login docker
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Login to Ali Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: registry.cn-hangzhou.aliyuncs.com
           username: ${{ secrets.ALI_HUB_USERNAME }}
           password: ${{ secrets.ALI_HUB_PASSWORD }}
       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_HUB_NAME }}
           password: ${{ secrets.DOCKER_HUB_PASSWORD }}

-      # Set tag
-      - name: Set image name and tag
-        run: |
-          if [[ "${{ github.ref_name }}" == "main" ]]; then
-            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-          else
-            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route:latest" >> $GITHUB_ENV
-          fi

-      - name: Build and publish image for main branch or tag push event
-        env:
-          DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }}
-        run: |
-          docker buildx build \
-          -f projects/app/Dockerfile \
-          --platform linux/amd64,linux/arm64 \
-          --build-arg base_url=/fastai \
-          --label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/FastGPT" \
-          --label "org.opencontainers.image.description=fastgpt image" \
-          --push \
-          --cache-from=type=local,src=/tmp/.buildx-cache \
-          --cache-to=type=local,dest=/tmp/.buildx-cache \
-          -t ${Git_Tag} \
-          -t ${Git_Latest} \
-          -t ${Ali_Tag} \
-          -t ${Ali_Latest} \
-          -t ${Docker_Hub_Tag} \
-          -t ${Docker_Hub_Latest} \
-          .
-  build-fastgpt-images-sub-route-gchat:
-    permissions:
-      packages: write
-      contents: read
-      attestations: write
-      id-token: write
-    runs-on: ubuntu-20.04
-    steps:
-      # install env
-      - name: Checkout
-        uses: actions/checkout@v3
+      - name: Download digests
+        uses: actions/download-artifact@v4
         with:
-          fetch-depth: 1
-      - name: Install Dependencies
-        run: |
-          sudo apt update && sudo apt install -y nodejs npm
-      - name: Set up QEMU (optional)
-        uses: docker/setup-qemu-action@v2
+          path: ${{ runner.temp }}/digests
+          pattern: digests-${{ matrix.sub_routes.repo }}-${{ github.sha }}-*
+          merge-multiple: true

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-        with:
-          driver-opts: network=host
-      - name: Cache Docker layers
-        uses: actions/cache@v4
-        with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-${{ github.sha }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-
+        uses: docker/setup-buildx-action@v3

-      # login docker
-      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Login to Ali Hub
-        uses: docker/login-action@v2
-        with:
-          registry: registry.cn-hangzhou.aliyuncs.com
-          username: ${{ secrets.ALI_HUB_USERNAME }}
-          password: ${{ secrets.ALI_HUB_PASSWORD }}
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKER_HUB_NAME }}
-          password: ${{ secrets.DOCKER_HUB_PASSWORD }}

       # Set tag
       - name: Set image name and tag
         run: |
           if [[ "${{ github.ref_name }}" == "main" ]]; then
-            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
-            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
-            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
-            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
+            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
+            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
+            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
+            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
+            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
+            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
           else
-            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route-gchat:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
-            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route-gchat:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
-            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route-gchat:${{ github.ref_name }}" >> $GITHUB_ENV
-            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sub-route-gchat:latest" >> $GITHUB_ENV
+            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/${{ matrix.sub_routes.repo }}:${{ github.ref_name }}" >> $GITHUB_ENV
+            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
+            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:${{ github.ref_name }}" >> $GITHUB_ENV
+            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
+            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:${{ github.ref_name }}" >> $GITHUB_ENV
+            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/${{ matrix.sub_routes.repo }}:latest" >> $GITHUB_ENV
           fi

-      - name: Build and publish image for main branch or tag push event
-        env:
-          DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }}
+      - name: Create manifest list and push
+        working-directory: ${{ runner.temp }}/digests
         run: |
-          docker buildx build \
-          -f projects/app/Dockerfile \
-          --platform linux/amd64,linux/arm64 \
-          --build-arg base_url=/gchat \
-          --label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/FastGPT" \
-          --label "org.opencontainers.image.description=fastgpt-sub-route-gchat image" \
-          --push \
-          --cache-from=type=local,src=/tmp/.buildx-cache \
-          --cache-to=type=local,dest=/tmp/.buildx-cache \
-          -t ${Git_Tag} \
-          -t ${Git_Latest} \
-          -t ${Ali_Tag} \
-          -t ${Ali_Latest} \
-          -t ${Docker_Hub_Tag} \
-          -t ${Docker_Hub_Latest} \
-          .
+          TAGS="$(echo -e "${Git_Tag}\n${Git_Latest}\n${Ali_Tag}\n${Ali_Latest}\n${Docker_Hub_Tag}\n${Docker_Hub_Latest}")"
+          for TAG in $TAGS; do
+            docker buildx imagetools create -t $TAG \
+              $(printf 'ghcr.io/${{ github.repository_owner }}/${{ matrix.sub_routes.repo }}@sha256:%s ' *)
+            sleep 5
+          done
```
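The refactor above replaces a single multi-platform `docker buildx build --push` with per-architecture builds that push by digest, plus a release job that stitches the digests into one multi-arch manifest with `docker buildx imagetools create`. A minimal sketch of the same pattern outside CI; the repository name and digests below are placeholders, not values from this diff:

```bash
# Assemble a multi-arch image from per-arch digests (placeholder repo and digests).
REPO=ghcr.io/example/fastgpt

# Each per-arch job pushed an image by digest (push-by-digest=true), e.g.:
AMD64_DIGEST=sha256:1111111111111111111111111111111111111111111111111111111111111111
ARM64_DIGEST=sha256:2222222222222222222222222222222222222222222222222222222222222222

# Combine both digests under a single tag as a manifest list.
docker buildx imagetools create -t "$REPO:latest" \
  "$REPO@$AMD64_DIGEST" \
  "$REPO@$ARM64_DIGEST"

# Verify that the tag now resolves to linux/amd64 and linux/arm64 entries.
docker buildx imagetools inspect "$REPO:latest"
```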
### .github/workflows/fastgpt-preview-image.yml (44 changes)

```diff
@@ -12,26 +12,33 @@ jobs:
       id-token: write
       pull-requests: write

-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
+    strategy:
+      matrix:
+        image: [fastgpt, sandbox, mcp_server]
+      fail-fast: false # even if one image fails to build, keep building the others

     steps:
       - name: Checkout
         uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.ref }}
           repository: ${{ github.event.pull_request.head.repo.full_name }}
-          fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
+          fetch-depth: 0
           token: ${{ secrets.GITHUB_TOKEN }}

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
         with:
           driver-opts: network=host

       - name: Cache Docker layers
         uses: actions/cache@v3
         with:
           path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-${{ github.sha }}
+          key: ${{ runner.os }}-buildx-${{ github.sha }}-${{ matrix.image }}
           restore-keys: |
+            ${{ runner.os }}-buildx-${{ github.sha }}-
             ${{ runner.os }}-buildx-

       - name: Login to GitHub Container Registry
@@ -41,24 +48,35 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Set DOCKER_REPO_TAGGED based on branch or tag
+      - name: Set image config
+        id: config
         run: |
-          echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/fastgpt-pr:${{ github.event.pull_request.head.sha }}" >> $GITHUB_ENV
+          if [[ "${{ matrix.image }}" == "fastgpt" ]]; then
+            echo "DOCKERFILE=projects/app/Dockerfile" >> $GITHUB_OUTPUT
+            echo "DESCRIPTION=fastgpt-pr image" >> $GITHUB_OUTPUT
+            echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/fastgpt-pr:fatsgpt_${{ github.event.pull_request.head.sha }}" >> $GITHUB_OUTPUT
+          elif [[ "${{ matrix.image }}" == "sandbox" ]]; then
+            echo "DOCKERFILE=projects/sandbox/Dockerfile" >> $GITHUB_OUTPUT
+            echo "DESCRIPTION=fastgpt-sandbox-pr image" >> $GITHUB_OUTPUT
+            echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/fastgpt-pr:fatsgpt_sandbox_${{ github.event.pull_request.head.sha }}" >> $GITHUB_OUTPUT
+          elif [[ "${{ matrix.image }}" == "mcp_server" ]]; then
+            echo "DOCKERFILE=projects/mcp_server/Dockerfile" >> $GITHUB_OUTPUT
+            echo "DESCRIPTION=fastgpt-mcp_server-pr image" >> $GITHUB_OUTPUT
+            echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/fastgpt-pr:fatsgpt_mcp_server_${{ github.event.pull_request.head.sha }}" >> $GITHUB_OUTPUT
+          fi

-      - name: Build image for PR
-        env:
-          DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }}
+      - name: Build ${{ matrix.image }} image for PR
         run: |
           docker buildx build \
-          -f projects/app/Dockerfile \
+          -f ${{ steps.config.outputs.DOCKERFILE }} \
           --label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/FastGPT" \
-          --label "org.opencontainers.image.description=fastgpt-pr image" \
           --label "org.opencontainers.image.licenses=Apache" \
+          --label "org.opencontainers.image.description=${{ steps.config.outputs.DESCRIPTION }}" \
           --push \
           --cache-from=type=local,src=/tmp/.buildx-cache \
           --cache-to=type=local,dest=/tmp/.buildx-cache \
-          -t ${DOCKER_REPO_TAGGED} \
+          -t ${{ steps.config.outputs.DOCKER_REPO_TAGGED }} \
           .

       - uses: actions/github-script@v7
         with:
           github-token: ${{secrets.GITHUB_TOKEN}}
@@ -67,5 +85,5 @@ jobs:
             issue_number: context.issue.number,
             owner: context.repo.owner,
             repo: context.repo.repo,
-            body: 'Preview Image: `${{ env.DOCKER_REPO_TAGGED }}`'
+            body: 'Preview ${{ matrix.image }} Image: `${{ steps.config.outputs.DOCKER_REPO_TAGGED }}`'
           })
```
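The change above moves from a job-wide `$GITHUB_ENV` variable to per-step outputs: writing `KEY=value` to `$GITHUB_OUTPUT` in a step with `id: config` makes the value readable later as `${{ steps.config.outputs.KEY }}`, which keeps each matrix image's Dockerfile and tag isolated. A minimal sketch of the two mechanisms (the key names and values here are illustrative):

```bash
# Inside a workflow step: values written to $GITHUB_OUTPUT become step outputs,
# addressable by later steps as ${{ steps.<step-id>.outputs.DOCKERFILE }}.
echo "DOCKERFILE=projects/app/Dockerfile" >> "$GITHUB_OUTPUT"

# By contrast, $GITHUB_ENV exports a variable into every later step's shell
# environment for the whole job, which is what the old workflow relied on.
echo "DOCKER_REPO_TAGGED=ghcr.io/example/fastgpt-pr:abc123" >> "$GITHUB_ENV"
```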
### .github/workflows/fastgpt-test.yaml (3 changes)

```diff
@@ -15,6 +15,9 @@ jobs:

     steps:
       - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.ref }}
+          repository: ${{ github.event.pull_request.head.repo.full_name }}
      - uses: pnpm/action-setup@v4
         with:
           version: 10
```
### .github/workflows/helm-release.yaml (2 changes)

```diff
@@ -13,7 +13,7 @@ jobs:
       contents: read
       attestations: write
       id-token: write
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout
         uses: actions/checkout@v4
```
### .github/workflows/mcp_server-build-image.yml (new file, 151 lines)

```yaml
name: Build fastgpt-mcp-server images
on:
  workflow_dispatch:
  push:
    paths:
      - 'projects/sandbox/**'
    tags:
      - 'v*'
jobs:
  build-fastgpt-mcp_server-images:
    permissions:
      packages: write
      contents: read
      attestations: write
      id-token: write
    strategy:
      matrix:
        include:
          - arch: amd64
          - arch: arm64
            runs-on: ubuntu-24.04-arm
    runs-on: ${{ matrix.runs-on || 'ubuntu-24.04' }}
    steps:
      # install env
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: network=host
      - name: Cache Docker layers
        uses: actions/cache@v4
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-mcp-server-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-mcp_server-buildx-

      # login docker
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Ali Hub
        uses: docker/login-action@v3
        with:
          registry: registry.cn-hangzhou.aliyuncs.com
          username: ${{ secrets.ALI_HUB_USERNAME }}
          password: ${{ secrets.ALI_HUB_PASSWORD }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_NAME }}
          password: ${{ secrets.DOCKER_HUB_PASSWORD }}

      - name: Build for ${{ matrix.arch }}
        id: build
        uses: docker/build-push-action@v6
        with:
          context: .
          file: projects/mcp_server/Dockerfile
          platforms: linux/${{ matrix.arch }}
          labels: |
            org.opencontainers.image.source=https://github.com/${{ github.repository }}
            org.opencontainers.image.description=fastgpt-mcp_server image
          outputs: type=image,"name=ghcr.io/${{ github.repository_owner }}/fastgpt-mcp_server,${{ secrets.ALI_IMAGE_NAME }}/fastgpt-mcp_server,${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-mcp_server",push-by-digest=true,push=true
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-fastgpt-mcp_server-${{ github.sha }}-${{ matrix.arch }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  release-fastgpt-mcp_server-images:
    permissions:
      packages: write
      contents: read
      attestations: write
      id-token: write
    needs: build-fastgpt-mcp_server-images
    runs-on: ubuntu-24.04
    steps:
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Ali Hub
        uses: docker/login-action@v3
        with:
          registry: registry.cn-hangzhou.aliyuncs.com
          username: ${{ secrets.ALI_HUB_USERNAME }}
          password: ${{ secrets.ALI_HUB_PASSWORD }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_NAME }}
          password: ${{ secrets.DOCKER_HUB_PASSWORD }}

      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-fastgpt-mcp_server-${{ github.sha }}-*
          merge-multiple: true

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Set image name and tag
        run: |
          if [[ "${{ github.ref_name }}" == "main" ]]; then
            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
          else
            echo "Git_Tag=ghcr.io/${{ github.repository_owner }}/fastgpt-mcp_server:${{ github.ref_name }}" >> $GITHUB_ENV
            echo "Git_Latest=ghcr.io/${{ github.repository_owner }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
            echo "Ali_Tag=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-mcp_server:${{ github.ref_name }}" >> $GITHUB_ENV
            echo "Ali_Latest=${{ secrets.ALI_IMAGE_NAME }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
            echo "Docker_Hub_Tag=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-mcp_server:${{ github.ref_name }}" >> $GITHUB_ENV
            echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-mcp_server:latest" >> $GITHUB_ENV
          fi

      - name: Create manifest list and push
        working-directory: ${{ runner.temp }}/digests
        run: |
          TAGS="$(echo -e "${Git_Tag}\n${Git_Latest}\n${Ali_Tag}\n${Ali_Latest}\n${Docker_Hub_Tag}\n${Docker_Hub_Latest}")"
          for TAG in $TAGS; do
            docker buildx imagetools create -t $TAG \
              $(printf 'ghcr.io/${{ github.repository_owner }}/fastgpt-mcp_server@sha256:%s ' *)
            sleep 5
          done
```
### .github/workflows/sandbox-build-image.yml (122 changes)

```diff
@@ -13,50 +13,115 @@ jobs:
       contents: read
       attestations: write
       id-token: write
-    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        include:
+          - arch: amd64
+          - arch: arm64
+            runs-on: ubuntu-24.04-arm
+    runs-on: ${{ matrix.runs-on || 'ubuntu-24.04' }}
     steps:
       # install env
       - name: Checkout
         uses: actions/checkout@v3
         with:
           fetch-depth: 0
-      - name: Install Dependencies
-        run: |
-          sudo apt update && sudo apt install -y nodejs npm
-      - name: Set up QEMU (optional)
-        uses: docker/setup-qemu-action@v2
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
         with:
           driver-opts: network=host
       - name: Cache Docker layers
         uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-${{ github.sha }}
+          key: ${{ runner.os }}-sandbox-buildx-${{ github.sha }}
           restore-keys: |
-            ${{ runner.os }}-buildx-
+            ${{ runner.os }}-sandbox-buildx-

       # login docker
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Login to Ali Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           registry: registry.cn-hangzhou.aliyuncs.com
           username: ${{ secrets.ALI_HUB_USERNAME }}
           password: ${{ secrets.ALI_HUB_PASSWORD }}
       - name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_HUB_NAME }}
           password: ${{ secrets.DOCKER_HUB_PASSWORD }}

-      # Set tag
+      - name: Build for ${{ matrix.arch }}
+        id: build
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: projects/sandbox/Dockerfile
+          platforms: linux/${{ matrix.arch }}
+          labels: |
+            org.opencontainers.image.source=https://github.com/${{ github.repository }}
+            org.opencontainers.image.description=fastgpt-sandbox image
+          outputs: type=image,"name=ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox,${{ secrets.ALI_IMAGE_NAME }}/fastgpt-sandbox,${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sandbox",push-by-digest=true,push=true
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache

+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          digest="${{ steps.build.outputs.digest }}"
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"

+      - name: Upload digest
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-fastgpt-sandbox-${{ github.sha }}-${{ matrix.arch }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+          retention-days: 1

+  release-fastgpt-sandbox-images:
+    permissions:
+      packages: write
+      contents: read
+      attestations: write
+      id-token: write
+    needs: build-fastgpt-sandbox-images
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Login to Ali Hub
+        uses: docker/login-action@v3
+        with:
+          registry: registry.cn-hangzhou.aliyuncs.com
+          username: ${{ secrets.ALI_HUB_USERNAME }}
+          password: ${{ secrets.ALI_HUB_PASSWORD }}
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_HUB_NAME }}
+          password: ${{ secrets.DOCKER_HUB_PASSWORD }}

+      - name: Download digests
+        uses: actions/download-artifact@v4
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: digests-fastgpt-sandbox-${{ github.sha }}-*
+          merge-multiple: true

+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3

       - name: Set image name and tag
         run: |
           if [[ "${{ github.ref_name }}" == "main" ]]; then
@@ -75,27 +140,12 @@ jobs:
             echo "Docker_Hub_Latest=${{ secrets.DOCKER_IMAGE_NAME }}/fastgpt-sandbox:latest" >> $GITHUB_ENV
           fi

-      - name: Build and publish image for main branch or tag push event
-        env:
-          Git_Tag: ${{ env.Git_Tag }}
-          Git_Latest: ${{ env.Git_Latest }}
-          Ali_Tag: ${{ env.Ali_Tag }}
-          Ali_Latest: ${{ env.Ali_Latest }}
-          Docker_Hub_Tag: ${{ env.Docker_Hub_Tag }}
-          Docker_Hub_Latest: ${{ env.Docker_Hub_Latest }}
+      - name: Create manifest list and push
+        working-directory: ${{ runner.temp }}/digests
         run: |
-          docker buildx build \
-          -f projects/sandbox/Dockerfile \
-          --platform linux/amd64,linux/arm64 \
-          --label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/fastgpt-sandbox" \
-          --label "org.opencontainers.image.description=fastgpt-sandbox image" \
-          --push \
-          --cache-from=type=local,src=/tmp/.buildx-cache \
-          --cache-to=type=local,dest=/tmp/.buildx-cache \
-          -t ${Git_Tag} \
-          -t ${Git_Latest} \
-          -t ${Ali_Tag} \
-          -t ${Ali_Latest} \
-          -t ${Docker_Hub_Tag} \
-          -t ${Docker_Hub_Latest} \
-          .
+          TAGS="$(echo -e "${Git_Tag}\n${Git_Latest}\n${Ali_Tag}\n${Ali_Latest}\n${Docker_Hub_Tag}\n${Docker_Hub_Latest}")"
+          for TAG in $TAGS; do
+            docker buildx imagetools create -t $TAG \
+              $(printf 'ghcr.io/${{ github.repository_owner }}/fastgpt-sandbox@sha256:%s ' *)
+            sleep 5
+          done
```
### .vscode/nextapi.code-snippets (72 changes)

```diff
@@ -52,71 +52,17 @@
     "description": "FastGPT usecontext template"
   },

-  "Jest test template": {
-    "scope": "typescriptreact",
-    "prefix": "jesttest",
+  "Vitest test case template": {
+    "scope": "typescript",
+    "prefix": "template_test",
     "body": [
-      "import '@/pages/api/__mocks__/base';",
-      "import { root } from '@/pages/api/__mocks__/db/init';",
-      "import { getTestRequest } from '@fastgpt/service/test/utils'; ;",
-      "import { AppErrEnum } from '@fastgpt/global/common/error/code/app';",
-      "import handler from './demo';",
+      "import { describe, it, expect } from 'vitest';",
       "",
-      "// Import the schema",
-      "import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';",
-      "",
-      "beforeAll(async () => {",
-      "  // await MongoOutLink.create({",
-      "  //   shareId: 'aaa',",
-      "  //   appId: root.appId,",
-      "  //   tmbId: root.tmbId,",
-      "  //   teamId: root.teamId,",
-      "  //   type: 'share',",
-      "  //   name: 'aaa'",
-      "  // })",
-      "});",
-      "",
-      "test('Should return a list of outLink', async () => {",
-      "  // Mock request",
-      "  const res = (await handler(",
-      "    ...getTestRequest({",
-      "      query: {",
-      "        appId: root.appId,",
-      "        type: 'share'",
-      "      },",
-      "      user: root",
-      "    })",
-      "  )) as any;",
-      "",
-      "  expect(res.code).toBe(200);",
-      "  expect(res.data.length).toBe(2);",
-      "});",
-      "",
-      "test('appId is required', async () => {",
-      "  const res = (await handler(",
-      "    ...getTestRequest({",
-      "      query: {",
-      "        type: 'share'",
-      "      },",
-      "      user: root",
-      "    })",
-      "  )) as any;",
-      "  expect(res.code).toBe(500);",
-      "  expect(res.error).toBe(AppErrEnum.unExist);",
-      "});",
-      "",
-      "test('if type is not provided, return nothing', async () => {",
-      "  const res = (await handler(",
-      "    ...getTestRequest({",
-      "      query: {",
-      "        appId: root.appId",
-      "      },",
-      "      user: root",
-      "    })",
-      "  )) as any;",
-      "  expect(res.code).toBe(200);",
-      "  expect(res.data.length).toBe(0);",
+      "describe('authType2UsageSource', () => {",
+      "  it('Test description', () => {",
+      "    expect().toBe();",
+      "  });",
       "});"
     ]
   }
 }
```
### Makefile (2 changes)

```diff
@@ -17,7 +17,7 @@ dev:

 build:
 ifeq ($(proxy), taobao)
-	docker build -f $(filePath) -t $(image) . --build-arg proxy=taobao
+	docker build -f $(filePath) -t $(image) . --build-arg proxy=taobao
 else ifeq ($(proxy), clash)
 	docker build -f $(filePath) -t $(image) . --network host --build-arg HTTP_PROXY=http://127.0.0.1:7890 --build-arg HTTPS_PROXY=http://127.0.0.1:7890
 else
```
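A hypothetical invocation of this Makefile target, showing how the `proxy`, `filePath` and `image` variables are passed on the command line (the values are illustrative, not from this diff):

```bash
# Build through the taobao npm proxy; filePath and image are illustrative values.
make build proxy=taobao filePath=projects/app/Dockerfile image=fastgpt:local

# Or route the build through a local Clash proxy on 127.0.0.1:7890.
make build proxy=clash filePath=projects/app/Dockerfile image=fastgpt:local
```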
```diff
@@ -10,7 +10,7 @@
   <a href="./README_ja.md">Japanese</a>
 </p>

-FastGPT is a knowledge-base Q&A system built on LLM large language models, providing out-of-the-box capabilities such as data processing and model invocation. It also supports visual workflow orchestration via Flow to enable complex Q&A scenarios!
+FastGPT is an AI Agent building platform that provides out-of-the-box capabilities such as data processing and model invocation, and supports visual workflow orchestration via Flow to enable complex application scenarios!

 </div>
```
```diff
@@ -126,15 +126,15 @@ services:
   # fastgpt
   sandbox:
     container_name: sandbox
-    image: ghcr.io/labring/fastgpt-sandbox:v4.9.3 # git
-    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.9.3 # Alibaba Cloud
+    image: ghcr.io/labring/fastgpt-sandbox:v4.9.5 # git
+    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.9.5 # Alibaba Cloud
     networks:
       - fastgpt
     restart: always
   fastgpt:
     container_name: fastgpt
-    image: ghcr.io/labring/fastgpt:v4.9.3 # git
-    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.9.3 # Alibaba Cloud
+    image: ghcr.io/labring/fastgpt:v4.9.5 # git
+    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.9.5 # Alibaba Cloud
     ports:
       - 3000:3000
     networks:
@@ -184,6 +184,8 @@ services:
       - ALLOWED_ORIGINS=
       # Whether to enable IP rate limiting (disabled by default)
       - USE_IP_LIMIT=false
+      # Days before chat files expire
+      - CHAT_FILE_EXPIRE_TIME=7
     volumes:
       - ./config.json:/app/data/config.json
```
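To pick up the bumped v4.9.5 tags above, a typical upgrade looks like the following (assuming the compose file lives in the current directory):

```bash
# Pull the new image tags referenced in docker-compose.yml and recreate containers.
docker compose pull fastgpt sandbox
docker compose up -d
```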
### deploy/docker/docker-compose-oceanbase/docker-compose.yml (new file, 202 lines)

```yaml
# The database's default account and password only take effect on first run.
# If you change the account or password, remember to update both the database and the project's connection parameters, not just one of them.
# This config file is only for quick start and testing; for production, be sure to change the passwords and tune knowledge-base parameters, shared memory, etc.
# If you cannot access Docker Hub and git, you can use the Alibaba Cloud images (Alibaba Cloud has no arm packages).

version: '3.3'
services:
  # vector db
  ob:
    image: oceanbase/oceanbase-ce # docker hub
    # image: quay.io/oceanbase/oceanbase-ce:4.3.5.1-101000042025031818 # mirror
    container_name: ob
    restart: always
    # ports: # exposing is not recommended in production
    #   - 2881:2881
    networks:
      - fastgpt
    environment:
      # These settings only take effect on first run; changing them and restarting the container has no effect.
      # You must delete the persisted data and restart for changes to apply.
      - OB_SYS_PASSWORD=obsyspassword
      # Unlike traditional databases, an OceanBase account has more fields: user name, tenant name and cluster name.
      # The classic format is "user@tenant#cluster".
      # E.g. when connecting with a mysql client, with this file's defaults you should pass "-uroot@tenantname".
      - OB_TENANT_NAME=tenantname
      - OB_TENANT_PASSWORD=tenantpassword
      # MODE is MINI or NORMAL; the latter uses host resources as fully as possible
      - MODE=NORMAL
      - OB_SERVER_IP=127.0.0.1
      # For more environment variables, see the official OceanBase docs: https://www.oceanbase.com/docs/common-oceanbase-database-cn-1000000002013494
    volumes:
      - ./ob/data:/root/ob
      - ./ob/config:/root/.obd/cluster
      - ./init.sql:/root/boot/init.d/init.sql
    healthcheck:
      # obclient -h127.0.0.1 -P2881 -uroot@tenantname -ptenantpassword -e "SELECT 1;"
      test: ["CMD-SHELL", "obclient -h$OB_SERVER_IP -P2881 -uroot@$OB_TENANT_NAME -p$OB_TENANT_PASSWORD -e \"SELECT 1;\""]
      interval: 30s
      timeout: 10s
      retries: 1000
      start_period: 10s
  mongo:
    image: mongo:5.0.18 # dockerhub
    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/mongo:5.0.18 # Alibaba Cloud
    # image: mongo:4.4.29 # use when the CPU does not support AVX
    container_name: mongo
    restart: always
    # ports:
    #   - 27017:27017
    networks:
      - fastgpt
    command: mongod --keyFile /data/mongodb.key --replSet rs0
    environment:
      - MONGO_INITDB_ROOT_USERNAME=myusername
      - MONGO_INITDB_ROOT_PASSWORD=mypassword
    volumes:
      - ./mongo/data:/data/db
    entrypoint:
      - bash
      - -c
      - |
        openssl rand -base64 128 > /data/mongodb.key
        chmod 400 /data/mongodb.key
        chown 999:999 /data/mongodb.key
        echo 'const isInited = rs.status().ok === 1
        if(!isInited){
          rs.initiate({
            _id: "rs0",
            members: [
              { _id: 0, host: "mongo:27017" }
            ]
          })
        }' > /data/initReplicaSet.js
        # start the MongoDB service
        exec docker-entrypoint.sh "$$@" &

        # wait for MongoDB to start
        until mongo -u myusername -p mypassword --authenticationDatabase admin --eval "print('waited for connection')"; do
          echo "Waiting for MongoDB to start..."
          sleep 2
        done

        # run the replica-set initialization script
        mongo -u myusername -p mypassword --authenticationDatabase admin /data/initReplicaSet.js

        # wait on the MongoDB process started by docker-entrypoint.sh
        wait $$!

  # fastgpt
  sandbox:
    container_name: sandbox
    image: ghcr.io/labring/fastgpt-sandbox:v4.9.3 # git
    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.9.3 # Alibaba Cloud
    networks:
      - fastgpt
    restart: always
  fastgpt:
    container_name: fastgpt
    image: ghcr.io/labring/fastgpt:v4.9.3 # git
    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.9.3 # Alibaba Cloud
    ports:
      - 3000:3000
    networks:
      - fastgpt
    depends_on:
      mongo:
        condition: service_started
      ob:
        condition: service_healthy
      sandbox:
        condition: service_started
    restart: always
    environment:
      # Externally reachable frontend address, used to auto-complete file resource paths, e.g. https://fastgpt.cn; must not be localhost.
      # May be left empty; if empty, images sent to the model use a relative path instead of a full URL, and the model may forge the Host.
      - FE_DOMAIN=
      # root password; the user name is root. To change the root password, edit this env var and restart.
      - DEFAULT_ROOT_PSW=1234
      # # AI Proxy address; if configured, it takes priority
      # - AIPROXY_API_ENDPOINT=http://aiproxy:3000
      # # AI Proxy Admin Token, matching the ADMIN_KEY env var in AI Proxy
      # - AIPROXY_API_TOKEN=aiproxy
      # Model relay address (if AI Proxy is used, the two variables below are not needed; legacy OneAPI users use them)
      # OpenAI base URL, usable as a relay.
      - OPENAI_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
      # OpenAI API Key
      - CHAT_API_KEY=sk-8990fa15a34b464a805237cfe9561f11
      # Max database connections
      - DB_MAX_LINK=30
      # Login credential secret
      - TOKEN_KEY=any
      # root key, often used for init requests during upgrades
      - ROOT_KEY=root_key
      # File-read encryption key
      - FILE_TOKEN_KEY=filetoken
      # MongoDB connection params: user myusername, password mypassword.
      - MONGODB_URI=mongodb://myusername:mypassword@mongo:27017/fastgpt?authSource=admin
      # OceanBase vector store connection params
      - OCEANBASE_URL=mysql://root%40tenantname:tenantpassword@ob:2881/test
      # sandbox address
      - SANDBOX_URL=http://sandbox:3000
      # Log level: debug, info, warn, error
      - LOG_LEVEL=info
      - STORE_LOG_LEVEL=warn
      # Max workflow run steps
      - WORKFLOW_MAX_RUN_TIMES=1000
      # Batch-execution node: max input length
      - WORKFLOW_MAX_LOOP_TIMES=100
      # Custom CORS origins; if unset, all origins are allowed (separate multiple domains with commas)
      - ALLOWED_ORIGINS=
      # Whether to enable IP rate limiting (disabled by default)
      - USE_IP_LIMIT=false
    volumes:
      - ./config.json:/app/data/config.json

  # AI Proxy
  aiproxy:
    image: ghcr.io/labring/aiproxy:v0.1.5
    # image: registry.cn-hangzhou.aliyuncs.com/labring/aiproxy:v0.1.3 # Alibaba Cloud
    container_name: aiproxy
    restart: unless-stopped
    depends_on:
      aiproxy_pg:
        condition: service_healthy
    networks:
      - fastgpt
    environment:
      # Matches AIPROXY_API_TOKEN in fastgpt
      - ADMIN_KEY=aiproxy
      # Hours to keep detailed error logs
      - LOG_DETAIL_STORAGE_HOURS=1
      # Database connection DSN
      - SQL_DSN=postgres://postgres:aiproxy@aiproxy_pg:5432/aiproxy
      # Max retry times
      - RETRY_TIMES=3
      # Billing not needed
      - BILLING_ENABLED=false
      # Strict model checking not needed
      - DISABLE_MODEL_CONFIG=true
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:3000/api/status']
      interval: 5s
      timeout: 5s
      retries: 10
  aiproxy_pg:
    image: pgvector/pgvector:0.8.0-pg15 # docker hub
    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/pgvector:v0.8.0-pg15 # Alibaba Cloud
    restart: unless-stopped
    container_name: aiproxy_pg
    volumes:
      - ./aiproxy_pg:/var/lib/postgresql/data
    networks:
      - fastgpt
    environment:
      TZ: Asia/Shanghai
      POSTGRES_USER: postgres
      POSTGRES_DB: aiproxy
      POSTGRES_PASSWORD: aiproxy
    healthcheck:
      test: ['CMD', 'pg_isready', '-U', 'postgres', '-d', 'aiproxy']
      interval: 5s
      timeout: 5s
      retries: 10
networks:
  fastgpt:
```
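Two details of the OceanBase wiring above are easy to miss: the account format is `user@tenant`, and the `@` inside the user name must be percent-encoded as `%40` when it appears in `OCEANBASE_URL`. A quick sketch, using this file's default credentials and assuming the commented-out `2881` port mapping has been enabled:

```bash
# Connect with a MySQL-compatible client; the user is root *of the tenant*,
# hence the user@tenant form (values come from the compose file above).
mysql -h127.0.0.1 -P2881 -uroot@tenantname -ptenantpassword -e "SELECT 1;"

# In a URL, the '@' of root@tenantname must be percent-encoded, which is why
# the compose file writes: mysql://root%40tenantname:tenantpassword@ob:2881/test
```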
### deploy/docker/docker-compose-oceanbase/init.sql (new file, 2 lines)

```sql
ALTER SYSTEM SET ob_vector_memory_limit_percentage = 30;
```
```diff
@@ -85,15 +85,15 @@ services:
   # fastgpt
   sandbox:
     container_name: sandbox
-    image: ghcr.io/labring/fastgpt-sandbox:v4.9.3 # git
-    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.9.3 # Alibaba Cloud
+    image: ghcr.io/labring/fastgpt-sandbox:v4.9.5 # git
+    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.9.5 # Alibaba Cloud
     networks:
       - fastgpt
     restart: always
   fastgpt:
     container_name: fastgpt
-    image: ghcr.io/labring/fastgpt:v4.9.3 # git
-    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.9.3 # Alibaba Cloud
+    image: ghcr.io/labring/fastgpt:v4.9.5 # git
+    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.9.5 # Alibaba Cloud
     ports:
       - 3000:3000
     networks:
@@ -142,6 +142,8 @@ services:
       - ALLOWED_ORIGINS=
       # Whether to enable IP rate limiting (disabled by default)
       - USE_IP_LIMIT=false
+      # Days before chat files expire
+      - CHAT_FILE_EXPIRE_TIME=7
     volumes:
       - ./config.json:/app/data/config.json
```
```diff
@@ -66,15 +66,15 @@ services:

   sandbox:
     container_name: sandbox
-    image: ghcr.io/labring/fastgpt-sandbox:v4.9.3 # git
-    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.9.3 # Alibaba Cloud
+    image: ghcr.io/labring/fastgpt-sandbox:v4.9.5 # git
+    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sandbox:v4.9.5 # Alibaba Cloud
     networks:
       - fastgpt
     restart: always
   fastgpt:
     container_name: fastgpt
-    image: ghcr.io/labring/fastgpt:v4.9.3 # git
-    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.9.3 # Alibaba Cloud
+    image: ghcr.io/labring/fastgpt:v4.9.5 # git
+    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.9.5 # Alibaba Cloud
     ports:
       - 3000:3000
     networks:
@@ -123,6 +123,8 @@ services:
       - ALLOWED_ORIGINS=
       # Whether to enable IP rate limiting (disabled by default)
       - USE_IP_LIMIT=false
+      # Days before chat files expire
+      - CHAT_FILE_EXPIRE_TIME=7
     volumes:
       - ./config.json:/app/data/config.json
```
### Binary image changes

- Added (new files): docSite/assets/imgs/intro/image1.png through docSite/assets/imgs/intro/image8.png
- Several existing screenshot assets were replaced or removed
```diff
@@ -135,6 +135,9 @@ curl -O https://raw.githubusercontent.com/labring/FastGPT/main/projects/app/data

 # pgvector version (recommended for testing; simple and fast)
 curl -o docker-compose.yml https://raw.githubusercontent.com/labring/FastGPT/main/deploy/docker/docker-compose-pgvector.yml
+# oceanbase version (put init.sql and docker-compose.yml in the same folder so both can be mounted)
+# curl -o docker-compose.yml https://raw.githubusercontent.com/labring/FastGPT/main/deploy/docker/docker-compose-oceanbase/docker-compose.yml
+# curl -o init.sql https://raw.githubusercontent.com/labring/FastGPT/main/deploy/docker/docker-compose-oceanbase/init.sql
 # milvus version
 # curl -o docker-compose.yml https://raw.githubusercontent.com/labring/FastGPT/main/deploy/docker/docker-compose-milvus.yml
 # zilliz version
@@ -151,6 +154,13 @@ curl -o docker-compose.yml https://raw.githubusercontent.com/labring/FastGPT/mai

 No action needed

 {{< /markdownify >}}
 {{< /tab >}}
+{{< tab tabName="Oceanbase版本" >}}
+{{< markdownify >}}
+
+No action needed
+
+{{< /markdownify >}}
+{{< /tab >}}
 {{< tab tabName="Milvus版本" >}}
```
````diff
@@ -138,7 +138,7 @@ FastGPT Commercial Edition includes 2 applications (fastgpt, fastgpt-plus) and 2 data
 SYSTEM_NAME=FastGPT
 SYSTEM_DESCRIPTION=
 SYSTEM_FAVICON=/favicon.ico
-HOME_URL=/app/list
+HOME_URL=/dashboard/apps
 ```

 SYSTEM_FAVICON can be a network URL
````
```diff
@@ -1,5 +1,5 @@
 ---
-title: 'V4.9.4 (in progress)'
+title: 'V4.9.4'
 description: 'FastGPT V4.9.4 release notes'
 icon: 'upgrade'
 draft: false
@@ -11,7 +11,7 @@ weight: 796

 ### 1. Back up your data

-### 1. Install Redis
+### 2. Install Redis

 * Docker deployments: follow the latest `docker-compose.yml` to add a Redis container, then configure the `fastgpt` and `fastgpt-pro` environments by adding a `REDIS_URL` environment variable.
 * Sealos deployments: create a new `redis` database in the database panel, copy the internal-network connection string as the Redis URL, then configure the `fastgpt` and `fastgpt-pro` environments by adding a `REDIS_URL` environment variable.
@@ -20,14 +20,14 @@ weight: 796
 | --- | --- | --- |
 |  |  |  |

-### 2. Update image tags
+### 3. Update image tags

-- Update the FastGPT image tag: v4.9.4-alpha
-- Update the FastGPT Commercial Edition image tag: v4.9.4-alpha
+- Update the FastGPT image tag: v4.9.4
+- Update the FastGPT Commercial Edition image tag: v4.9.4
 - Sandbox: no update needed
 - AIProxy: no update needed

-### 3. Run the upgrade script
+### 4. Run the upgrade script

 Only Commercial Edition users need to run this script.

@@ -49,8 +49,8 @@ curl --location --request POST 'https://{{host}}/api/admin/initv494' \
 2. SMTP email-sending plugin
 3. BullMQ message queue.
 4. Use Redis to cache some data.
-5. Site sync supports configuring training parameters.
-6. AI chat/tool calls return the model's finish_reason field.
+5. Site sync supports configuring training parameters and incremental sync.
+6. AI chat/tool calls return the model's finish_reason field, making it easier to trace why model output stopped.
 7. Mobile voice-input interaction adjustments

 ## ⚙️ Optimizations
```
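For the Redis step in the upgrade guide above, the expected value is a standard Redis connection string; a sketch for the docker-compose case, where the host name and password are illustrative assumptions, not values from this diff:

```bash
# Added to the fastgpt / fastgpt-pro environment; points at the new redis container.
REDIS_URL=redis://default:mypassword@redis:6379
```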
### docSite/content/zh-cn/docs/development/upgrading/495.md (new file, 39 lines)

---
title: 'V4.9.5'
description: 'FastGPT V4.9.5 release notes'
icon: 'upgrade'
draft: false
toc: true
weight: 795
---

## Upgrade guide
### 1. Back up your data

### 2. Update image tags

- Update the FastGPT image tag: v4.9.5
- Update the FastGPT Commercial Edition image tag: v4.9.5
- Sandbox: no update needed
- AIProxy: no update needed

## 🚀 New features

1. Finer-grained team member permissions: separately control whether members can create apps/knowledge bases in the root directory and create API keys.
2. Interactive nodes can be used inside nested workflows.
3. Team member operation logs.
4. The user-input node supports multi-select boxes.

## ⚙️ Optimizations

1. Traditional Chinese translation.
2. Arm image packaging.

## 🐛 Fixes

1. Incorrect password validation rule.
2. Share links could not hide knowledge-base retrieval results.
3. Regex compatibility issue on older iOS versions.
4. After an error in the Q&A extraction queue, the counter was not reset, which disabled the queue.
5. In debug mode, stepping past an interactive node could cause an infinite loop.
### docSite/content/zh-cn/docs/development/upgrading/496.md (new file, 32 lines)

---
title: 'V4.9.6 (in progress)'
description: 'FastGPT V4.9.6 release notes'
icon: 'upgrade'
draft: false
toc: true
weight: 794
---

## 🚀 New features

1. Expose apps for external invocation via MCP.
2. Support creating tools over the MCP SSE protocol.
3. The batch-execution node supports interactive nodes, enabling human participation in every loop iteration.
4. Added a second-level workbench menu and merged the toolbox into it.
5. Added system configuration for the grok3, GPT4.1 and Gemini2.5 models.

## ⚙️ Optimizations

1. More robust and compatible data-type conversion in workflows.
2. Python sandbox code supports large data inputs.
3. The breadcrumb component can be configured so the last step is clickable or not.
4. Knowledge-base tool-call results automatically complete image domains.
5. GitHub Actions runners upgraded to Ubuntu 24.04.

## 🐛 Fixes

1. When a sub-workflow contained an interactive node, not all of its data was restored successfully.
2. The completion v1 endpoint did not accept the interactive parameter, causing API calls to fail.
@@ -5,4 +5,207 @@ icon: "group"

draft: false
toc: true
weight: 450
---

# Teams, Member Groups & Permissions

## Permission system overview

The FastGPT permission system combines **attribute**-based and **role**-based access-control paradigms to provide fine-grained permission control for team collaboration. Through the three management modes of **members, departments and groups**, you can flexibly configure access permissions for resources such as teams, apps and knowledge bases.

## Teams

Each user can belong to several teams at once, and the system creates an initial team for every user by default. Manually creating additional teams is not currently supported.

## Permission management

FastGPT provides three dimensions of permission management:

**Member permissions**: highest priority, granted directly to an individual

**Department and group permissions**: combined as a union, with lower priority than member permissions

Permission checks follow this logic:

First, check the user's individual member permissions

Next, check the permissions of the user's departments and groups (taking the union)

The final permission is the combination of the above results

The authorization logic is as follows:



### Resource permissions

Different **resources** carry different permissions.

Resources here mean concepts such as apps, knowledge bases and teams.

The table below lists the manageable permissions for each resource.

<table>
  <thead>
    <tr>
      <th>Resource</th>
      <th>Manageable permissions</th>
      <th>Description</th>
    </tr>
  </thead>
  <tbody>
    <tr>
      <td rowspan="4">Team</td>
      <td>Create apps</td>
      <td>Basic operations such as create and delete</td>
    </tr>
    <tr>
      <td>Create knowledge bases</td>
      <td>Basic operations such as create and delete</td>
    </tr>
    <tr>
      <td>Create team API keys</td>
      <td>Basic operations such as create and delete</td>
    </tr>
    <tr>
      <td>Manage members</td>
      <td>Invite and remove users, create groups, etc.</td>
    </tr>
    <tr>
      <td rowspan="3">App</td>
      <td>Use</td>
      <td>Allowed to chat with the app</td>
    </tr>
    <tr>
      <td>Edit</td>
      <td>Modify basic info, orchestrate workflows, etc.</td>
    </tr>
    <tr>
      <td>Manage</td>
      <td>Add or remove collaborators</td>
    </tr>
    <tr>
      <td rowspan="3">Knowledge base</td>
      <td>Use</td>
      <td>Can be called from apps</td>
    </tr>
    <tr>
      <td>Edit</td>
      <td>Modify knowledge-base content</td>
    </tr>
    <tr>
      <td>Manage</td>
      <td>Add or remove collaborators</td>
    </tr>
  </tbody>
</table>

### Collaborators

You must add **collaborators** before you can manage their permissions:



When managing team permissions, first select the members/organizations/groups, then configure their permissions.



For resources such as apps and knowledge bases, member permissions can be modified directly.



Team permissions are configured on a dedicated permission page.



## Special permissions

### Administrator

Administrators mainly manage a resource's collaboration relationships, with the following restrictions:

- They cannot modify or remove their own permissions
- They cannot modify or remove other administrators' permissions
- They cannot grant the administrator permission to other collaborators

### Owner

Every resource has exactly one Owner, who holds the highest permission on it. The Owner can transfer ownership, after which the original Owner loses access to the resource.

### Root

Root is the system's only super-administrator account, with full access and management rights over every resource in every team.

## Tips

### 1. Set team-wide default permissions

Use the "all members" group to quickly set baseline permissions for the whole team, for example granting everyone access to an app.

**Note**: individual member permissions override the all-members group. For example, if app A grants everyone Edit but user M is individually set to Use, user M can only use, not edit, the app.

### 2. Bulk permission management

Creating groups or organizations lets you manage permissions for many users efficiently: add users to a group first, then grant permissions to the group as a whole.

### Developer reference

> The following is for developers; skip it if you are not doing secondary development.

#### Permission design

The FastGPT permission system is modeled on Linux permissions and stores permission bits in binary. A bit set to 1 means the permission is granted; 0 means it is not. The Owner permission is specially marked as all 1s.
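As a concrete illustration of bit-stored permissions, here is a minimal sketch; the bit assignments below are hypothetical, for illustration only, not FastGPT's actual constants:

```bash
# Hypothetical permission bits, Linux-style: each bit grants one capability.
READ=4    # 0b100
WRITE=2   # 0b010
MANAGE=1  # 0b001

permission=$((READ | WRITE))   # grant read + write -> 6 (0b110)

(( permission & READ ))   && echo "read: granted"
(( permission & MANAGE )) || echo "manage: denied"

# An owner-style value with every bit set passes any check:
owner=$((READ | WRITE | MANAGE))   # all 1s -> 7 (0b111)
(( owner & MANAGE )) && echo "owner has manage"
```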
#### Permission table

Permission records are stored in the MongoDB `resource_permissions` collection. Its main fields are:

- teamId: team identifier
- tmbId/groupId/orgId: the permission subject (exactly one of the three)
- resourceType: resource type (team/app/dataset)
- permission: the permission value (a number)
- resourceId: the resource ID (null for team resources)

This data structure gives the system flexible and precise permission control.

The schema for this table is defined in packages/service/support/permission/schema.ts as follows:

```typescript
export const ResourcePermissionSchema = new Schema({
  teamId: {
    type: Schema.Types.ObjectId,
    ref: TeamCollectionName
  },
  tmbId: {
    type: Schema.Types.ObjectId,
    ref: TeamMemberCollectionName
  },
  groupId: {
    type: Schema.Types.ObjectId,
    ref: MemberGroupCollectionName
  },
  orgId: {
    type: Schema.Types.ObjectId,
    ref: OrgCollectionName
  },
  resourceType: {
    type: String,
    enum: Object.values(PerResourceTypeEnum),
    required: true
  },
  permission: {
    type: Number,
    required: true
  },
  // Resource ID: App or DataSet or any other resource type.
  // It is null if the resourceType is team.
  resourceId: {
    type: Schema.Types.ObjectId
  }
});
```
@@ -7,76 +7,64 @@ toc: true
weight: -10
---

FastGPT is an AI Agent building platform that provides out-of-the-box data processing, model invocation and more, plus visual workflow orchestration via Flow to realize complex application scenarios!
FastGPT is a knowledge-base Q&A system built on LLMs that pairs intelligent conversation with visual orchestration, making AI application development simple and natural. Whether you are a developer or a business user, you can easily build your own AI application.

{{% alert icon="🤖 " context="success" %}}
Use FastGPT online: [https://tryfastgpt.ai](https://tryfastgpt.ai)
Get started quickly:
- International: [https://tryfastgpt.ai](https://tryfastgpt.ai)
- China: [https://fastgpt.cn](https://fastgpt.cn)
{{% /alert %}}

| | |
| --------------------- | --------------------- |
| ![](/imgs/intro1.png) | ![](/imgs/intro2.png) |
| ![](/imgs/intro3.png) | ![](/imgs/intro4.png) |
| | |
| --------------------- | --------------------------------- |
| ![](/imgs/intro1.png) | ![](/imgs/intro/intro-image2.png) |
## FastGPT capabilities
# FastGPT's strengths
## 1. Simple and flexible, as easy as building blocks 🧱
As simple and fun as LEGO: FastGPT offers rich functional modules, so a personalized AI application can be assembled by drag-and-drop alone, and even complex business flows need zero code.
## 2. Make data smarter 🧠
FastGPT provides a complete data-intelligence solution: data import, preprocessing, knowledge matching, and Q&A are automated end to end. Paired with visual workflow design, it makes professional-grade AI applications easy to build.
## 3. Open source and easy to integrate 🔗
FastGPT is open-sourced under the Apache 2.0 license and supports secondary development. It can be integrated quickly through standard APIs without modifying the source code, and supports mainstream models such as ChatGPT, Claude, DeepSeek, and ERNIE Bot, with continuous iteration keeping the product vital.

### 1. Dedicated AI customer service
---

Train on imported documents or existing Q&A pairs, so the AI model can answer questions about your documents in interactive conversation.
# What FastGPT can do
## 1. All-round knowledge base
Import documents and data of every kind with ease; knowledge is structured automatically. Smart Q&A supports multi-turn context understanding, and knowledge-base management keeps improving over time.
![](/imgs/fastgpt-dataset.png)

![All-round knowledge base](/imgs/intro/ability-dataset.png)
## 2. Visual workflows
FastGPT's intuitive drag-and-drop interface builds complex business flows with zero code. Rich node components cover diverse business needs, and flexible orchestration tailors flows on demand.
![Visual workflow](/imgs/intro/ability-workflow.png)

### 2. Simple, easy-to-use visual interface
## 3. Intelligent data parsing
FastGPT's knowledge-base system handles imported data very flexibly: it parses complex PDF structures intelligently, preserves images, tables, and LaTeX formulas, recognizes scanned files automatically, and structures content into clean Markdown. Images can also be auto-captioned and indexed so visual content becomes understandable and retrievable, ensuring knowledge is presented fully and accurately in AI Q&A.

FastGPT adopts an intuitive visual interface design that offers rich, practical features for all kinds of scenarios. Clear, simple steps take you through creating and training an AI customer-service bot.

![](/imgs/fastgpt-workflow.png)

### 3. Automatic data preprocessing

Multiple import paths are available: manual input, direct segmentation, LLM-automated processing, and CSV. "Direct segmentation" supports PDF, WORD, Markdown, and CSV document content as context. FastGPT automatically preprocesses, vectorizes, and QA-splits text data, saving manual training time and improving efficiency.

![](/imgs/inputdata.png)

### 4. Workflow orchestration
![Intelligent data parsing](/imgs/intro/ability-dataparse.png)

## 4. Workflow orchestration
Workflow orchestration based on the Flow module helps you design more complex Q&A flows, such as querying a database, checking inventory, or booking a lab.

![](https://docs.fastgpt.in/imgs/flow-dataset.png)
![](https://fastgpt.cn/imgs/intro/ability-flow.png)

### 5. Powerful API integration
## 5. Powerful API integration
FastGPT fully aligns with the official OpenAI API and supports one-click integration with WeCom, Official Accounts, Feishu, DingTalk, and other platforms, bringing AI into your business scenarios with ease.

FastGPT's external API aligns with the official OpenAI API, so it can plug directly into existing GPT applications and integrates easily with WeCom, Official Accounts, Feishu, and other platforms.
![](/imgs/openapi.png)

![](/imgs/sealos-fastgpt.webp)
---

## FastGPT features
# Core features

1. **Open source**
   - Out-of-the-box knowledge-base system
   - Visual low-code workflow orchestration
   - Mainstream LLM support
   - Simple, easy-to-use API
   - Flexible data processing

FastGPT follows the **Apache License 2.0 with additional conditions**; you can [Fork](https://github.com/labring/FastGPT/fork) it for secondary development and release. The FastGPT community edition keeps the core features; the commercial edition only extends it in API form on top of the community edition, without affecting learning and use.
---

2. **Unique QA structure**

   A QA structure designed for customer-service Q&A scenarios, improving answer accuracy on large datasets.

3. **Visual workflows**

   The Flow module shows the complete pipeline from question input to model output, making it easy to debug and design complex flows.

4. **Unlimited extensibility**

   Extend via APIs without modifying FastGPT's source code, and plug it quickly into existing programs.

5. **Easy debugging**

   Multiple debugging paths: search testing, quote editing, full conversation preview, and more.

6. **Multiple model support**

   Supports GPT, Claude, ERNIE Bot and other LLMs, with custom vector models planned.

## Knowledge-base core flow chart

![](/imgs/dataset_process.png)
# Knowledge-base core flow chart

![KB file flow](/imgs/intro/kb-file-flow.png)
12
package.json
@@ -12,27 +12,29 @@
    "previewIcon": "node ./scripts/icon/index.js",
    "api:gen": "tsc ./scripts/openapi/index.ts && node ./scripts/openapi/index.js && npx @redocly/cli build-docs ./scripts/openapi/openapi.json -o ./projects/app/public/openapi/index.html",
    "create:i18n": "node ./scripts/i18n/index.js",
    "test": "vitest run --exclude 'test/cases/spec'",
    "test:all": "vitest run",
    "test": "vitest run",
    "test:workflow": "vitest run workflow"
  },
  "devDependencies": {
    "@chakra-ui/cli": "^2.4.1",
    "@vitest/coverage-v8": "^3.0.2",
    "@vitest/coverage-v8": "^3.0.9",
    "husky": "^8.0.3",
    "i18next": "23.16.8",
    "lint-staged": "^13.3.0",
    "next-i18next": "15.4.2",
    "prettier": "3.2.4",
    "react-i18next": "14.1.2",
    "vitest": "^3.0.2",
    "vitest-mongodb": "^1.0.1",
    "vitest": "^3.0.9",
    "mongodb-memory-server": "^10.1.4",
    "zhlint": "^0.7.4"
  },
  "lint-staged": {
    "./**/**/*.{ts,tsx,scss}": "npm run format-code",
    "./docSite/**/**/*.md": "npm run format-doc"
  },
  "resolutions": {
    "mdast-util-gfm-autolink-literal": "2.0.0"
  },
  "engines": {
    "node": ">=18.16.0",
    "pnpm": ">=9.0.0"
@@ -5,6 +5,7 @@ import { ErrType } from '../errorCode';
const startCode = 507000;
export enum CommonErrEnum {
  invalidParams = 'invalidParams',
  invalidResource = 'invalidResource',
  fileNotFound = 'fileNotFound',
  unAuthFile = 'unAuthFile',
  missingParams = 'missingParams',
@@ -15,6 +16,10 @@ const datasetErr = [
    statusText: CommonErrEnum.fileNotFound,
    message: i18nT('common:error.invalid_params')
  },
  {
    statusText: CommonErrEnum.invalidResource,
    message: i18nT('common:error_invalid_resource')
  },
  {
    statusText: CommonErrEnum.fileNotFound,
    message: 'error.fileNotFound'
@@ -27,7 +27,8 @@ export enum TeamErrEnum {
  userNotActive = 'userNotActive',
  invitationLinkInvalid = 'invitationLinkInvalid',
  youHaveBeenInTheTeam = 'youHaveBeenInTheTeam',
  tooManyInvitations = 'tooManyInvitations'
  tooManyInvitations = 'tooManyInvitations',
  unPermission = 'unPermission'
}

const teamErr = [
@@ -35,6 +36,10 @@ const teamErr = [
    statusText: TeamErrEnum.notUser,
    message: i18nT('common:code_error.team_error.not_user')
  },
  {
    statusText: TeamErrEnum.unPermission,
    message: i18nT('common:error_un_permission')
  },
  {
    statusText: TeamErrEnum.teamOverSize,
    message: i18nT('common:code_error.team_error.over_size')
@@ -49,6 +49,7 @@ export type FastGPTFeConfigsType = {
  find_password_method?: ['email' | 'phone'];
  bind_notification_method?: ['email' | 'phone'];
  googleClientVerKey?: string;
  mcpServerProxyEndpoint?: string;

  show_emptyChat?: boolean;
  show_appStore?: boolean;
@@ -11,7 +11,9 @@ export enum AppTypeEnum {
  simple = 'simple',
  workflow = 'advanced',
  plugin = 'plugin',
  httpPlugin = 'httpPlugin'
  httpPlugin = 'httpPlugin',
  toolSet = 'toolSet',
  tool = 'tool'
}

export const AppFolderTypeList = [AppTypeEnum.folder, AppTypeEnum.httpPlugin];
@@ -53,7 +55,10 @@ export enum AppTemplateTypeEnum {
  imageGeneration = 'image-generation',
  webSearch = 'web-search',
  roleplay = 'roleplay',
  officeServices = 'office-services'
  officeServices = 'office-services',

  // special type
  contribute = 'contribute'
}

export const defaultDatasetMaxTokens = 16000;
97
packages/global/core/app/mcpTools/utils.ts
Normal file
@@ -0,0 +1,97 @@
import { NodeOutputKeyEnum, WorkflowIOValueTypeEnum } from '../../workflow/constants';
import {
  FlowNodeInputTypeEnum,
  FlowNodeOutputTypeEnum,
  FlowNodeTypeEnum
} from '../../workflow/node/constant';
import { nanoid } from 'nanoid';
import { ToolType } from '../type';
import { i18nT } from '../../../../web/i18n/utils';
import { RuntimeNodeItemType } from '../../workflow/runtime/type';

export const getMCPToolSetRuntimeNode = ({
  url,
  toolList,
  name,
  avatar
}: {
  url: string;
  toolList: ToolType[];
  name?: string;
  avatar?: string;
}): RuntimeNodeItemType => {
  return {
    nodeId: nanoid(16),
    flowNodeType: FlowNodeTypeEnum.toolSet,
    avatar,
    intro: 'MCP Tools',
    inputs: [
      {
        key: 'toolSetData',
        label: 'Tool Set Data',
        valueType: WorkflowIOValueTypeEnum.object,
        renderTypeList: [FlowNodeInputTypeEnum.hidden],
        value: { url, toolList }
      }
    ],
    outputs: [],
    name: name || '',
    version: ''
  };
};

export const getMCPToolRuntimeNode = ({
  tool,
  url,
  avatar = 'core/app/type/mcpToolsFill'
}: {
  tool: ToolType;
  url: string;
  avatar?: string;
}): RuntimeNodeItemType => {
  return {
    nodeId: nanoid(16),
    flowNodeType: FlowNodeTypeEnum.tool,
    avatar,
    intro: tool.description,
    inputs: [
      {
        key: 'toolData',
        label: 'Tool Data',
        valueType: WorkflowIOValueTypeEnum.object,
        renderTypeList: [FlowNodeInputTypeEnum.hidden],
        value: { ...tool, url }
      },
      ...Object.entries(tool.inputSchema?.properties || {}).map(([key, value]) => ({
        key,
        label: key,
        valueType: value.type as WorkflowIOValueTypeEnum,
        description: value.description,
        toolDescription: value.description || key,
        required: tool.inputSchema?.required?.includes(key) || false,
        renderTypeList: [
          value.type === 'string'
            ? FlowNodeInputTypeEnum.input
            : value.type === 'number'
              ? FlowNodeInputTypeEnum.numberInput
              : value.type === 'boolean'
                ? FlowNodeInputTypeEnum.switch
                : FlowNodeInputTypeEnum.JSONEditor
        ]
      }))
    ],
    outputs: [
      {
        id: NodeOutputKeyEnum.rawResponse,
        key: NodeOutputKeyEnum.rawResponse,
        required: true,
        label: i18nT('workflow:raw_response'),
        description: i18nT('workflow:tool_raw_response_description'),
        valueType: WorkflowIOValueTypeEnum.any,
        type: FlowNodeOutputTypeEnum.static
      }
    ],
    name: tool.name,
    version: ''
  };
};
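A hedged usage sketch for the helpers above; the tool definition and URL are invented for illustration:

```typescript
import { getMCPToolRuntimeNode, getMCPToolSetRuntimeNode } from '@fastgpt/global/core/app/mcpTools/utils';

const weatherTool = {
  name: 'getWeather',
  description: 'Query the current weather',
  inputSchema: {
    type: 'object',
    properties: { city: { type: 'string', description: 'City name' } },
    required: ['city']
  }
};

// One runtime node per tool: each inputSchema property becomes a typed node input,
// rendered as text/number/switch/JSON editor according to its JSON-schema type.
const toolNode = getMCPToolRuntimeNode({ tool: weatherTool, url: 'https://mcp.example.com/sse' });

// Or a single toolSet node that carries the whole tool list in a hidden input.
const toolSetNode = getMCPToolSetRuntimeNode({
  url: 'https://mcp.example.com/sse',
  toolList: [weatherTool],
  name: 'Weather MCP'
});
```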
10
packages/global/core/app/type.d.ts
vendored
@@ -16,6 +16,16 @@ import { FlowNodeInputTypeEnum } from '../../core/workflow/node/constant';
import { WorkflowTemplateBasicType } from '@fastgpt/global/core/workflow/type';
import { SourceMemberType } from '../../support/user/type';

export type ToolType = {
  name: string;
  description: string;
  inputSchema: {
    type: string;
    properties?: Record<string, { type: string; description?: string }>;
    required?: string[];
  };
};

export type AppSchema = {
  _id: string;
  parentId?: ParentIdType;
@@ -140,7 +140,9 @@ export const appWorkflow2Form = ({
    );
  } else if (
    node.flowNodeType === FlowNodeTypeEnum.pluginModule ||
    node.flowNodeType === FlowNodeTypeEnum.appModule
    node.flowNodeType === FlowNodeTypeEnum.appModule ||
    node.flowNodeType === FlowNodeTypeEnum.tool ||
    node.flowNodeType === FlowNodeTypeEnum.toolSet
  ) {
    if (!node.pluginId) return;
@@ -38,7 +38,8 @@ export enum ChatSourceEnum {
  team = 'team',
  feishu = 'feishu',
  official_account = 'official_account',
  wecom = 'wecom'
  wecom = 'wecom',
  mcp = 'mcp'
}

export const ChatSourceMap = {
@@ -68,6 +69,9 @@ export const ChatSourceMap = {
  },
  [ChatSourceEnum.wecom]: {
    name: i18nT('common:core.chat.logs.wecom')
  },
  [ChatSourceEnum.mcp]: {
    name: i18nT('common:core.chat.logs.mcp')
  }
};
@@ -77,6 +77,13 @@ export const getHistoryPreview = (
  });
};

export const filterModuleTypeList: any[] = [
  FlowNodeTypeEnum.pluginModule,
  FlowNodeTypeEnum.datasetSearchNode,
  FlowNodeTypeEnum.tools,
  FlowNodeTypeEnum.pluginOutput
];

export const filterPublicNodeResponseData = ({
  flowResponses = [],
  responseDetail = false
@@ -87,12 +94,6 @@ export const filterPublicNodeResponseData = ({
  const filedList = responseDetail
    ? ['quoteList', 'moduleType', 'pluginOutput', 'runningTime']
    : ['moduleType', 'pluginOutput', 'runningTime'];
  const filterModuleTypeList: any[] = [
    FlowNodeTypeEnum.pluginModule,
    FlowNodeTypeEnum.datasetSearchNode,
    FlowNodeTypeEnum.tools,
    FlowNodeTypeEnum.pluginOutput
  ];

  return flowResponses
    .filter((item) => filterModuleTypeList.includes(item.moduleType))
@@ -153,25 +154,55 @@ export const getChatSourceByPublishChannel = (publishChannel: PublishChannelEnum
/*
  Merge chat responseData
  1. Same tool mergeSignId (Interactive tool node)
  2. Recursively merge plugin details with same mergeSignId
*/
export const mergeChatResponseData = (responseDataList: ChatHistoryItemResType[]) => {
  let lastResponse: ChatHistoryItemResType | undefined = undefined;

  return responseDataList.reduce<ChatHistoryItemResType[]>((acc, curr) => {
    if (lastResponse && lastResponse.mergeSignId && curr.mergeSignId === lastResponse.mergeSignId) {
      // Replace lastResponse
      const concatResponse: ChatHistoryItemResType = {
        ...curr,
        runningTime: +((lastResponse.runningTime || 0) + (curr.runningTime || 0)).toFixed(2),
        totalPoints: (lastResponse.totalPoints || 0) + (curr.totalPoints || 0),
        childTotalPoints: (lastResponse.childTotalPoints || 0) + (curr.childTotalPoints || 0),
        toolCallTokens: (lastResponse.toolCallTokens || 0) + (curr.toolCallTokens || 0),
        toolDetail: [...(lastResponse.toolDetail || []), ...(curr.toolDetail || [])]
export const mergeChatResponseData = (
  responseDataList: ChatHistoryItemResType[]
): ChatHistoryItemResType[] => {
  // Merge children response data (children have an interactive response)
  const responseWithMergedPlugins = responseDataList.map((item) => {
    if (item.pluginDetail && item.pluginDetail.length > 1) {
      return {
        ...item,
        pluginDetail: mergeChatResponseData(item.pluginDetail)
      };
      return [...acc.slice(0, -1), concatResponse];
    } else {
      lastResponse = curr;
      return [...acc, curr];
    }
  }, []);
    return item;
  });

  let lastResponse: ChatHistoryItemResType | undefined = undefined;
  let hasMerged = false;

  const firstPassResult = responseWithMergedPlugins.reduce<ChatHistoryItemResType[]>(
    (acc, curr) => {
      if (
        lastResponse &&
        lastResponse.mergeSignId &&
        curr.mergeSignId === lastResponse.mergeSignId
      ) {
        const concatResponse: ChatHistoryItemResType = {
          ...curr,
          runningTime: +((lastResponse.runningTime || 0) + (curr.runningTime || 0)).toFixed(2),
          totalPoints: (lastResponse.totalPoints || 0) + (curr.totalPoints || 0),
          childTotalPoints: (lastResponse.childTotalPoints || 0) + (curr.childTotalPoints || 0),
          toolCallTokens: (lastResponse.toolCallTokens || 0) + (curr.toolCallTokens || 0),
          toolDetail: [...(lastResponse.toolDetail || []), ...(curr.toolDetail || [])],
          loopDetail: [...(lastResponse.loopDetail || []), ...(curr.loopDetail || [])],
          pluginDetail: [...(lastResponse.pluginDetail || []), ...(curr.pluginDetail || [])]
        };
        hasMerged = true;
        return [...acc.slice(0, -1), concatResponse];
      } else {
        lastResponse = curr;
        return [...acc, curr];
      }
    },
    []
  );

  if (hasMerged && firstPassResult.length > 1) {
    return mergeChatResponseData(firstPassResult);
  }

  return firstPassResult;
};
@@ -7,6 +7,7 @@ export enum FlowNodeInputTypeEnum { // render ui
  numberInput = 'numberInput',
  switch = 'switch', // true/false
  select = 'select',
  multipleSelect = 'multipleSelect',

  // editor
  JSONEditor = 'JSONEditor',
@@ -46,6 +47,9 @@ export const FlowNodeInputMap: Record<
  [FlowNodeInputTypeEnum.select]: {
    icon: 'core/workflow/inputType/option'
  },
  [FlowNodeInputTypeEnum.multipleSelect]: {
    icon: 'core/workflow/inputType/option'
  },
  [FlowNodeInputTypeEnum.switch]: {
    icon: 'core/workflow/inputType/switch'
  },
@@ -136,7 +140,9 @@ export enum FlowNodeTypeEnum {
  loopStart = 'loopStart',
  loopEnd = 'loopEnd',
  formInput = 'formInput',
  comment = 'comment'
  comment = 'comment',
  tool = 'tool',
  toolSet = 'toolSet'
}

// node IO value type
@@ -23,7 +23,7 @@ import { WorkflowResponseType } from '../../../../service/core/workflow/dispatch
import { AiChatQuoteRoleType } from '../template/system/aiChat/type';
import { LafAccountType, OpenaiAccountType } from '../../../support/user/team/type';
import { CompletionFinishReason } from '../../ai/type';

import { WorkflowInteractiveResponseType } from '../template/system/interactive/type';
export type ExternalProviderType = {
  openaiAccount?: OpenaiAccountType;
  externalWorkflowVariables?: Record<string, string>;
@@ -55,12 +55,14 @@ export type ChatDispatchProps = {
  variables: Record<string, any>; // global variable
  query: UserChatItemValueItemType[]; // trigger query
  chatConfig: AppSchema['chatConfig'];
  lastInteractive?: WorkflowInteractiveResponseType; // last interactive response
  stream: boolean;
  maxRunTimes: number;
  isToolCall?: boolean;
  workflowStreamResponse?: WorkflowResponseType;
  workflowDispatchDeep?: number;
  version?: 'v1' | 'v2';
  responseDetail?: boolean;
};

export type ModuleDispatchProps<T> = ChatDispatchProps & {
@@ -215,6 +217,8 @@ export type DispatchNodeResponseType = {
  // tool params
  toolParamsResult?: Record<string, any>;

  toolRes?: any;

  // abandon
  extensionModel?: string;
  extensionResult?: string;
@@ -10,7 +10,23 @@ import { FlowNodeOutputItemType, ReferenceValueType } from '../type/io';
import { ChatItemType, NodeOutputItemType } from '../../../core/chat/type';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '../../../core/chat/constants';
import { replaceVariable, valToStr } from '../../../common/string/tools';
import json5 from 'json5';
import {
  InteractiveNodeResponseType,
  WorkflowInteractiveResponseType
} from '../template/system/interactive/type';

export const extractDeepestInteractive = (
  interactive: WorkflowInteractiveResponseType
): WorkflowInteractiveResponseType => {
  if (
    (interactive?.type === 'childrenInteractive' || interactive?.type === 'loopInteractive') &&
    interactive.params?.childrenResponse
  ) {
    return extractDeepestInteractive(interactive.params.childrenResponse);
  }
  return interactive;
};
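A hedged illustration of how nested interactive responses unwrap; the object shape is abridged, hence the cast:

```typescript
// A loop that paused on a nested user-select node: extractDeepestInteractive
// follows params.childrenResponse links down to the innermost response.
const nested = {
  type: 'loopInteractive',
  params: {
    childrenResponse: { type: 'userSelect', params: { userSelectOptions: [] } }
  }
} as unknown as WorkflowInteractiveResponseType;

extractDeepestInteractive(nested); // => the inner userSelect response
```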
export const getMaxHistoryLimitFromNodes = (nodes: StoreNodeItemType[]): number => {
  let limit = 10;
  nodes.forEach((node) => {
@@ -28,13 +44,122 @@ export const getMaxHistoryLimitFromNodes = (nodes: StoreNodeItemType[]): number
  return limit * 2;
};
/* value type format */
export const valueTypeFormat = (value: any, type?: WorkflowIOValueTypeEnum) => {
  const isObjectString = (value: any) => {
    if (typeof value === 'string' && value !== 'false' && value !== 'true') {
      const trimmedValue = value.trim();
      const isJsonString =
        (trimmedValue.startsWith('{') && trimmedValue.endsWith('}')) ||
        (trimmedValue.startsWith('[') && trimmedValue.endsWith(']'));
      return isJsonString;
    }
    return false;
  };

  // 1. any values skip formatting
  if (value === undefined || value === null) return value;
  if (!type || type === WorkflowIOValueTypeEnum.any) return value;

  // 2. If the value already matches the target type, return it directly
  if (
    (type === WorkflowIOValueTypeEnum.string && typeof value === 'string') ||
    (type === WorkflowIOValueTypeEnum.number && typeof value === 'number') ||
    (type === WorkflowIOValueTypeEnum.boolean && typeof value === 'boolean') ||
    (type.startsWith('array') && Array.isArray(value)) ||
    (type === WorkflowIOValueTypeEnum.object && typeof value === 'object') ||
    (type === WorkflowIOValueTypeEnum.chatHistory &&
      (Array.isArray(value) || typeof value === 'number')) ||
    (type === WorkflowIOValueTypeEnum.datasetQuote && Array.isArray(value)) ||
    (type === WorkflowIOValueTypeEnum.selectDataset && Array.isArray(value)) ||
    (type === WorkflowIOValueTypeEnum.selectApp && typeof value === 'object')
  ) {
    return value;
  }

  // 4. Convert to the target type
  // 4.1 Primitive conversions
  if (type === WorkflowIOValueTypeEnum.string) {
    return typeof value === 'object' ? JSON.stringify(value) : String(value);
  }
  if (type === WorkflowIOValueTypeEnum.number) {
    return Number(value);
  }
  if (type === WorkflowIOValueTypeEnum.boolean) {
    if (typeof value === 'string') {
      return value.toLowerCase() === 'true';
    }
    return Boolean(value);
  }

  // 4.3 String to object
  if (
    (type === WorkflowIOValueTypeEnum.object || type.startsWith('array')) &&
    typeof value === 'string' &&
    value.trim()
  ) {
    const trimmedValue = value.trim();
    const isJsonString = isObjectString(trimmedValue);

    if (isJsonString) {
      try {
        const parsed = json5.parse(trimmedValue);
        // Check that the parsed result matches the target type
        if (type.startsWith('array') && Array.isArray(parsed)) return parsed;
        if (type === WorkflowIOValueTypeEnum.object && typeof parsed === 'object') return parsed;
      } catch (error) {}
    }
  }

  // 4.4 Array types (value is not an array here) (TODO: nested type conversion)
  if (type.startsWith('array')) {
    return [value];
  }

  // 4.5 Special types
  if (
    [WorkflowIOValueTypeEnum.datasetQuote, WorkflowIOValueTypeEnum.selectDataset].includes(type)
  ) {
    if (isObjectString(value)) {
      try {
        return json5.parse(value);
      } catch (error) {
        return [];
      }
    }
    return [];
  }
  if (
    [WorkflowIOValueTypeEnum.selectApp, WorkflowIOValueTypeEnum.object].includes(type) &&
    typeof value === 'string'
  ) {
    if (isObjectString(value)) {
      try {
        return json5.parse(value);
      } catch (error) {
        return {};
      }
    }
    return {};
  }
  // Invalid history type
  if (type === WorkflowIOValueTypeEnum.chatHistory) {
    return 0;
  }

  // 5. Fall back to the original value
  return value;
};
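A few hedged examples of valueTypeFormat's behavior; the array-typed enum member name is assumed:

```typescript
valueTypeFormat('{"a": 1}', WorkflowIOValueTypeEnum.object);   // => { a: 1 } (json5-parsed)
valueTypeFormat('TRUE', WorkflowIOValueTypeEnum.boolean);      // => true (case-insensitive)
valueTypeFormat(42, WorkflowIOValueTypeEnum.string);           // => '42'
valueTypeFormat('hello', WorkflowIOValueTypeEnum.arrayString); // => ['hello'] (wrapped per step 4.4)
```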
/*
  Get interaction information (if any) from the last AI message.
  What can be done:
    1. Get the interactive data
    2. Check that the workflow starts at the interaction node
*/
export const getLastInteractiveValue = (histories: ChatItemType[]) => {
export const getLastInteractiveValue = (
  histories: ChatItemType[]
): WorkflowInteractiveResponseType | undefined => {
  const lastAIMessage = [...histories].reverse().find((item) => item.obj === ChatRoleEnum.AI);

  if (lastAIMessage) {
@@ -45,7 +170,14 @@ export const getLastInteractiveValue = (histories: ChatItemType[]) => {
      lastValue.type !== ChatItemValueTypeEnum.interactive ||
      !lastValue.interactive
    ) {
      return null;
      return;
    }

    if (
      lastValue.interactive.type === 'childrenInteractive' ||
      lastValue.interactive.type === 'loopInteractive'
    ) {
      return lastValue.interactive;
    }

    // Check is user select
@@ -62,38 +194,29 @@ export const getLastInteractiveValue = (histories: ChatItemType[]) => {
    }
  }

  return null;
  return;
};

export const initWorkflowEdgeStatus = (
  edges: StoreEdgeItemType[] | RuntimeEdgeItemType[],
  histories?: ChatItemType[]
export const storeEdges2RuntimeEdges = (
  edges: StoreEdgeItemType[],
  lastInteractive?: WorkflowInteractiveResponseType
): RuntimeEdgeItemType[] => {
  // If there is a history, use the last interactive value
  if (histories && histories.length > 0) {
    const memoryEdges = getLastInteractiveValue(histories)?.memoryEdges;

  if (lastInteractive) {
    const memoryEdges = lastInteractive.memoryEdges || [];
    if (memoryEdges && memoryEdges.length > 0) {
      return memoryEdges;
    }
  }

  return (
    edges?.map((edge) => ({
      ...edge,
      status: 'waiting'
    })) || []
  );
  return edges?.map((edge) => ({ ...edge, status: 'waiting' })) || [];
};

export const getWorkflowEntryNodeIds = (
  nodes: (StoreNodeItemType | RuntimeNodeItemType)[],
  histories?: ChatItemType[]
  lastInteractive?: WorkflowInteractiveResponseType
) => {
  // If there is a history, use the last interactive entry node
  if (histories && histories.length > 0) {
    const entryNodeIds = getLastInteractiveValue(histories)?.entryNodeIds;

  if (lastInteractive) {
    const entryNodeIds = lastInteractive.entryNodeIds || [];
    if (Array.isArray(entryNodeIds) && entryNodeIds.length > 0) {
      return entryNodeIds;
    }
@@ -105,7 +228,12 @@ export const getWorkflowEntryNodeIds = (
    FlowNodeTypeEnum.pluginInput
  ];
  return nodes
    .filter((node) => entryList.includes(node.flowNodeType as any))
    .filter(
      (node) =>
        entryList.includes(node.flowNodeType as any) ||
        (!nodes.some((item) => entryList.includes(item.flowNodeType as any)) &&
          node.flowNodeType === FlowNodeTypeEnum.tool)
    )
    .map((item) => item.nodeId);
};
@@ -303,7 +431,6 @@ export const formatVariableValByType = (val: any, valueType?: WorkflowIOValueTyp
  if (
    [
      WorkflowIOValueTypeEnum.object,
      WorkflowIOValueTypeEnum.chatHistory,
      WorkflowIOValueTypeEnum.datasetQuote,
      WorkflowIOValueTypeEnum.selectApp,
      WorkflowIOValueTypeEnum.selectDataset
@@ -396,10 +523,10 @@ export const textAdaptGptResponse = ({

/* Update runtimeNode's outputs with interactive data from history */
export function rewriteNodeOutputByHistories(
  histories: ChatItemType[],
  runtimeNodes: RuntimeNodeItemType[]
  runtimeNodes: RuntimeNodeItemType[],
  lastInteractive?: InteractiveNodeResponseType
) {
  const interactive = getLastInteractiveValue(histories);
  const interactive = lastInteractive;
  if (!interactive?.nodeOutputs) {
    return runtimeNodes;
  }
@@ -34,6 +34,8 @@ import { LoopStartNode } from './system/loop/loopStart';
import { LoopEndNode } from './system/loop/loopEnd';
import { FormInputNode } from './system/interactive/formInput';
import { ToolParamsNode } from './system/toolParams';
import { RunToolNode } from './system/runTool';
import { RunToolSetNode } from './system/runToolSet';

const systemNodes: FlowNodeTemplateType[] = [
  AiChatModule,
@@ -84,5 +86,7 @@ export const moduleTemplatesFlat: FlowNodeTemplateType[] = [
  RunAppNode,
  RunAppModule,
  LoopStartNode,
  LoopEndNode
  LoopEndNode,
  RunToolNode,
  RunToolSetNode
];
@@ -8,7 +8,7 @@ import { i18nT } from '../../../../web/i18n/utils';
export const Input_Template_History: FlowNodeInputItemType = {
  key: NodeInputKeyEnum.history,
  renderTypeList: [FlowNodeInputTypeEnum.numberInput, FlowNodeInputTypeEnum.reference],
  valueType: WorkflowIOValueTypeEnum.chatHistory,
  valueType: WorkflowIOValueTypeEnum.chatHistory, // Array / Number
  label: i18nT('common:core.module.input.label.chat history'),
  description: i18nT('workflow:max_dialog_rounds'),
@@ -1,6 +1,5 @@
import type { NodeOutputItemType } from '../../../../chat/type';
import type { FlowNodeOutputItemType } from '../../../type/io';
import type { RuntimeEdgeItemType } from '../../../runtime/type';
import { FlowNodeInputTypeEnum } from 'core/workflow/node/constant';
import { WorkflowIOValueTypeEnum } from 'core/workflow/constants';
import type { ChatCompletionMessageParam } from '../../../../ai/type';
@@ -9,7 +8,6 @@ type InteractiveBasicType = {
  entryNodeIds: string[];
  memoryEdges: RuntimeEdgeItemType[];
  nodeOutputs: NodeOutputItemType[];

  toolParams?: {
    entryNodeIds: string[]; // IDs of the interactive nodes inside the tool, not the workflow's entry nodes
    memoryMessages: ChatCompletionMessageParam[]; // new messages produced during this tool round
@@ -23,6 +21,22 @@ type InteractiveNodeType = {
  nodeOutputs?: NodeOutputItemType[];
};

type ChildrenInteractive = InteractiveNodeType & {
  type: 'childrenInteractive';
  params: {
    childrenResponse?: WorkflowInteractiveResponseType;
  };
};

type LoopInteractive = InteractiveNodeType & {
  type: 'loopInteractive';
  params: {
    loopResult: any[];
    childrenResponse: WorkflowInteractiveResponseType;
    currentIndex: number;
  };
};

export type UserSelectOptionItemType = {
  key: string;
  value: string;
@@ -62,5 +76,11 @@ type UserInputInteractive = InteractiveNodeType & {
    submitted?: boolean;
  };
};
export type InteractiveNodeResponseType = UserSelectInteractive | UserInputInteractive;

export type InteractiveNodeResponseType =
  | UserSelectInteractive
  | UserInputInteractive
  | ChildrenInteractive
  | LoopInteractive;

export type WorkflowInteractiveResponseType = InteractiveBasicType & InteractiveNodeResponseType;
19
packages/global/core/workflow/template/system/runTool.ts
Normal file
@@ -0,0 +1,19 @@
import { FlowNodeTemplateTypeEnum } from '../../constants';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowNodeTemplateType } from '../../type/node';
import { getHandleConfig } from '../utils';

export const RunToolNode: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.tool,
  templateType: FlowNodeTemplateTypeEnum.other,
  flowNodeType: FlowNodeTypeEnum.tool,
  sourceHandle: getHandleConfig(true, true, true, true),
  targetHandle: getHandleConfig(true, true, true, true),
  intro: '',
  name: '',
  showStatus: false,
  isTool: true,
  version: '4.9.6',
  inputs: [],
  outputs: []
};
19
packages/global/core/workflow/template/system/runToolSet.ts
Normal file
@@ -0,0 +1,19 @@
import { FlowNodeTemplateTypeEnum } from '../../constants';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowNodeTemplateType } from '../../type/node';
import { getHandleConfig } from '../utils';

export const RunToolSetNode: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.toolSet,
  templateType: FlowNodeTemplateTypeEnum.other,
  flowNodeType: FlowNodeTypeEnum.toolSet,
  sourceHandle: getHandleConfig(false, false, false, false),
  targetHandle: getHandleConfig(false, false, false, false),
  intro: '',
  name: '',
  showStatus: false,
  isTool: true,
  version: '4.9.6',
  inputs: [],
  outputs: []
};
@@ -311,6 +311,38 @@ export const appData2FlowNodeIO = ({
  };
};

export const toolData2FlowNodeIO = ({
  nodes
}: {
  nodes: StoreNodeItemType[];
}): {
  inputs: FlowNodeInputItemType[];
  outputs: FlowNodeOutputItemType[];
} => {
  const toolNode = nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.tool);

  return {
    inputs: toolNode?.inputs || [],
    outputs: toolNode?.outputs || []
  };
};

export const toolSetData2FlowNodeIO = ({
  nodes
}: {
  nodes: StoreNodeItemType[];
}): {
  inputs: FlowNodeInputItemType[];
  outputs: FlowNodeOutputItemType[];
} => {
  const toolSetNode = nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.toolSet);

  return {
    inputs: toolSetNode?.inputs || [],
    outputs: toolSetNode?.outputs || []
  };
};

export const formatEditorVariablePickerIcon = (
  variables: { key: string; label: string; type?: `${VariableInputEnum}`; required?: boolean }[]
): EditorVariablePickerType[] => {
14
packages/global/support/mcp/type.d.ts
vendored
Normal file
@@ -0,0 +1,14 @@
export type McpKeyType = {
  _id: string;
  key: string;
  teamId: string;
  tmbId: string;
  apps: McpAppType[];
  name: string;
};

export type McpAppType = {
  appId: string;
  toolName: string;
  description: string;
};
14
packages/global/support/operationLog/constants.ts
Normal file
@@ -0,0 +1,14 @@
export enum OperationLogEventEnum {
  LOGIN = 'LOGIN',
  CREATE_INVITATION_LINK = 'CREATE_INVITATION_LINK',
  JOIN_TEAM = 'JOIN_TEAM',
  CHANGE_MEMBER_NAME = 'CHANGE_MEMBER_NAME',
  KICK_OUT_TEAM = 'KICK_OUT_TEAM',
  CREATE_DEPARTMENT = 'CREATE_DEPARTMENT',
  CHANGE_DEPARTMENT = 'CHANGE_DEPARTMENT',
  DELETE_DEPARTMENT = 'DELETE_DEPARTMENT',
  RELOCATE_DEPARTMENT = 'RELOCATE_DEPARTMENT',
  CREATE_GROUP = 'CREATE_GROUP',
  DELETE_GROUP = 'DELETE_GROUP',
  ASSIGN_PERMISSION = 'ASSIGN_PERMISSION'
}
19
packages/global/support/operationLog/type.d.ts
vendored
Normal file
@@ -0,0 +1,19 @@
import { SourceMemberType } from '../user/type';
import { OperationLogEventEnum } from './constants';

export type OperationLogSchema = {
  _id: string;
  tmbId: string;
  teamId: string;
  timestamp: Date;
  event: `${OperationLogEventEnum}`;
  metadata?: Record<string, string>;
};

export type OperationListItemType = {
  _id: string;
  sourceMember: SourceMemberType;
  event: `${OperationLogEventEnum}`;
  timestamp: Date;
  metadata: Record<string, string>;
};
@@ -13,12 +13,15 @@ export type CollaboratorItemType = {
  orgId: string;
}>;

export type UpdateClbPermissionProps = {
export type UpdateClbPermissionProps<addOnly = false> = {
  members?: string[];
  groups?: string[];
  orgs?: string[];
  permission: PermissionValueType;
};
} & (addOnly extends true
  ? {}
  : {
      permission: PermissionValueType;
    });

export type DeletePermissionQuery = RequireOnlyOne<{
  tmbId?: string;
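A hedged illustration of how the addOnly type parameter changes the required shape (values invented):

```typescript
// permission may be omitted when only adding collaborators...
const addProps: UpdateClbPermissionProps<true> = { members: ['tmb_1'] };

// ...but is required in the default (update) form.
const updateProps: UpdateClbPermissionProps = {
  members: ['tmb_1'],
  permission: 0b110 // e.g. read + write bits
};
```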
@@ -5,15 +5,16 @@ export type PerConstructPros = {
  per?: PermissionValueType;
  isOwner?: boolean;
  permissionList?: PermissionListType;
  childUpdatePermissionCallback?: () => void;
};

// the Permission helper class
export class Permission {
  value: PermissionValueType;
  isOwner: boolean;
  hasManagePer: boolean;
  hasWritePer: boolean;
  hasReadPer: boolean;
  isOwner: boolean = false;
  hasManagePer: boolean = false;
  hasWritePer: boolean = false;
  hasReadPer: boolean = false;
  _permissionList: PermissionListType;

  constructor(props?: PerConstructPros) {
@@ -24,11 +25,8 @@ export class Permission {
      this.value = per;
    }

    this.isOwner = isOwner;
    this._permissionList = permissionList;
    this.hasManagePer = this.checkPer(this._permissionList['manage'].value);
    this.hasWritePer = this.checkPer(this._permissionList['write'].value);
    this.hasReadPer = this.checkPer(this._permissionList['read'].value);
    this.updatePermissions();
  }

  // add permission(s)
@@ -68,10 +66,21 @@ export class Permission {
    return (this.value & perm) === perm;
  }

  private updatePermissionCallback?: () => void;
  setUpdatePermissionCallback(callback: () => void) {
    callback();
    this.updatePermissionCallback = callback;
  }

  private updatePermissions() {
    this.isOwner = this.value === OwnerPermissionVal;
    this.hasManagePer = this.checkPer(this._permissionList['manage'].value);
    this.hasWritePer = this.checkPer(this._permissionList['write'].value);
    this.hasReadPer = this.checkPer(this._permissionList['read'].value);
    this.updatePermissionCallback?.();
  }

  toBinary() {
    return this.value.toString(2);
  }
}
@@ -17,23 +17,23 @@ type GroupMemberSchemaType = {
  role: `${GroupMemberRole}`;
};

type MemberGroupListItemType<T extends boolean | undefined> = MemberGroupSchemaType & {
  members: T extends true
type MemberGroupListItemType<WithMembers extends boolean | undefined> = MemberGroupSchemaType & {
  members: WithMembers extends true
    ? {
        tmbId: string;
        name: string;
        avatar: string;
      }[]
    : undefined;
  count: T extends true ? number : undefined;
  owner?: T extends true
  count: WithMembers extends true ? number : undefined;
  owner?: WithMembers extends true
    ? {
        tmbId: string;
        name: string;
        avatar: string;
      }
    : undefined;
  permission: T extends true ? Permission : undefined;
  permission: WithMembers extends true ? Permission : undefined;
};

type GroupMemberItemType = {
@@ -1,22 +1,50 @@
import { PermissionKeyEnum } from '../constant';
import { PermissionListType } from '../type';
import { PermissionList } from '../constant';
export const TeamPermissionList: PermissionListType = {
import { i18nT } from '../../../../web/i18n/utils';
export enum TeamPermissionKeyEnum {
  appCreate = 'appCreate',
  datasetCreate = 'datasetCreate',
  apikeyCreate = 'apikeyCreate'
}

export const TeamPermissionList: PermissionListType<TeamPermissionKeyEnum> = {
  [PermissionKeyEnum.read]: {
    ...PermissionList[PermissionKeyEnum.read],
    value: 0b100
    value: 0b000100
  },
  [PermissionKeyEnum.write]: {
    ...PermissionList[PermissionKeyEnum.write],
    value: 0b010
    value: 0b000010
  },
  [PermissionKeyEnum.manage]: {
    ...PermissionList[PermissionKeyEnum.manage],
    value: 0b001
    value: 0b000001
  },
  [TeamPermissionKeyEnum.appCreate]: {
    checkBoxType: 'multiple',
    description: '',
    name: i18nT('account_team:permission_appCreate'),
    value: 0b001000
  },
  [TeamPermissionKeyEnum.datasetCreate]: {
    checkBoxType: 'multiple',
    description: '',
    name: i18nT('account_team:permission_datasetCreate'),
    value: 0b010000
  },
  [TeamPermissionKeyEnum.apikeyCreate]: {
    checkBoxType: 'multiple',
    description: '',
    name: i18nT('account_team:permission_apikeyCreate'),
    value: 0b100000
  }
};

export const TeamReadPermissionVal = TeamPermissionList['read'].value;
export const TeamWritePermissionVal = TeamPermissionList['write'].value;
export const TeamManagePermissionVal = TeamPermissionList['manage'].value;
export const TeamAppCreatePermissionVal = TeamPermissionList['appCreate'].value;
export const TeamDatasetCreatePermissionVal = TeamPermissionList['datasetCreate'].value;
export const TeamApikeyCreatePermissionVal = TeamPermissionList['apikeyCreate'].value;
export const TeamDefaultPermissionVal = TeamReadPermissionVal;
@@ -1,7 +1,17 @@
import { PerConstructPros, Permission } from '../controller';
import { TeamDefaultPermissionVal, TeamPermissionList } from './constant';
import {
  TeamApikeyCreatePermissionVal,
  TeamAppCreatePermissionVal,
  TeamDatasetCreatePermissionVal,
  TeamDefaultPermissionVal,
  TeamPermissionList
} from './constant';

export class TeamPermission extends Permission {
  hasAppCreatePer: boolean = false;
  hasDatasetCreatePer: boolean = false;
  hasApikeyCreatePer: boolean = false;

  constructor(props?: PerConstructPros) {
    if (!props) {
      props = {
@@ -12,5 +22,11 @@ export class TeamPermission extends Permission {
    }
    props.permissionList = TeamPermissionList;
    super(props);

    this.setUpdatePermissionCallback(() => {
      this.hasAppCreatePer = this.checkPer(TeamAppCreatePermissionVal);
      this.hasDatasetCreatePer = this.checkPer(TeamDatasetCreatePermissionVal);
      this.hasApikeyCreatePer = this.checkPer(TeamApikeyCreatePermissionVal);
    });
  }
}
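A hedged sketch of combining the new team bits with the derived flags; import paths are assumed:

```typescript
import { TeamPermission } from '@fastgpt/global/support/permission/user/controller'; // path assumed
import {
  TeamReadPermissionVal,
  TeamAppCreatePermissionVal
} from '@fastgpt/global/support/permission/user/constant'; // path assumed

// Combine bit values with OR, as in the permission design notes above.
const per = new TeamPermission({ per: TeamReadPermissionVal | TeamAppCreatePermissionVal });
per.hasReadPer;          // true  (0b000100 present)
per.hasAppCreatePer;     // true  (0b001000 present; set via the constructor's callback)
per.hasDatasetCreatePer; // false (0b010000 absent)
```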
@@ -11,7 +11,8 @@ export enum UsageSourceEnum {
  feishu = 'feishu',
  dingtalk = 'dingtalk',
  official_account = 'official_account',
  pdfParse = 'pdfParse'
  pdfParse = 'pdfParse',
  mcp = 'mcp'
}

export const UsageSourceMap = {
@@ -47,5 +48,8 @@ export const UsageSourceMap = {
  },
  [UsageSourceEnum.pdfParse]: {
    label: i18nT('account_usage:pdf_parse')
  },
  [UsageSourceEnum.mcp]: {
    label: i18nT('account_usage:mcp')
  }
};
@@ -32,3 +32,13 @@ export const getImageBase64 = async (url: string) => {
    return Promise.reject(error);
  }
};

export const addEndpointToImageUrl = (text: string) => {
  const baseURL = process.env.FE_DOMAIN;
  if (!baseURL) return text;
  // Match image links like /api/system/img/xxx.xx and prepend the baseURL
  return text.replace(
    /(?<!https?:\/\/[^\s]*)(?:\/api\/system\/img\/[^\s.]*\.[^\s]*)/g,
    (match) => `${baseURL}${match}`
  );
};
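A hedged behavior sketch; the FE_DOMAIN value is invented:

```typescript
process.env.FE_DOMAIN = 'https://fastgpt.example';

addEndpointToImageUrl('see /api/system/img/abc.png');
// => 'see https://fastgpt.example/api/system/img/abc.png'

addEndpointToImageUrl('https://other.host/api/system/img/abc.png');
// => unchanged: the negative lookbehind skips already-absolute URLs
```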
@@ -69,7 +69,7 @@ const addCommonMiddleware = (schema: mongoose.Schema) => {

export const getMongoModel = <T>(name: string, schema: mongoose.Schema) => {
  if (connectionMongo.models[name]) return connectionMongo.models[name] as Model<T>;
  console.log('Load model======', name);
  if (process.env.NODE_ENV !== 'test') console.log('Load model======', name);
  addCommonMiddleware(schema);

  const model = connectionMongo.model<T>(name, schema);
@@ -1,6 +1,30 @@
{
  "provider": "Gemini",
  "list": [
    {
      "model": "gemini-2.5-pro-exp-03-25",
      "name": "gemini-2.5-pro-exp-03-25",
      "maxContext": 1000000,
      "maxResponse": 63000,
      "quoteMaxToken": 1000000,
      "maxTemperature": 1,
      "vision": true,
      "toolChoice": true,
      "functionCall": false,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
      "usedInClassify": true,
      "customCQPrompt": "",
      "usedInExtractFields": true,
      "usedInQueryExtension": true,
      "customExtractPrompt": "",
      "usedInToolCall": true,
      "defaultConfig": {},
      "fieldMap": {},
      "type": "llm",
      "showTopP": true,
      "showStopSign": true
    },
    {
      "model": "gemini-2.0-flash",
      "name": "gemini-2.0-flash",
@@ -1,6 +1,54 @@
{
  "provider": "Grok",
  "list": [
    {
      "model": "grok-3-mini",
      "name": "grok-3-mini",
      "maxContext": 128000,
      "maxResponse": 8000,
      "quoteMaxToken": 128000,
      "maxTemperature": 1,
      "showTopP": true,
      "showStopSign": true,
      "vision": false,
      "toolChoice": true,
      "functionCall": false,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
      "usedInClassify": true,
      "customCQPrompt": "",
      "usedInExtractFields": true,
      "usedInQueryExtension": true,
      "customExtractPrompt": "",
      "usedInToolCall": true,
      "defaultConfig": {},
      "fieldMap": {},
      "type": "llm"
    },
    {
      "model": "grok-3-mini-fast",
      "name": "grok-3-mini-fast",
      "maxContext": 128000,
      "maxResponse": 8000,
      "quoteMaxToken": 128000,
      "maxTemperature": 1,
      "showTopP": true,
      "showStopSign": true,
      "vision": false,
      "toolChoice": true,
      "functionCall": false,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
      "usedInClassify": true,
      "customCQPrompt": "",
      "usedInExtractFields": true,
      "usedInQueryExtension": true,
      "customExtractPrompt": "",
      "usedInToolCall": true,
      "defaultConfig": {},
      "fieldMap": {},
      "type": "llm"
    },
    {
      "model": "grok-3",
      "name": "grok-3",
@@ -11,7 +59,31 @@
      "showTopP": true,
      "showStopSign": true,
      "vision": false,
      "toolChoice": false,
      "toolChoice": true,
      "functionCall": false,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
      "usedInClassify": true,
      "customCQPrompt": "",
      "usedInExtractFields": true,
      "usedInQueryExtension": true,
      "customExtractPrompt": "",
      "usedInToolCall": true,
      "defaultConfig": {},
      "fieldMap": {},
      "type": "llm"
    },
    {
      "model": "grok-3-fast",
      "name": "grok-3-fast",
      "maxContext": 128000,
      "maxResponse": 8000,
      "quoteMaxToken": 128000,
      "maxTemperature": 1,
      "showTopP": true,
      "showStopSign": true,
      "vision": false,
      "toolChoice": true,
      "functionCall": false,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
@@ -1,6 +1,78 @@
{
  "provider": "OpenAI",
  "list": [
    {
      "model": "gpt-4.1",
      "name": "gpt-4.1",
      "maxContext": 1000000,
      "maxResponse": 32000,
      "quoteMaxToken": 1000000,
      "maxTemperature": 1.2,
      "showTopP": true,
      "responseFormatList": ["text", "json_object", "json_schema"],
      "showStopSign": true,
      "vision": true,
      "toolChoice": true,
      "functionCall": true,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
      "usedInClassify": true,
      "customCQPrompt": "",
      "usedInExtractFields": true,
      "customExtractPrompt": "",
      "usedInToolCall": true,
      "defaultConfig": {},
      "fieldMap": {},
      "type": "llm"
    },
    {
      "model": "gpt-4.1-mini",
      "name": "gpt-4.1-mini",
      "maxContext": 1000000,
      "maxResponse": 32000,
      "quoteMaxToken": 1000000,
      "maxTemperature": 1.2,
      "showTopP": true,
      "responseFormatList": ["text", "json_object", "json_schema"],
      "showStopSign": true,
      "vision": true,
      "toolChoice": true,
      "functionCall": true,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
      "usedInClassify": true,
      "customCQPrompt": "",
      "usedInExtractFields": true,
      "customExtractPrompt": "",
      "usedInToolCall": true,
      "defaultConfig": {},
      "fieldMap": {},
      "type": "llm"
    },
    {
      "model": "gpt-4.1-nano",
      "name": "gpt-4.1-nano",
      "maxContext": 1000000,
      "maxResponse": 32000,
      "quoteMaxToken": 1000000,
      "maxTemperature": 1.2,
      "showTopP": true,
      "responseFormatList": ["text", "json_object", "json_schema"],
      "showStopSign": true,
      "vision": true,
      "toolChoice": true,
      "functionCall": true,
      "defaultSystemChatPrompt": "",
      "datasetProcess": true,
      "usedInClassify": true,
      "customCQPrompt": "",
      "usedInExtractFields": true,
      "customExtractPrompt": "",
      "usedInToolCall": true,
      "defaultConfig": {},
      "fieldMap": {},
      "type": "llm"
    },
    {
      "model": "gpt-4o-mini",
      "name": "GPT-4o-mini",
@@ -9,11 +81,7 @@
      "quoteMaxToken": 60000,
      "maxTemperature": 1.2,
      "showTopP": true,
      "responseFormatList": [
        "text",
        "json_object",
        "json_schema"
      ],
      "responseFormatList": ["text", "json_object", "json_schema"],
      "showStopSign": true,
      "vision": true,
      "toolChoice": true,
@@ -37,11 +105,7 @@
      "quoteMaxToken": 60000,
      "maxTemperature": 1.2,
      "showTopP": true,
      "responseFormatList": [
        "text",
        "json_object",
        "json_schema"
      ],
      "responseFormatList": ["text", "json_object", "json_schema"],
      "showStopSign": true,
      "vision": true,
      "toolChoice": true,
@@ -275,4 +339,4 @@
      "type": "stt"
    }
  ]
}
}
@@ -86,3 +86,19 @@ export async function findAppAndAllChildren({

  return [app, ...childDatasets];
}

export const getAppBasicInfoByIds = async ({ teamId, ids }: { teamId: string; ids: string[] }) => {
  const apps = await MongoApp.find(
    {
      teamId,
      _id: { $in: ids }
    },
    '_id name avatar'
  ).lean();

  return apps.map((item) => ({
    id: item._id,
    name: item.name,
    avatar: item.avatar
  }));
};
@@ -1,6 +1,11 @@
import { FlowNodeTemplateType } from '@fastgpt/global/core/workflow/type/node.d';
import { FlowNodeTypeEnum, defaultNodeVersion } from '@fastgpt/global/core/workflow/node/constant';
import { appData2FlowNodeIO, pluginData2FlowNodeIO } from '@fastgpt/global/core/workflow/utils';
import {
  appData2FlowNodeIO,
  pluginData2FlowNodeIO,
  toolData2FlowNodeIO,
  toolSetData2FlowNodeIO
} from '@fastgpt/global/core/workflow/utils';
import { PluginSourceEnum } from '@fastgpt/global/core/plugin/constants';
import { FlowNodeTemplateTypeEnum } from '@fastgpt/global/core/workflow/constants';
import { getHandleConfig } from '@fastgpt/global/core/workflow/template/utils';
@@ -128,11 +133,41 @@ export async function getChildAppPreviewNode({
    (node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput
  );

  const isTool =
    !!app.workflow.nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.tool) &&
    app.workflow.nodes.length === 1;

  const isToolSet =
    !!app.workflow.nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.toolSet) &&
    app.workflow.nodes.length === 1;

  const { flowNodeType, nodeIOConfig } = (() => {
    if (isToolSet)
      return {
        flowNodeType: FlowNodeTypeEnum.toolSet,
        nodeIOConfig: toolSetData2FlowNodeIO({ nodes: app.workflow.nodes })
      };
    if (isTool)
      return {
        flowNodeType: FlowNodeTypeEnum.tool,
        nodeIOConfig: toolData2FlowNodeIO({ nodes: app.workflow.nodes })
      };
    if (isPlugin)
      return {
        flowNodeType: FlowNodeTypeEnum.pluginModule,
        nodeIOConfig: pluginData2FlowNodeIO({ nodes: app.workflow.nodes })
      };
    return {
      flowNodeType: FlowNodeTypeEnum.appModule,
      nodeIOConfig: appData2FlowNodeIO({ chatConfig: app.workflow.chatConfig })
    };
  })();

  return {
    id: getNanoid(),
    pluginId: app.id,
    templateType: app.templateType,
    flowNodeType: isPlugin ? FlowNodeTypeEnum.pluginModule : FlowNodeTypeEnum.appModule,
    flowNodeType,
    avatar: app.avatar,
    name: app.name,
    intro: app.intro,
@@ -141,11 +176,13 @@ export async function getChildAppPreviewNode({
    showStatus: app.showStatus,
    isTool: true,
    version: app.version,
    sourceHandle: getHandleConfig(true, true, true, true),
    targetHandle: getHandleConfig(true, true, true, true),
    ...(isPlugin
      ? pluginData2FlowNodeIO({ nodes: app.workflow.nodes })
      : appData2FlowNodeIO({ chatConfig: app.workflow.chatConfig }))
    sourceHandle: isToolSet
      ? getHandleConfig(false, false, false, false)
      : getHandleConfig(true, true, true, true),
    targetHandle: isToolSet
      ? getHandleConfig(false, false, false, false)
      : getHandleConfig(true, true, true, true),
    ...nodeIOConfig
  };
}
@@ -16,6 +16,7 @@ import { mergeChatResponseData } from '@fastgpt/global/core/chat/utils';
import { pushChatLog } from './pushChatLog';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { extractDeepestInteractive } from '@fastgpt/global/core/workflow/runtime/utils';

type Props = {
  chatId: string;
@@ -209,34 +210,24 @@ export const updateInteractiveChat = async ({
    }
  })();

  if (interactiveValue.interactive.type === 'userSelect') {
    interactiveValue.interactive = {
      ...interactiveValue.interactive,
      params: {
        ...interactiveValue.interactive.params,
        userSelectedVal: userInteractiveVal
      }
    };
  let finalInteractive = extractDeepestInteractive(interactiveValue.interactive);

  if (finalInteractive.type === 'userSelect') {
    finalInteractive.params.userSelectedVal = userInteractiveVal;
  } else if (
    interactiveValue.interactive.type === 'userInput' &&
    finalInteractive.type === 'userInput' &&
    typeof parsedUserInteractiveVal === 'object'
  ) {
    interactiveValue.interactive = {
      ...interactiveValue.interactive,
      params: {
        ...interactiveValue.interactive.params,
        inputForm: interactiveValue.interactive.params.inputForm.map((item) => {
          const itemValue = parsedUserInteractiveVal[item.label];
          return itemValue !== undefined
            ? {
                ...item,
                value: itemValue
              }
            : item;
        }),
        submitted: true
      }
    };
    finalInteractive.params.inputForm = finalInteractive.params.inputForm.map((item) => {
      const itemValue = parsedUserInteractiveVal[item.label];
      return itemValue !== undefined
        ? {
            ...item,
            value: itemValue
          }
        : item;
    });
    finalInteractive.params.submitted = true;
  }

  if (aiResponse.customFeedbacks) {
@@ -11,7 +11,7 @@ import axios from 'axios';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
|
||||
import { i18nT } from '../../../web/i18n/utils';
|
||||
import { addLog } from '../../common/system/log';
|
||||
import { getImageBase64 } from '../../common/file/image/utils';
|
||||
import { addEndpointToImageUrl, getImageBase64 } from '../../common/file/image/utils';
|
||||
|
||||
export const filterGPTMessageByMaxContext = async ({
|
||||
messages = [],
|
||||
@@ -87,26 +87,17 @@ export const loadRequestMessages = async ({
   useVision?: boolean;
   origin?: string;
 }) => {
-  const replaceLinkUrl = (text: string) => {
-    const baseURL = process.env.FE_DOMAIN;
-    if (!baseURL) return text;
-    // Match image links like /api/system/img/xxx.xx and prepend the baseURL
-    return text.replace(
-      /(?<!https?:\/\/[^\s]*)(?:\/api\/system\/img\/[^\s.]*\.[^\s]*)/g,
-      (match) => `${baseURL}${match}`
-    );
-  };
   const parseSystemMessage = (
     content: string | ChatCompletionContentPartText[]
   ): string | ChatCompletionContentPartText[] | undefined => {
     if (typeof content === 'string') {
       if (!content) return;
-      return replaceLinkUrl(content);
+      return addEndpointToImageUrl(content);
     }
 
     const arrayContent = content
       .filter((item) => item.text)
-      .map((item) => ({ ...item, text: replaceLinkUrl(item.text) }));
+      .map((item) => ({ ...item, text: addEndpointToImageUrl(item.text) }));
     if (arrayContent.length === 0) return;
     return arrayContent;
   };
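
Note: the inline replaceLinkUrl helper removed above is superseded by addEndpointToImageUrl from common/file/image/utils, which this changeset also applies to dataset search results below. Its body is not shown in the diff; assuming it centralizes the same logic as the deleted code, a sketch:

// Sketch of the relocated helper, reusing the regex from the removed replaceLinkUrl:
// prepend FE_DOMAIN to relative /api/system/img/... links so external model
// providers can fetch the images.
export const addEndpointToImageUrl = (text: string) => {
  const baseURL = process.env.FE_DOMAIN;
  if (!baseURL) return text;
  return text.replace(
    /(?<!https?:\/\/[^\s]*)(?:\/api\/system\/img\/[^\s.]*\.[^\s]*)/g,
    (match) => `${baseURL}${match}`
  );
};
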
@@ -7,7 +7,7 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import {
   getWorkflowEntryNodeIds,
-  initWorkflowEdgeStatus,
+  storeEdges2RuntimeEdges,
   storeNodes2RuntimeNodes,
   textAdaptGptResponse
 } from '@fastgpt/global/core/workflow/runtime/utils';
@@ -70,7 +70,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
       appData.modules,
       getWorkflowEntryNodeIds(appData.modules)
     ),
-    runtimeEdges: initWorkflowEdgeStatus(appData.edges),
+    runtimeEdges: storeEdges2RuntimeEdges(appData.edges),
     histories: chatHistories,
     query: runtimePrompt2ChatsValue({
       files,
@@ -22,7 +22,7 @@ import { formatModelChars2Points } from '../../../../../support/wallet/usage/uti
 import { getHistoryPreview } from '@fastgpt/global/core/chat/utils';
 import { runToolWithFunctionCall } from './functionCall';
 import { runToolWithPromptCall } from './promptCall';
-import { replaceVariable } from '@fastgpt/global/common/string/tools';
+import { getNanoid, replaceVariable } from '@fastgpt/global/common/string/tools';
 import { getMultiplePrompt, Prompt_Tool_Call } from './constants';
 import { filterToolResponseToPreview } from './utils';
 import { InteractiveNodeResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
@@ -188,6 +188,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
   if (toolModel.toolChoice) {
     return runToolWithToolChoice({
       ...props,
+      runtimeNodes,
+      runtimeEdges,
       toolNodes,
       toolModel,
       maxRunToolTimes: 30,
@@ -198,6 +200,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
   if (toolModel.functionCall) {
     return runToolWithFunctionCall({
       ...props,
+      runtimeNodes,
+      runtimeEdges,
       toolNodes,
       toolModel,
       messages: adaptMessages,
@@ -226,6 +230,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
 
   return runToolWithPromptCall({
     ...props,
+    runtimeNodes,
+    runtimeEdges,
     toolNodes,
     toolModel,
     messages: adaptMessages,
@@ -17,6 +17,7 @@ import { MongoDataset } from '../../../dataset/schema';
 import { i18nT } from '../../../../../web/i18n/utils';
 import { filterDatasetsByTmbId } from '../../../dataset/utils';
 import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
+import { addEndpointToImageUrl } from '../../../../common/file/image/utils';
 
 type DatasetSearchProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.datasetSelectList]: SelectedDatasetType;
@@ -246,7 +247,7 @@ export async function dispatchDatasetSearch(
     [DispatchNodeResponseKeyEnum.toolResponses]: searchRes.map((item) => ({
       sourceName: item.sourceName,
       updateTime: item.updateTime,
-      content: `${item.q}\n${item.a}`.trim()
+      content: addEndpointToImageUrl(`${item.q}\n${item.a}`.trim())
     }))
   };
 }
@@ -37,7 +37,8 @@ import { dispatchQueryExtension } from './tools/queryExternsion';
 import { dispatchRunPlugin } from './plugin/run';
 import { dispatchPluginInput } from './plugin/runInput';
 import { dispatchPluginOutput } from './plugin/runOutput';
-import { formatHttpError, removeSystemVariable, valueTypeFormat } from './utils';
+import { formatHttpError, removeSystemVariable, rewriteRuntimeWorkFlow } from './utils';
+import { valueTypeFormat } from '@fastgpt/global/core/workflow/runtime/utils';
 import {
   filterWorkflowEdges,
   checkNodeRunStatus,
@@ -73,6 +74,8 @@ import { dispatchLoopStart } from './loop/runLoopStart';
 import { dispatchFormInput } from './interactive/formInput';
 import { dispatchToolParams } from './agent/runTool/toolParams';
 import { getErrText } from '@fastgpt/global/common/error/utils';
+import { filterModuleTypeList } from '@fastgpt/global/core/chat/utils';
+import { dispatchRunTool } from './plugin/runTool';
 
 const callbackMap: Record<FlowNodeTypeEnum, Function> = {
   [FlowNodeTypeEnum.workflowStart]: dispatchWorkflowStart,
@@ -103,6 +106,7 @@ const callbackMap: Record<FlowNodeTypeEnum, Function> = {
   [FlowNodeTypeEnum.loopStart]: dispatchLoopStart,
   [FlowNodeTypeEnum.loopEnd]: dispatchLoopEnd,
   [FlowNodeTypeEnum.formInput]: dispatchFormInput,
+  [FlowNodeTypeEnum.tool]: dispatchRunTool,
 
   // none
   [FlowNodeTypeEnum.systemConfig]: dispatchSystemConfig,
@@ -110,6 +114,7 @@ const callbackMap: Record<FlowNodeTypeEnum, Function> = {
   [FlowNodeTypeEnum.emptyNode]: () => Promise.resolve(),
   [FlowNodeTypeEnum.globalVariable]: () => Promise.resolve(),
   [FlowNodeTypeEnum.comment]: () => Promise.resolve(),
+  [FlowNodeTypeEnum.toolSet]: () => Promise.resolve(),
 
   [FlowNodeTypeEnum.runApp]: dispatchAppRequest // abandoned
 };
@@ -131,15 +136,19 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
     externalProvider,
     stream = false,
+    version = 'v1',
+    responseDetail = true,
     ...props
   } = data;
 
+  rewriteRuntimeWorkFlow(runtimeNodes, runtimeEdges);
+
   // Initialize the depth counter and auto-increment it to avoid infinite nesting
   if (!props.workflowDispatchDeep) {
     props.workflowDispatchDeep = 1;
   } else {
     props.workflowDispatchDeep += 1;
   }
+  const isRootRuntime = props.workflowDispatchDeep === 1;
 
   if (props.workflowDispatchDeep > 20) {
     return {
@@ -160,25 +169,28 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
   let workflowRunTimes = 0;
 
   // set sse response headers
-  if (stream && res) {
-    res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
-    res.setHeader('Access-Control-Allow-Origin', '*');
-    res.setHeader('X-Accel-Buffering', 'no');
-    res.setHeader('Cache-Control', 'no-cache, no-transform');
+  if (isRootRuntime) {
+    res?.setHeader('Connection', 'keep-alive'); // Set keepalive for long connection
+    if (stream && res) {
+      res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
+      res.setHeader('Access-Control-Allow-Origin', '*');
+      res.setHeader('X-Accel-Buffering', 'no');
+      res.setHeader('Cache-Control', 'no-cache, no-transform');
 
-    // 10s sends a message to prevent the browser from thinking that the connection is disconnected
-    const sendStreamTimerSign = () => {
-      setTimeout(() => {
-        props?.workflowStreamResponse?.({
-          event: SseResponseEventEnum.answer,
-          data: textAdaptGptResponse({
-            text: ''
-          })
-        });
-        sendStreamTimerSign();
-      }, 10000);
-    };
-    sendStreamTimerSign();
+      // 10s sends a message to prevent the browser from thinking that the connection is disconnected
+      const sendStreamTimerSign = () => {
+        setTimeout(() => {
+          props?.workflowStreamResponse?.({
+            event: SseResponseEventEnum.answer,
+            data: textAdaptGptResponse({
+              text: ''
+            })
+          });
+          sendStreamTimerSign();
+        }, 10000);
+      };
+      sendStreamTimerSign();
+    }
   }
 
   variables = {
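
Note: the hunk above scopes the SSE heartbeat to the root runtime, so nested child workflows no longer install duplicate timers on the same response. The pattern itself is a self-rescheduling setTimeout; a standalone sketch (the diff sends an empty answer event, this sketch uses an SSE comment frame, which clients likewise ignore):

// Sketch: emit a no-op SSE chunk every 10s so proxies/browsers keep the
// connection open; stop once the response has closed.
const startSseHeartbeat = (write: (chunk: string) => void, isClosed: () => boolean) => {
  const tick = () => {
    setTimeout(() => {
      if (isClosed()) return; // never write after the stream ends
      write(': keep-alive\n\n'); // comment frame, ignored by SSE clients
      tick();
    }, 10000);
  };
  tick();
};
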
@@ -324,10 +336,9 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
     });
 
     if (props.mode === 'debug') {
-      debugNextStepRunNodes = debugNextStepRunNodes.concat([
-        ...nextStepActiveNodes,
-        ...nextStepSkipNodes
-      ]);
+      debugNextStepRunNodes = debugNextStepRunNodes.concat(
+        props.lastInteractive ? nextStepActiveNodes : [...nextStepActiveNodes, ...nextStepSkipNodes]
+      );
       return {
         nextStepActiveNodes: [],
         nextStepSkipNodes: []
@@ -373,7 +384,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
     };
 
     // Tool call, not need interactive response
-    if (!props.isToolCall) {
+    if (!props.isToolCall && isRootRuntime) {
       props.workflowStreamResponse?.({
         event: SseResponseEventEnum.interactive,
         data: { interactive: interactiveResult }
@@ -427,14 +438,6 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
     })();
 
     if (!nodeRunResult) return [];
-    if (res?.closed) {
-      addLog.warn('Request is closed', {
-        appId: props.runningAppInfo.id,
-        nodeId: node.nodeId,
-        nodeName: node.name
-      });
-      return [];
-    }
 
     /*
       Special cases:
@@ -491,6 +494,15 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
       await Promise.all(nextStepSkipNodes.map((node) => checkNodeCanRun(node, skippedNodeIdList)))
     ).flat();
 
+    if (res?.closed) {
+      addLog.warn('Request is closed', {
+        appId: props.runningAppInfo.id,
+        nodeId: node.nodeId,
+        nodeName: node.name
+      });
+      return [];
+    }
+
     return [
       ...nextStepActiveNodes,
       ...nextStepSkipNodes,
@@ -631,14 +643,13 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
     if (
       version === 'v2' &&
       !props.isToolCall &&
-      !props.runningAppInfo.isChildApp &&
-      formatResponseData
+      isRootRuntime &&
+      formatResponseData &&
+      !(responseDetail === false && filterModuleTypeList.includes(formatResponseData.moduleType))
     ) {
       props.workflowStreamResponse?.({
         event: SseResponseEventEnum.flowNodeResponse,
-        data: {
-          ...formatResponseData
-        }
+        data: formatResponseData
       });
     }
@@ -719,7 +730,9 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
         entryNodeIds: nodeInteractiveResponse.entryNodeIds,
         interactiveResponse: nodeInteractiveResponse.interactiveResponse
       });
-      chatAssistantResponse.push(interactiveAssistant);
+      if (isRootRuntime) {
+        chatAssistantResponse.push(interactiveAssistant);
+      }
       return interactiveAssistant.interactive;
     }
   })();
@@ -17,19 +17,25 @@ type Response = DispatchNodeResultType<{
 export const dispatchWorkflowStart = (props: Record<string, any>): Response => {
   const {
     query,
+    variables,
     params: { userChatInput }
   } = props as UserChatInputProps;
 
   const { text, files } = chatValue2RuntimePrompt(query);
 
+  const queryFiles = files
+    .map((item) => {
+      return item?.url ?? '';
+    })
+    .filter(Boolean);
+  const variablesFiles: string[] = Array.isArray(variables?.fileUrlList)
+    ? variables.fileUrlList
+    : [];
+
   return {
     [DispatchNodeResponseKeyEnum.nodeResponse]: {},
     [NodeInputKeyEnum.userChatInput]: text || userChatInput,
-    [NodeOutputKeyEnum.userFiles]: files
-      .map((item) => {
-        return item?.url ?? '';
-      })
-      .filter(Boolean)
+    [NodeOutputKeyEnum.userFiles]: [...queryFiles, ...variablesFiles]
     // [NodeInputKeyEnum.inputFiles]: files
   };
 };
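
Note: with the change above, workflowStart merges file URLs from the chat query with any fileUrlList passed in variables (for example via an API call). For illustration (URLs are hypothetical):

const queryFiles = ['https://example.com/api/system/img/a.png']; // from the chat query
const variables = { fileUrlList: ['https://example.com/doc.pdf'] }; // from API variables
const variablesFiles: string[] = Array.isArray(variables?.fileUrlList)
  ? variables.fileUrlList
  : [];
const userFiles = [...queryFiles, ...variablesFiles];
// => ['https://example.com/api/system/img/a.png', 'https://example.com/doc.pdf']
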
@@ -10,7 +10,6 @@ import type {
   UserInputInteractive
 } from '@fastgpt/global/core/workflow/template/system/interactive/type';
 import { addLog } from '../../../../common/system/log';
-import { getLastInteractiveValue } from '@fastgpt/global/core/workflow/runtime/utils';
 
 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.description]: string;
@@ -29,13 +28,13 @@ export const dispatchFormInput = async (props: Props): Promise<FormInputResponse
     histories,
     node,
     params: { description, userInputForms },
-    query
+    query,
+    lastInteractive
   } = props;
   const { isEntry } = node;
-  const interactive = getLastInteractiveValue(histories);
 
   // Interactive node is not the entry node, return interactive result
-  if (!isEntry || interactive?.type !== 'userInput') {
+  if (!isEntry || lastInteractive?.type !== 'userInput') {
     return {
       [DispatchNodeResponseKeyEnum.interactive]: {
         type: 'userInput',
@@ -10,7 +10,6 @@ import type {
   UserSelectOptionItemType
 } from '@fastgpt/global/core/workflow/template/system/interactive/type';
 import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
-import { getLastInteractiveValue } from '@fastgpt/global/core/workflow/runtime/utils';
 
 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.description]: string;
@@ -27,13 +26,13 @@ export const dispatchUserSelect = async (props: Props): Promise<UserSelectRespon
     histories,
     node,
     params: { description, userSelectOptions },
-    query
+    query,
+    lastInteractive
   } = props;
   const { nodeId, isEntry } = node;
-  const interactive = getLastInteractiveValue(histories);
 
   // Interactive node is not the entry node, return interactive result
-  if (!isEntry || interactive?.type !== 'userSelect') {
+  if (!isEntry || lastInteractive?.type !== 'userSelect') {
     return {
       [DispatchNodeResponseKeyEnum.interactive]: {
         type: 'userSelect',
@@ -8,12 +8,18 @@ import { dispatchWorkFlow } from '..';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import { AIChatItemValueItemType, ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
 import { cloneDeep } from 'lodash';
+import {
+  LoopInteractive,
+  WorkflowInteractiveResponseType
+} from '@fastgpt/global/core/workflow/template/system/interactive/type';
+import { storeEdges2RuntimeEdges } from '@fastgpt/global/core/workflow/runtime/utils';
 
 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.loopInputArray]: Array<any>;
   [NodeInputKeyEnum.childrenNodeIdList]: string[];
 }>;
 type Response = DispatchNodeResultType<{
+  [DispatchNodeResponseKeyEnum.interactive]?: LoopInteractive;
   [NodeOutputKeyEnum.loopArray]: Array<any>;
 }>;
@@ -21,6 +27,7 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
   const {
     params,
     runtimeEdges,
+    lastInteractive,
     runtimeNodes,
     node: { name }
   } = props;
@@ -29,6 +36,8 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
   if (!Array.isArray(loopInputArray)) {
     return Promise.reject('Input value is not an array');
   }
+
+  // Max loop times
   const maxLength = process.env.WORKFLOW_MAX_LOOP_TIMES
     ? Number(process.env.WORKFLOW_MAX_LOOP_TIMES)
     : 50;
@@ -36,34 +45,63 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
     return Promise.reject(`Input array length cannot be greater than ${maxLength}`);
   }
 
-  const outputValueArr = [];
-  const loopDetail: ChatHistoryItemResType[] = [];
+  const interactiveData =
+    lastInteractive?.type === 'loopInteractive' ? lastInteractive?.params : undefined;
+  const lastIndex = interactiveData?.currentIndex;
+
+  const outputValueArr = interactiveData ? interactiveData.loopResult : [];
+  const loopResponseDetail: ChatHistoryItemResType[] = [];
   let assistantResponses: AIChatItemValueItemType[] = [];
   let totalPoints = 0;
   let newVariables: Record<string, any> = props.variables;
 
+  let interactiveResponse: WorkflowInteractiveResponseType | undefined = undefined;
   let index = 0;
 
   for await (const item of loopInputArray.filter(Boolean)) {
-    runtimeNodes.forEach((node) => {
-      if (
-        childrenNodeIdList.includes(node.nodeId) &&
-        node.flowNodeType === FlowNodeTypeEnum.loopStart
-      ) {
-        node.isEntry = true;
-        node.inputs.forEach((input) => {
-          if (input.key === NodeInputKeyEnum.loopStartInput) {
-            input.value = item;
-          } else if (input.key === NodeInputKeyEnum.loopStartIndex) {
-            input.value = index++;
-          }
-        });
-      }
-    });
+    // Skip already looped
+    if (lastIndex && index < lastIndex) {
+      index++;
+      continue;
+    }
+
+    // It takes effect only once in current loop
+    const isInteractiveResponseIndex = !!interactiveData && index === interactiveData?.currentIndex;
+
+    // Init entry
+    if (isInteractiveResponseIndex) {
+      runtimeNodes.forEach((node) => {
+        if (interactiveData?.childrenResponse?.entryNodeIds.includes(node.nodeId)) {
+          node.isEntry = true;
+        }
+      });
+    } else {
+      runtimeNodes.forEach((node) => {
+        if (!childrenNodeIdList.includes(node.nodeId)) return;
+
+        // Init interactive response
+        if (node.flowNodeType === FlowNodeTypeEnum.loopStart) {
+          node.isEntry = true;
+          node.inputs.forEach((input) => {
+            if (input.key === NodeInputKeyEnum.loopStartInput) {
+              input.value = item;
+            } else if (input.key === NodeInputKeyEnum.loopStartIndex) {
+              input.value = index + 1;
+            }
+          });
+        }
+      });
+    }
+
+    index++;
+
     const response = await dispatchWorkFlow({
       ...props,
+      lastInteractive: interactiveData?.childrenResponse,
       variables: newVariables,
-      runtimeEdges: cloneDeep(runtimeEdges)
+      runtimeNodes,
+      runtimeEdges: cloneDeep(
+        storeEdges2RuntimeEdges(runtimeEdges, interactiveData?.childrenResponse)
+      )
     });
 
     const loopOutputValue = response.flowResponses.find(
@@ -71,8 +109,10 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
     )?.loopOutputValue;
 
     // Concat runtime response
-    outputValueArr.push(loopOutputValue);
-    loopDetail.push(...response.flowResponses);
+    if (!response.workflowInteractiveResponse) {
+      outputValueArr.push(loopOutputValue);
+    }
+    loopResponseDetail.push(...response.flowResponses);
     assistantResponses.push(...response.assistantResponses);
     totalPoints += response.flowUsages.reduce((acc, usage) => acc + usage.totalPoints, 0);
@@ -81,15 +121,32 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
       ...newVariables,
       ...response.newVariables
     };
+
+    // handle interactive response
+    if (response.workflowInteractiveResponse) {
+      interactiveResponse = response.workflowInteractiveResponse;
+      break;
+    }
   }
 
   return {
+    [DispatchNodeResponseKeyEnum.interactive]: interactiveResponse
+      ? {
+          type: 'loopInteractive',
+          params: {
+            currentIndex: index - 1,
+            childrenResponse: interactiveResponse,
+            loopResult: outputValueArr
+          }
+        }
+      : undefined,
     [DispatchNodeResponseKeyEnum.assistantResponses]: assistantResponses,
     [DispatchNodeResponseKeyEnum.nodeResponse]: {
       totalPoints,
       loopInput: loopInputArray,
       loopResult: outputValueArr,
-      loopDetail: loopDetail
+      loopDetail: loopResponseDetail,
+      mergeSignId: props.node.nodeId
     },
     [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
       {
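
Note: taken together, the loop hunks above make loops resumable across interactive pauses: when a child run returns workflowInteractiveResponse, the loop records currentIndex and the partial loopResult, breaks out, and on the next dispatch skips indexes below lastIndex and re-enters through the stored childrenResponse entry nodes. A condensed, self-contained sketch of that control flow (runIteration stands in for the nested dispatchWorkFlow call):

type IterResult = { loopOutputValue: unknown; workflowInteractiveResponse?: object };

async function runLoop(
  loopInputArray: unknown[],
  // state recovered from a previous 'loopInteractive' pause, if any
  interactiveData: { currentIndex: number; loopResult: unknown[] } | undefined,
  runIteration: (item: unknown, resuming: boolean) => Promise<IterResult>
) {
  const lastIndex = interactiveData?.currentIndex;
  const outputValueArr = interactiveData ? interactiveData.loopResult : [];
  let interactiveResponse: object | undefined;
  let index = 0;

  for (const item of loopInputArray) {
    if (lastIndex && index < lastIndex) {
      index++; // this item finished before the pause; skip it
      continue;
    }
    const resuming = !!interactiveData && index === interactiveData.currentIndex;
    index++;
    const response = await runIteration(item, resuming);
    if (response.workflowInteractiveResponse) {
      interactiveResponse = response.workflowInteractiveResponse; // pause again
      break;
    }
    outputValueArr.push(response.loopOutputValue);
  }
  // when interactiveResponse is set, the caller stores
  // { currentIndex: index - 1, loopResult: outputValueArr } for the next resume
  return { outputValueArr, interactiveResponse, currentIndex: index - 1 };
}
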
@@ -5,7 +5,7 @@ import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runti
 import { getChildAppRuntimeById } from '../../../app/plugin/controller';
 import {
   getWorkflowEntryNodeIds,
-  initWorkflowEdgeStatus,
+  storeEdges2RuntimeEdges,
   storeNodes2RuntimeNodes
 } from '@fastgpt/global/core/workflow/runtime/utils';
 import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/type';
@@ -101,7 +101,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
     }).value,
     chatConfig: {},
     runtimeNodes,
-    runtimeEdges: initWorkflowEdgeStatus(plugin.edges)
+    runtimeEdges: storeEdges2RuntimeEdges(plugin.edges)
   });
   const output = flowResponses.find((item) => item.moduleType === FlowNodeTypeEnum.pluginOutput);
   if (output) {
@@ -5,7 +5,8 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import {
   getWorkflowEntryNodeIds,
-  initWorkflowEdgeStatus,
+  storeEdges2RuntimeEdges,
+  rewriteNodeOutputByHistories,
   storeNodes2RuntimeNodes,
   textAdaptGptResponse
 } from '@fastgpt/global/core/workflow/runtime/utils';
@@ -18,6 +19,7 @@ import { authAppByTmbId } from '../../../../support/permission/app/auth';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
 import { getAppVersionById } from '../../../app/version/controller';
 import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';
+import { ChildrenInteractive } from '@fastgpt/global/core/workflow/template/system/interactive/type';
 
 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.userChatInput]: string;
@@ -27,6 +29,7 @@ type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.fileUrlList]?: string[];
 }>;
 type Response = DispatchNodeResultType<{
+  [DispatchNodeResponseKeyEnum.interactive]?: ChildrenInteractive;
   [NodeOutputKeyEnum.answerText]: string;
   [NodeOutputKeyEnum.history]: ChatItemType[];
 }>;
@@ -36,6 +39,7 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
   runningAppInfo,
   histories,
   query,
+  lastInteractive,
   node: { pluginId: appId, version },
   workflowStreamResponse,
   params,
@@ -100,31 +104,47 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
     appId: String(appData._id)
   };
 
-  const { flowResponses, flowUsages, assistantResponses, runTimes } = await dispatchWorkFlow({
-    ...props,
-    // Rewrite stream mode
-    ...(system_forbid_stream
-      ? {
-          stream: false,
-          workflowStreamResponse: undefined
-        }
-      : {}),
-    runningAppInfo: {
-      id: String(appData._id),
-      teamId: String(appData.teamId),
-      tmbId: String(appData.tmbId),
-      isChildApp: true
-    },
-    runtimeNodes: storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes)),
-    runtimeEdges: initWorkflowEdgeStatus(edges),
-    histories: chatHistories,
-    variables: childrenRunVariables,
-    query: runtimePrompt2ChatsValue({
-      files: userInputFiles,
-      text: userChatInput
-    }),
-    chatConfig
-  });
+  const childrenInteractive =
+    lastInteractive?.type === 'childrenInteractive'
+      ? lastInteractive.params.childrenResponse
+      : undefined;
+  const runtimeNodes = rewriteNodeOutputByHistories(
+    storeNodes2RuntimeNodes(
+      nodes,
+      getWorkflowEntryNodeIds(nodes, childrenInteractive || undefined)
+    ),
+    childrenInteractive
+  );
+
+  const runtimeEdges = storeEdges2RuntimeEdges(edges, childrenInteractive);
+  const theQuery = childrenInteractive
+    ? query
+    : runtimePrompt2ChatsValue({ files: userInputFiles, text: userChatInput });
+
+  const { flowResponses, flowUsages, assistantResponses, runTimes, workflowInteractiveResponse } =
+    await dispatchWorkFlow({
+      ...props,
+      lastInteractive: childrenInteractive,
+      // Rewrite stream mode
+      ...(system_forbid_stream
+        ? {
+            stream: false,
+            workflowStreamResponse: undefined
+          }
+        : {}),
+      runningAppInfo: {
+        id: String(appData._id),
+        teamId: String(appData.teamId),
+        tmbId: String(appData.tmbId),
+        isChildApp: true
+      },
+      runtimeNodes,
+      runtimeEdges,
+      histories: chatHistories,
+      variables: childrenRunVariables,
+      query: theQuery,
+      chatConfig
+    });
 
   const completeMessages = chatHistories.concat([
     {
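
Note: the rewrite above lets a child app suspend and resume. On re-entry after an interactive pause, a lastInteractive of type 'childrenInteractive' carries the child run's own interactive state (childrenResponse), which rebuilds entry nodes, edge status, and node outputs instead of starting from the workflow entry; the return hunk below wraps any new pause the child produced in the same envelope. A small sketch of that wrap/unwrap symmetry, with a simplified local type:

type WorkflowInteractiveResponseType = { type: string; params: Record<string, any> };

// Wrap a child run's pause so the parent can store and later replay it.
const wrapChildPause = (
  childPause: WorkflowInteractiveResponseType
): WorkflowInteractiveResponseType => ({
  type: 'childrenInteractive',
  params: { childrenResponse: childPause }
});

// Unwrap on re-entry to recover the child's own interactive state.
const unwrapChildPause = (lastInteractive?: WorkflowInteractiveResponseType) =>
  lastInteractive?.type === 'childrenInteractive'
    ? (lastInteractive.params.childrenResponse as WorkflowInteractiveResponseType)
    : undefined;
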
@@ -142,6 +162,14 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
   const usagePoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
 
   return {
+    [DispatchNodeResponseKeyEnum.interactive]: workflowInteractiveResponse
+      ? {
+          type: 'childrenInteractive',
+          params: {
+            childrenResponse: workflowInteractiveResponse
+          }
+        }
+      : undefined,
     assistantResponses: system_forbid_stream ? [] : assistantResponses,
     [DispatchNodeResponseKeyEnum.runTimes]: runTimes,
     [DispatchNodeResponseKeyEnum.nodeResponse]: {
@@ -149,7 +177,8 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
       totalPoints: usagePoints,
       query: userChatInput,
       textOutput: text,
-      pluginDetail: appData.permission.hasWritePer ? flowResponses : undefined
+      pluginDetail: appData.permission.hasWritePer ? flowResponses : undefined,
+      mergeSignId: props.node.nodeId
     },
     [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
       {
60 packages/service/core/workflow/dispatch/plugin/runTool.ts Normal file
@@ -0,0 +1,60 @@
import {
  DispatchNodeResultType,
  ModuleDispatchProps
} from '@fastgpt/global/core/workflow/runtime/type';
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';

type RunToolProps = ModuleDispatchProps<{
  toolData: {
    name: string;
    url: string;
  };
}>;

type RunToolResponse = DispatchNodeResultType<{
  [NodeOutputKeyEnum.rawResponse]: any;
}>;

export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolResponse> => {
  const {
    params,
    node: { avatar }
  } = props;

  const { toolData, ...restParams } = params;
  const { name: toolName, url } = toolData;

  const client = new Client({
    name: 'FastGPT-MCP-client',
    version: '1.0.0'
  });

  const result = await (async () => {
    try {
      const transport = new SSEClientTransport(new URL(url));
      await client.connect(transport);

      return await client.callTool({
        name: toolName,
        arguments: restParams
      });
    } catch (error) {
      console.error('Error running MCP tool:', error);
      return Promise.reject(error);
    } finally {
      await client.close();
    }
  })();

  return {
    [DispatchNodeResponseKeyEnum.nodeResponse]: {
      toolRes: result,
      moduleLogo: avatar
    },
    [DispatchNodeResponseKeyEnum.toolResponses]: result,
    [NodeOutputKeyEnum.rawResponse]: result
  };
};
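
Note: this new file is the first consumer of @modelcontextprotocol/sdk in the package (the dependency is added to package.json at the end of this diff). The same client flow, reduced to a standalone helper with a placeholder server URL:

import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js';

// Connect to an MCP server over SSE, call one tool, always close the client.
async function callMcpTool(url: string, toolName: string, args: Record<string, unknown>) {
  const client = new Client({ name: 'FastGPT-MCP-client', version: '1.0.0' });
  try {
    await client.connect(new SSEClientTransport(new URL(url)));
    return await client.callTool({ name: toolName, arguments: args });
  } finally {
    await client.close();
  }
}

// e.g. callMcpTool('https://example.com/mcp/sse', 'search', { query: 'hello' });
// (hypothetical endpoint and tool name, for illustration only)
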
@@ -10,7 +10,8 @@ import {
   SseResponseEventEnum
 } from '@fastgpt/global/core/workflow/runtime/constants';
 import axios from 'axios';
-import { formatHttpError, valueTypeFormat } from '../utils';
+import { formatHttpError } from '../utils';
+import { valueTypeFormat } from '@fastgpt/global/core/workflow/runtime/utils';
 import { SERVICE_LOCAL_HOST } from '../../../../common/system/tools';
 import { addLog } from '../../../../common/system/log';
 import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/type';
@@ -2,7 +2,7 @@ import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/
 import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import axios from 'axios';
-import { valueTypeFormat } from '../utils';
+import { valueTypeFormat } from '@fastgpt/global/core/workflow/runtime/utils';
 import { SERVICE_LOCAL_HOST } from '../../../../common/system/tools';
 import { addLog } from '../../../../common/system/log';
 import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/type';
@@ -10,8 +10,9 @@ import {
 } from '@fastgpt/global/core/workflow/runtime/utils';
 import { TUpdateListItem } from '@fastgpt/global/core/workflow/template/system/variableUpdate/type';
 import { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
-import { removeSystemVariable, valueTypeFormat } from '../utils';
+import { removeSystemVariable } from '../utils';
 import { isValidReferenceValue } from '@fastgpt/global/core/workflow/utils';
+import { valueTypeFormat } from '@fastgpt/global/core/workflow/runtime/utils';
 
 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.updateList]: TUpdateListItem[];
@@ -7,6 +7,7 @@ import {
 } from '@fastgpt/global/core/workflow/constants';
 import {
+  RuntimeEdgeItemType,
   RuntimeNodeItemType,
   SystemVariablesType
 } from '@fastgpt/global/core/workflow/runtime/type';
 import { responseWrite } from '../../../common/response';
@@ -14,7 +15,8 @@ import { NextApiResponse } from 'next';
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import { getNanoid } from '@fastgpt/global/common/string/tools';
 import { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
-import json5 from 'json5';
+import { getMCPToolRuntimeNode } from '@fastgpt/global/core/app/mcpTools/utils';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 
 export const getWorkflowResponseWrite = ({
   res,
@@ -104,49 +106,6 @@ export const getHistories = (history?: ChatItemType[] | number, histories: ChatI
   return [...systemHistories, ...filterHistories];
 };
 
-/* value type format */
-export const valueTypeFormat = (value: any, type?: WorkflowIOValueTypeEnum) => {
-  if (value === undefined) return;
-  if (!type || type === WorkflowIOValueTypeEnum.any) return value;
-
-  if (type === 'string') {
-    if (typeof value !== 'object') return String(value);
-    return JSON.stringify(value);
-  }
-  if (type === 'number') return Number(value);
-  if (type === 'boolean') {
-    if (typeof value === 'string') return value === 'true';
-    return Boolean(value);
-  }
-  try {
-    if (type === WorkflowIOValueTypeEnum.arrayString && typeof value === 'string') {
-      return [value];
-    }
-    if (
-      type &&
-      [
-        WorkflowIOValueTypeEnum.object,
-        WorkflowIOValueTypeEnum.chatHistory,
-        WorkflowIOValueTypeEnum.datasetQuote,
-        WorkflowIOValueTypeEnum.selectApp,
-        WorkflowIOValueTypeEnum.selectDataset,
-        WorkflowIOValueTypeEnum.arrayString,
-        WorkflowIOValueTypeEnum.arrayNumber,
-        WorkflowIOValueTypeEnum.arrayBoolean,
-        WorkflowIOValueTypeEnum.arrayObject,
-        WorkflowIOValueTypeEnum.arrayAny
-      ].includes(type) &&
-      typeof value !== 'object'
-    ) {
-      return json5.parse(value);
-    }
-  } catch (error) {
-    return value;
-  }
-
-  return value;
-};
-
 export const checkQuoteQAValue = (quoteQA?: SearchDataResponseItemType[]) => {
   if (!quoteQA) return undefined;
   if (quoteQA.length === 0) {
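
Note: valueTypeFormat is deleted here because it moved to @fastgpt/global/core/workflow/runtime/utils (see the import changes in the hunks above), and callers across the dispatch modules were repointed. Its behavior, as shown in the removed lines (the enum import path below is an assumption; the helper import path comes from this diff):

import { valueTypeFormat } from '@fastgpt/global/core/workflow/runtime/utils';
import { WorkflowIOValueTypeEnum } from '@fastgpt/global/core/workflow/constants';

valueTypeFormat({ a: 1 }, WorkflowIOValueTypeEnum.string); // '{"a":1}', objects are stringified
valueTypeFormat('42', WorkflowIOValueTypeEnum.number); // 42
valueTypeFormat('true', WorkflowIOValueTypeEnum.boolean); // true
valueTypeFormat('x', WorkflowIOValueTypeEnum.arrayString); // ['x']
valueTypeFormat('[1, 2]', WorkflowIOValueTypeEnum.arrayNumber); // [1, 2], via json5.parse
valueTypeFormat('not json', WorkflowIOValueTypeEnum.object); // 'not json', parse failures fall back to the input
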
@@ -199,3 +158,53 @@ export const formatHttpError = (error: any) => {
     status: error?.status
   };
 };
+
+export const rewriteRuntimeWorkFlow = (
+  nodes: RuntimeNodeItemType[],
+  edges: RuntimeEdgeItemType[]
+) => {
+  const toolSetNodes = nodes.filter((node) => node.flowNodeType === FlowNodeTypeEnum.toolSet);
+
+  if (toolSetNodes.length === 0) {
+    return;
+  }
+
+  const nodeIdsToRemove = new Set<string>();
+
+  for (const toolSetNode of toolSetNodes) {
+    nodeIdsToRemove.add(toolSetNode.nodeId);
+    const toolList =
+      toolSetNode.inputs.find((input) => input.key === 'toolSetData')?.value?.toolList || [];
+    const url = toolSetNode.inputs.find((input) => input.key === 'toolSetData')?.value?.url;
+
+    const incomingEdges = edges.filter((edge) => edge.target === toolSetNode.nodeId);
+
+    for (const tool of toolList) {
+      const newToolNode = getMCPToolRuntimeNode({ avatar: toolSetNode.avatar, tool, url });
+
+      nodes.push({ ...newToolNode, name: `${toolSetNode.name} / ${tool.name}` });
+
+      for (const inEdge of incomingEdges) {
+        edges.push({
+          source: inEdge.source,
+          target: newToolNode.nodeId,
+          sourceHandle: inEdge.sourceHandle,
+          targetHandle: 'selectedTools',
+          status: inEdge.status
+        });
+      }
+    }
+  }
+
+  for (let i = nodes.length - 1; i >= 0; i--) {
+    if (nodeIdsToRemove.has(nodes[i].nodeId)) {
+      nodes.splice(i, 1);
+    }
+  }
+
+  for (let i = edges.length - 1; i >= 0; i--) {
+    if (nodeIdsToRemove.has(edges[i].target)) {
+      edges.splice(i, 1);
+    }
+  }
+};
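
Note: rewriteRuntimeWorkFlow is the runtime counterpart of the toolSet node type registered earlier in this diff. Before dispatch, each toolSet node is expanded into one MCP tool node per entry in its toolSetData.toolList, every edge that targeted the toolSet is fanned out to the generated nodes (targetHandle 'selectedTools'), and the original toolSet nodes and their incoming edges are removed. Schematically:

// Before:  agent --(selectedTools)--> toolSet { toolList: [search, fetch], url }
// After:   agent --(selectedTools)--> 'toolSet / search'
//          agent --(selectedTools)--> 'toolSet / fetch'
// The generated nodes are FlowNodeTypeEnum.tool, dispatched by the new
// runTool.ts above; the toolSet node itself never reaches a dispatcher
// (callbackMap maps FlowNodeTypeEnum.toolSet to a no-op Promise.resolve()).
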
@@ -3,6 +3,7 @@
   "version": "1.0.0",
   "dependencies": {
     "@fastgpt/global": "workspace:*",
+    "@modelcontextprotocol/sdk": "^1.9.0",
     "@node-rs/jieba": "2.0.1",
     "@xmldom/xmldom": "^0.8.10",
     "@zilliz/milvus2-sdk-node": "2.4.2",