Compare commits

..

23 Commits
v3.0 ... v3.3

Author SHA1 Message Date
archer
487ef670cd perf: prompt 2023-05-04 11:33:42 +08:00
archer
0d6897e180 feat: lafClaude 2023-05-04 10:53:55 +08:00
archer
3c8f38799c perf: ts type 2023-05-03 22:33:25 +08:00
archer
91b02bbfd9 feat: img compress;perf: color.prompt response 2023-05-03 21:42:23 +08:00
archer
17a42ac0cc perf: kb-add last question to search 2023-05-03 18:38:59 +08:00
archer
e384893ae0 feat: select chat model 2023-05-03 15:50:31 +08:00
archer
00a99261ae perf: chat framwork 2023-05-03 15:28:25 +08:00
archer
91decc3683 perf: model framwork 2023-05-03 10:57:56 +08:00
archer
aa74625f96 fix: system prompt response 2023-05-03 10:27:06 +08:00
archer
9199e3e57d fix: userquery cache 2023-05-02 17:28:23 +08:00
archer
89234c197c fix: response 2023-05-02 14:56:12 +08:00
archer
a409db9578 perf: icon 2023-05-02 14:52:20 +08:00
archer
11f42ad9ed perf: chat load 2023-05-02 14:18:43 +08:00
archer
90456301d2 feat: save system prompt 2023-05-02 14:06:10 +08:00
archer
b0d414ac12 feat: export chat 2023-05-02 12:01:22 +08:00
archer
89a67ca9c0 perf: token split text 2023-04-30 22:35:47 +08:00
archer
39869bc4ea perf: search kb model 2023-04-30 14:01:39 +08:00
archer
f109f1cf60 perf: save chat and del chat content;UI 2023-04-30 13:26:56 +08:00
archer
c971adaabd docs 2023-04-29 16:01:42 +08:00
archer
ea100d84bf perf: auth token 2023-04-29 15:59:53 +08:00
archer
78762498eb perf: model framwork 2023-04-29 15:55:47 +08:00
archer
cd9acab938 fix: request config 2023-04-28 16:47:55 +08:00
archer
56b3ddc147 fix: mode data 2023-04-28 15:07:38 +08:00
102 changed files with 3185 additions and 2341 deletions

422
pnpm-lock.yaml generated
View File

@@ -136,6 +136,184 @@ devDependencies:
packages:
/@aws-sdk/credential-providers/3.278.0:
resolution: {integrity: sha512-XeCyXru9+gxePvrnPbdITZLSCC5qg3C/hgdItstDEvrQh+vMw9DJvX5ztoKh987C0puwkbB6HENC04ZTh/0Buw==}
engines: {node: '>=14.0.0'}
requiresBuild: true
dependencies:
'@aws-sdk/client-cognito-identity': registry.npmmirror.com/@aws-sdk/client-cognito-identity/3.278.0
'@aws-sdk/client-sso': registry.npmmirror.com/@aws-sdk/client-sso/3.278.0
'@aws-sdk/client-sts': registry.npmmirror.com/@aws-sdk/client-sts/3.278.0
'@aws-sdk/credential-provider-cognito-identity': registry.npmmirror.com/@aws-sdk/credential-provider-cognito-identity/3.278.0
'@aws-sdk/credential-provider-env': registry.npmmirror.com/@aws-sdk/credential-provider-env/3.272.0
'@aws-sdk/credential-provider-imds': registry.npmmirror.com/@aws-sdk/credential-provider-imds/3.272.0
'@aws-sdk/credential-provider-ini': registry.npmmirror.com/@aws-sdk/credential-provider-ini/3.278.0
'@aws-sdk/credential-provider-node': registry.npmmirror.com/@aws-sdk/credential-provider-node/3.278.0
'@aws-sdk/credential-provider-process': registry.npmmirror.com/@aws-sdk/credential-provider-process/3.272.0
'@aws-sdk/credential-provider-sso': registry.npmmirror.com/@aws-sdk/credential-provider-sso/3.278.0
'@aws-sdk/credential-provider-web-identity': registry.npmmirror.com/@aws-sdk/credential-provider-web-identity/3.272.0
'@aws-sdk/property-provider': registry.npmmirror.com/@aws-sdk/property-provider/3.272.0
'@aws-sdk/shared-ini-file-loader': registry.npmmirror.com/@aws-sdk/shared-ini-file-loader/3.272.0
'@aws-sdk/types': registry.npmmirror.com/@aws-sdk/types/3.272.0
tslib: registry.npmmirror.com/tslib/2.5.0
transitivePeerDependencies:
- aws-crt
dev: false
optional: true
/@emotion/is-prop-valid/0.8.8:
resolution: {integrity: sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==}
requiresBuild: true
dependencies:
'@emotion/memoize': registry.npmmirror.com/@emotion/memoize/0.7.4
dev: false
optional: true
/@next/swc-android-arm-eabi/13.1.6:
resolution: {integrity: sha512-F3/6Z8LH/pGlPzR1AcjPFxx35mPqjE5xZcf+IL+KgbW9tMkp7CYi1y7qKrEWU7W4AumxX/8OINnDQWLiwLasLQ==}
engines: {node: '>= 10'}
cpu: [arm]
os: [android]
requiresBuild: true
dev: false
optional: true
/@next/swc-android-arm64/13.1.6:
resolution: {integrity: sha512-cMwQjnB8vrYkWyK/H0Rf2c2pKIH4RGjpKUDvbjVAit6SbwPDpmaijLio0LWFV3/tOnY6kvzbL62lndVA0mkYpw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [android]
requiresBuild: true
dev: false
optional: true
/@next/swc-darwin-arm64/13.1.6:
resolution: {integrity: sha512-KKRQH4DDE4kONXCvFMNBZGDb499Hs+xcFAwvj+rfSUssIDrZOlyfJNy55rH5t2Qxed1e4K80KEJgsxKQN1/fyw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
requiresBuild: true
dev: false
optional: true
/@next/swc-darwin-x64/13.1.6:
resolution: {integrity: sha512-/uOky5PaZDoaU99ohjtNcDTJ6ks/gZ5ykTQDvNZDjIoCxFe3+t06bxsTPY6tAO6uEAw5f6vVFX5H5KLwhrkZCA==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
requiresBuild: true
dev: false
optional: true
/@next/swc-freebsd-x64/13.1.6:
resolution: {integrity: sha512-qaEALZeV7to6weSXk3Br80wtFQ7cFTpos/q+m9XVRFggu+8Ib895XhMWdJBzew6aaOcMvYR6KQ6JmHA2/eMzWw==}
engines: {node: '>= 10'}
cpu: [x64]
os: [freebsd]
requiresBuild: true
dev: false
optional: true
/@next/swc-linux-arm-gnueabihf/13.1.6:
resolution: {integrity: sha512-OybkbC58A1wJ+JrJSOjGDvZzrVEQA4sprJejGqMwiZyLqhr9Eo8FXF0y6HL+m1CPCpPhXEHz/2xKoYsl16kNqw==}
engines: {node: '>= 10'}
cpu: [arm]
os: [linux]
requiresBuild: true
dev: false
optional: true
/@next/swc-linux-arm64-gnu/13.1.6:
resolution: {integrity: sha512-yCH+yDr7/4FDuWv6+GiYrPI9kcTAO3y48UmaIbrKy8ZJpi7RehJe3vIBRUmLrLaNDH3rY1rwoHi471NvR5J5NQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
requiresBuild: true
dev: false
optional: true
/@next/swc-linux-arm64-musl/13.1.6:
resolution: {integrity: sha512-ECagB8LGX25P9Mrmlc7Q/TQBb9rGScxHbv/kLqqIWs2fIXy6Y/EiBBiM72NTwuXUFCNrWR4sjUPSooVBJJ3ESQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
requiresBuild: true
dev: false
optional: true
/@next/swc-linux-x64-gnu/13.1.6:
resolution: {integrity: sha512-GT5w2mruk90V/I5g6ScuueE7fqj/d8Bui2qxdw6lFxmuTgMeol5rnzAv4uAoVQgClOUO/MULilzlODg9Ib3Y4Q==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
requiresBuild: true
dev: false
optional: true
/@next/swc-linux-x64-musl/13.1.6:
resolution: {integrity: sha512-keFD6KvwOPzmat4TCnlnuxJCQepPN+8j3Nw876FtULxo8005Y9Ghcl7ACcR8GoiKoddAq8gxNBrpjoxjQRHeAQ==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
requiresBuild: true
dev: false
optional: true
/@next/swc-win32-arm64-msvc/13.1.6:
resolution: {integrity: sha512-OwertslIiGQluFvHyRDzBCIB07qJjqabAmINlXUYt7/sY7Q7QPE8xVi5beBxX/rxTGPIbtyIe3faBE6Z2KywhQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]
requiresBuild: true
dev: false
optional: true
/@next/swc-win32-ia32-msvc/13.1.6:
resolution: {integrity: sha512-g8zowiuP8FxUR9zslPmlju7qYbs2XBtTLVSxVikPtUDQedhcls39uKYLvOOd1JZg0ehyhopobRoH1q+MHlIN/w==}
engines: {node: '>= 10'}
cpu: [ia32]
os: [win32]
requiresBuild: true
dev: false
optional: true
/@next/swc-win32-x64-msvc/13.1.6:
resolution: {integrity: sha512-Ls2OL9hi3YlJKGNdKv8k3X/lLgc3VmLG3a/DeTkAd+lAituJp8ZHmRmm9f9SL84fT3CotlzcgbdaCDfFwFA6bA==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
requiresBuild: true
dev: false
optional: true
/fsevents/2.3.2:
resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
requiresBuild: true
dev: false
optional: true
/graceful-fs/4.2.10:
resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==}
dev: false
optional: true
/saslprep/1.0.3:
resolution: {integrity: sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==}
engines: {node: '>=6'}
requiresBuild: true
dependencies:
sparse-bitfield: registry.npmmirror.com/sparse-bitfield/3.0.3
dev: false
optional: true
/source-map/0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
dev: false
optional: true
registry.npmmirror.com/@alicloud/credentials/2.2.6:
resolution: {integrity: sha512-jG+msY77dHmAF3x+8VTy7fEgORyXLHmDci8t92HeipBdCHsPptDegA++GEwKgR7f6G4wvafYt+aqMZ1iligdrQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@alicloud/credentials/-/credentials-2.2.6.tgz}
name: '@alicloud/credentials'
@@ -641,33 +819,6 @@ packages:
dev: false
optional: true
registry.npmmirror.com/@aws-sdk/credential-providers/3.278.0:
resolution: {integrity: sha512-XeCyXru9+gxePvrnPbdITZLSCC5qg3C/hgdItstDEvrQh+vMw9DJvX5ztoKh987C0puwkbB6HENC04ZTh/0Buw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@aws-sdk/credential-providers/-/credential-providers-3.278.0.tgz}
name: '@aws-sdk/credential-providers'
version: 3.278.0
engines: {node: '>=14.0.0'}
requiresBuild: true
dependencies:
'@aws-sdk/client-cognito-identity': registry.npmmirror.com/@aws-sdk/client-cognito-identity/3.278.0
'@aws-sdk/client-sso': registry.npmmirror.com/@aws-sdk/client-sso/3.278.0
'@aws-sdk/client-sts': registry.npmmirror.com/@aws-sdk/client-sts/3.278.0
'@aws-sdk/credential-provider-cognito-identity': registry.npmmirror.com/@aws-sdk/credential-provider-cognito-identity/3.278.0
'@aws-sdk/credential-provider-env': registry.npmmirror.com/@aws-sdk/credential-provider-env/3.272.0
'@aws-sdk/credential-provider-imds': registry.npmmirror.com/@aws-sdk/credential-provider-imds/3.272.0
'@aws-sdk/credential-provider-ini': registry.npmmirror.com/@aws-sdk/credential-provider-ini/3.278.0
'@aws-sdk/credential-provider-node': registry.npmmirror.com/@aws-sdk/credential-provider-node/3.278.0
'@aws-sdk/credential-provider-process': registry.npmmirror.com/@aws-sdk/credential-provider-process/3.272.0
'@aws-sdk/credential-provider-sso': registry.npmmirror.com/@aws-sdk/credential-provider-sso/3.278.0
'@aws-sdk/credential-provider-web-identity': registry.npmmirror.com/@aws-sdk/credential-provider-web-identity/3.272.0
'@aws-sdk/property-provider': registry.npmmirror.com/@aws-sdk/property-provider/3.272.0
'@aws-sdk/shared-ini-file-loader': registry.npmmirror.com/@aws-sdk/shared-ini-file-loader/3.272.0
'@aws-sdk/types': registry.npmmirror.com/@aws-sdk/types/3.272.0
tslib: registry.npmmirror.com/tslib/2.5.0
transitivePeerDependencies:
- aws-crt
dev: false
optional: true
registry.npmmirror.com/@aws-sdk/fetch-http-handler/3.272.0:
resolution: {integrity: sha512-1Qhm9e0RbS1Xf4CZqUbQyUMkDLd7GrsRXWIvm9b86/vgeV8/WnjO3CMue9D51nYgcyQORhYXv6uVjAYCWbUExA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.272.0.tgz}
name: '@aws-sdk/fetch-http-handler'
@@ -4220,16 +4371,6 @@ packages:
version: 0.9.0
dev: false
registry.npmmirror.com/@emotion/is-prop-valid/0.8.8:
resolution: {integrity: sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz}
name: '@emotion/is-prop-valid'
version: 0.8.8
requiresBuild: true
dependencies:
'@emotion/memoize': registry.npmmirror.com/@emotion/memoize/0.7.4
dev: false
optional: true
registry.npmmirror.com/@emotion/is-prop-valid/1.2.0:
resolution: {integrity: sha512-3aDpDprjM0AwaxGE09bOPkNxHpBd+kA6jty3RnaEXdweX1DF1U3VQpPYb0g1IStAuK7SVQ1cy+bNBBKp4W3Fjg==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@emotion/is-prop-valid/-/is-prop-valid-1.2.0.tgz}
name: '@emotion/is-prop-valid'
@@ -4520,153 +4661,6 @@ packages:
version: 13.1.6
dev: false
registry.npmmirror.com/@next/swc-android-arm-eabi/13.1.6:
resolution: {integrity: sha512-F3/6Z8LH/pGlPzR1AcjPFxx35mPqjE5xZcf+IL+KgbW9tMkp7CYi1y7qKrEWU7W4AumxX/8OINnDQWLiwLasLQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-13.1.6.tgz}
name: '@next/swc-android-arm-eabi'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [arm]
os: [android]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-android-arm64/13.1.6:
resolution: {integrity: sha512-cMwQjnB8vrYkWyK/H0Rf2c2pKIH4RGjpKUDvbjVAit6SbwPDpmaijLio0LWFV3/tOnY6kvzbL62lndVA0mkYpw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-android-arm64/-/swc-android-arm64-13.1.6.tgz}
name: '@next/swc-android-arm64'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [arm64]
os: [android]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-darwin-arm64/13.1.6:
resolution: {integrity: sha512-KKRQH4DDE4kONXCvFMNBZGDb499Hs+xcFAwvj+rfSUssIDrZOlyfJNy55rH5t2Qxed1e4K80KEJgsxKQN1/fyw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.1.6.tgz}
name: '@next/swc-darwin-arm64'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-darwin-x64/13.1.6:
resolution: {integrity: sha512-/uOky5PaZDoaU99ohjtNcDTJ6ks/gZ5ykTQDvNZDjIoCxFe3+t06bxsTPY6tAO6uEAw5f6vVFX5H5KLwhrkZCA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-darwin-x64/-/swc-darwin-x64-13.1.6.tgz}
name: '@next/swc-darwin-x64'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-freebsd-x64/13.1.6:
resolution: {integrity: sha512-qaEALZeV7to6weSXk3Br80wtFQ7cFTpos/q+m9XVRFggu+8Ib895XhMWdJBzew6aaOcMvYR6KQ6JmHA2/eMzWw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-freebsd-x64/-/swc-freebsd-x64-13.1.6.tgz}
name: '@next/swc-freebsd-x64'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [x64]
os: [freebsd]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-linux-arm-gnueabihf/13.1.6:
resolution: {integrity: sha512-OybkbC58A1wJ+JrJSOjGDvZzrVEQA4sprJejGqMwiZyLqhr9Eo8FXF0y6HL+m1CPCpPhXEHz/2xKoYsl16kNqw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-13.1.6.tgz}
name: '@next/swc-linux-arm-gnueabihf'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [arm]
os: [linux]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-linux-arm64-gnu/13.1.6:
resolution: {integrity: sha512-yCH+yDr7/4FDuWv6+GiYrPI9kcTAO3y48UmaIbrKy8ZJpi7RehJe3vIBRUmLrLaNDH3rY1rwoHi471NvR5J5NQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.1.6.tgz}
name: '@next/swc-linux-arm64-gnu'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [glibc]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-linux-arm64-musl/13.1.6:
resolution: {integrity: sha512-ECagB8LGX25P9Mrmlc7Q/TQBb9rGScxHbv/kLqqIWs2fIXy6Y/EiBBiM72NTwuXUFCNrWR4sjUPSooVBJJ3ESQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.1.6.tgz}
name: '@next/swc-linux-arm64-musl'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [musl]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-linux-x64-gnu/13.1.6:
resolution: {integrity: sha512-GT5w2mruk90V/I5g6ScuueE7fqj/d8Bui2qxdw6lFxmuTgMeol5rnzAv4uAoVQgClOUO/MULilzlODg9Ib3Y4Q==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.1.6.tgz}
name: '@next/swc-linux-x64-gnu'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [glibc]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-linux-x64-musl/13.1.6:
resolution: {integrity: sha512-keFD6KvwOPzmat4TCnlnuxJCQepPN+8j3Nw876FtULxo8005Y9Ghcl7ACcR8GoiKoddAq8gxNBrpjoxjQRHeAQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.1.6.tgz}
name: '@next/swc-linux-x64-musl'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [musl]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-win32-arm64-msvc/13.1.6:
resolution: {integrity: sha512-OwertslIiGQluFvHyRDzBCIB07qJjqabAmINlXUYt7/sY7Q7QPE8xVi5beBxX/rxTGPIbtyIe3faBE6Z2KywhQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.1.6.tgz}
name: '@next/swc-win32-arm64-msvc'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-win32-ia32-msvc/13.1.6:
resolution: {integrity: sha512-g8zowiuP8FxUR9zslPmlju7qYbs2XBtTLVSxVikPtUDQedhcls39uKYLvOOd1JZg0ehyhopobRoH1q+MHlIN/w==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.1.6.tgz}
name: '@next/swc-win32-ia32-msvc'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [ia32]
os: [win32]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@next/swc-win32-x64-msvc/13.1.6:
resolution: {integrity: sha512-Ls2OL9hi3YlJKGNdKv8k3X/lLgc3VmLG3a/DeTkAd+lAituJp8ZHmRmm9f9SL84fT3CotlzcgbdaCDfFwFA6bA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.1.6.tgz}
name: '@next/swc-win32-x64-msvc'
version: 13.1.6
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/@nodelib/fs.scandir/2.1.5:
resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz}
name: '@nodelib/fs.scandir'
@@ -6018,7 +6012,7 @@ packages:
normalize-path: registry.npmmirror.com/normalize-path/3.0.0
readdirp: registry.npmmirror.com/readdirp/3.6.0
optionalDependencies:
fsevents: registry.npmmirror.com/fsevents/2.3.2
fsevents: 2.3.2
dev: false
registry.npmmirror.com/chownr/1.1.4:
@@ -6812,7 +6806,7 @@ packages:
esutils: registry.npmmirror.com/esutils/2.0.3
optionator: registry.npmmirror.com/optionator/0.8.3
optionalDependencies:
source-map: registry.npmmirror.com/source-map/0.6.1
source-map: 0.6.1
dev: false
registry.npmmirror.com/eslint-config-next/13.1.6_7kw3g6rralp5ps6mg3uyzz6azm:
@@ -7399,7 +7393,7 @@ packages:
react-dom: registry.npmmirror.com/react-dom/18.2.0_react@18.2.0
tslib: registry.npmmirror.com/tslib/2.5.0
optionalDependencies:
'@emotion/is-prop-valid': registry.npmmirror.com/@emotion/is-prop-valid/0.8.8
'@emotion/is-prop-valid': 0.8.8
dev: false
registry.npmmirror.com/framesync/6.1.2:
@@ -7433,16 +7427,6 @@ packages:
version: 1.0.0
dev: true
registry.npmmirror.com/fsevents/2.3.2:
resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/fsevents/-/fsevents-2.3.2.tgz}
name: fsevents
version: 2.3.2
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
requiresBuild: true
dev: false
optional: true
registry.npmmirror.com/ftp/0.3.10:
resolution: {integrity: sha512-faFVML1aBx2UoDStmLwv2Wptt4vw5x03xxX172nhA5Y5HBshW5JweqQ2W4xL4dezQTG8inJsuYcpPHHU3X5OTQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/ftp/-/ftp-0.3.10.tgz}
name: ftp
@@ -8442,7 +8426,7 @@ packages:
name: jsonfile
version: 4.0.0
optionalDependencies:
graceful-fs: registry.npmmirror.com/graceful-fs/4.2.10
graceful-fs: 4.2.10
dev: false
registry.npmmirror.com/jsonwebtoken/9.0.0:
@@ -9373,8 +9357,8 @@ packages:
mongodb-connection-string-url: registry.npmmirror.com/mongodb-connection-string-url/2.6.0
socks: registry.npmmirror.com/socks/2.7.1
optionalDependencies:
'@aws-sdk/credential-providers': registry.npmmirror.com/@aws-sdk/credential-providers/3.278.0
saslprep: registry.npmmirror.com/saslprep/1.0.3
'@aws-sdk/credential-providers': 3.278.0
saslprep: 1.0.3
transitivePeerDependencies:
- aws-crt
dev: false
@@ -9513,19 +9497,19 @@ packages:
sass: registry.npmmirror.com/sass/1.58.3
styled-jsx: registry.npmmirror.com/styled-jsx/5.1.1_react@18.2.0
optionalDependencies:
'@next/swc-android-arm-eabi': registry.npmmirror.com/@next/swc-android-arm-eabi/13.1.6
'@next/swc-android-arm64': registry.npmmirror.com/@next/swc-android-arm64/13.1.6
'@next/swc-darwin-arm64': registry.npmmirror.com/@next/swc-darwin-arm64/13.1.6
'@next/swc-darwin-x64': registry.npmmirror.com/@next/swc-darwin-x64/13.1.6
'@next/swc-freebsd-x64': registry.npmmirror.com/@next/swc-freebsd-x64/13.1.6
'@next/swc-linux-arm-gnueabihf': registry.npmmirror.com/@next/swc-linux-arm-gnueabihf/13.1.6
'@next/swc-linux-arm64-gnu': registry.npmmirror.com/@next/swc-linux-arm64-gnu/13.1.6
'@next/swc-linux-arm64-musl': registry.npmmirror.com/@next/swc-linux-arm64-musl/13.1.6
'@next/swc-linux-x64-gnu': registry.npmmirror.com/@next/swc-linux-x64-gnu/13.1.6
'@next/swc-linux-x64-musl': registry.npmmirror.com/@next/swc-linux-x64-musl/13.1.6
'@next/swc-win32-arm64-msvc': registry.npmmirror.com/@next/swc-win32-arm64-msvc/13.1.6
'@next/swc-win32-ia32-msvc': registry.npmmirror.com/@next/swc-win32-ia32-msvc/13.1.6
'@next/swc-win32-x64-msvc': registry.npmmirror.com/@next/swc-win32-x64-msvc/13.1.6
'@next/swc-android-arm-eabi': 13.1.6
'@next/swc-android-arm64': 13.1.6
'@next/swc-darwin-arm64': 13.1.6
'@next/swc-darwin-x64': 13.1.6
'@next/swc-freebsd-x64': 13.1.6
'@next/swc-linux-arm-gnueabihf': 13.1.6
'@next/swc-linux-arm64-gnu': 13.1.6
'@next/swc-linux-arm64-musl': 13.1.6
'@next/swc-linux-x64-gnu': 13.1.6
'@next/swc-linux-x64-musl': 13.1.6
'@next/swc-win32-arm64-msvc': 13.1.6
'@next/swc-win32-ia32-msvc': 13.1.6
'@next/swc-win32-x64-msvc': 13.1.6
transitivePeerDependencies:
- '@babel/core'
- babel-plugin-macros
@@ -10760,17 +10744,6 @@ packages:
version: 2.1.2
dev: false
registry.npmmirror.com/saslprep/1.0.3:
resolution: {integrity: sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/saslprep/-/saslprep-1.0.3.tgz}
name: saslprep
version: 1.0.3
engines: {node: '>=6'}
requiresBuild: true
dependencies:
sparse-bitfield: registry.npmmirror.com/sparse-bitfield/3.0.3
dev: false
optional: true
registry.npmmirror.com/sass/1.58.3:
resolution: {integrity: sha512-Q7RaEtYf6BflYrQ+buPudKR26/lH+10EmO9bBqbmPh/KeLqv8bjpTNqxe71ocONqXq+jYiCbpPUmQMS+JJPk4A==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/sass/-/sass-1.58.3.tgz}
name: sass
@@ -11011,6 +10984,7 @@ packages:
name: source-map
version: 0.6.1
engines: {node: '>=0.10.0'}
dev: true
registry.npmmirror.com/space-separated-tokens/1.1.5:
resolution: {integrity: sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz}

View File

@@ -3,39 +3,41 @@
[Git 仓库](https://github.com/c121914yu/FastGPT)
### 交流群/问题反馈
扫码满了,加个小号,定时拉
wx号: fastgpt123
wx 号: fastgpt123
![](/imgs/wx300.jpg)
### 快速开始
1. 使用手机号注册账号。
2. 进入账号页面,添加关联账号,目前只有 openai 的账号可以添加,直接去 openai 官网,把 API Key 粘贴过来。
1. 使用手机号注册账号。
2. 进入账号页面,添加关联账号,目前只有 openai 的账号可以添加,直接去 openai 官网,把 API Key 粘贴过来。
3. 如果填写了自己的 openai 账号,使用时会直接用你的账号。如果没有填写,需要付费使用平台的账号。
4. 进入模型页,创建一个模型,建议直接用 ChatGPT。
5. 在模型列表点击【对话】,即可使用 API 进行聊天。
4. 进入模型页,创建一个模型,建议直接用 ChatGPT。
5. 在模型列表点击【对话】,即可使用 API 进行聊天。
### 价格表
如果使用了自己的 Api Key不会计费。可以在账号页看到详细账单。单纯使用 chatGPT 模型进行对话,只有一个计费项目。使用知识库时,包含**对话**和**索引**生成两个计费项。
| 计费项 | 价格: 元/ 1K tokens包含上下文|
| --- | --- |
| --- | --- |
| claude - 对话 | 免费 |
| chatgpt - 对话 | 0.03 |
| 知识库 - 对话 | 0.03 |
| 知识库 - 索引 | 0.004 |
| 文件拆分 | 0.03 |
### 定制 prompt
1. 进入模型编辑页
2. 调整温度和提示词
1. 进入模型编辑页
2. 调整温度和提示词
3. 使用该模型对话。每次对话时,提示词和温度都会自动注入,方便管理个人的模型。建议把自己日常经常需要使用的 5~10 个方向预设好。
### 知识库
1. 创建模型时选择【知识库】
2. 进入模型编辑页
3. 导入数据,可以选择手动导入,或者选择文件导入。文件导入会自动调用 chatGPT 理解文件内容,并生成知识库。
4. 使用该模型对话。
1. 创建模型时选择【知识库】
2. 进入模型编辑页
3. 导入数据,可以选择手动导入,或者选择文件导入。文件导入会自动调用 chatGPT 理解文件内容,并生成知识库。
4. 使用该模型对话。
注意使用知识库模型对话时tokens 消耗会加快。
注意使用知识库模型对话时tokens 消耗会加快。

View File

@@ -1,6 +1,6 @@
### Fast GPT V3.0
### Fast GPT V3.1
- 优化 - 知识库搜索,会将上一个问题并入搜索范围。
- 优化 - 模型结构设计,不再区分知识库和对话模型,而是通过开关的形式,手动选择手否需要进行知识库搜索。
- 新增 - 模型共享市场,可以使用其他用户分享的模型。
- 新增 - 邀请好友注册功能。
- 优化 - 选择文件,采用链式处理,避免卡死。
- 修复 - 导入时分段问题。

3
public/js/html2pdf.bundle.min.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@@ -24,6 +24,7 @@ export const delChatHistoryById = (id: string) => GET(`/chat/removeHistory?id=${
*/
export const postSaveChat = (data: {
modelId: string;
newChatId: '' | string;
chatId: '' | string;
prompts: ChatItemType[];
}) => POST<string>('/chat/saveChat', data);
@@ -31,5 +32,5 @@ export const postSaveChat = (data: {
/**
* 删除一句对话
*/
export const delChatRecordByIndex = (chatId: string, index: number) =>
DELETE(`/chat/delChatRecordByIndex?chatId=${chatId}&index=${index}`);
export const delChatRecordByIndex = (chatId: string, contentId: string) =>
DELETE(`/chat/delChatRecordByContentId?chatId=${chatId}&contentId=${contentId}`);

View File

@@ -1,4 +1,6 @@
import { getToken } from '../utils/user';
import { SYSTEM_PROMPT_HEADER, NEW_CHATID_HEADER } from '@/constants/chat';
interface StreamFetchProps {
url: string;
data: any;
@@ -6,50 +8,56 @@ interface StreamFetchProps {
abortSignal: AbortController;
}
export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchProps) =>
new Promise<string>(async (resolve, reject) => {
try {
const res = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: getToken() || ''
},
body: JSON.stringify(data),
signal: abortSignal.signal
});
const reader = res.body?.getReader();
if (!reader) return;
const decoder = new TextDecoder();
let responseText = '';
new Promise<{ responseText: string; systemPrompt: string; newChatId: string }>(
async (resolve, reject) => {
try {
const res = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: getToken() || ''
},
body: JSON.stringify(data),
signal: abortSignal.signal
});
const reader = res.body?.getReader();
if (!reader) return;
const read = async () => {
try {
const { done, value } = await reader?.read();
if (done) {
if (res.status === 200) {
resolve(responseText);
} else {
const parseError = JSON.parse(responseText);
reject(parseError?.message || '请求异常');
const decoder = new TextDecoder();
const systemPrompt = decodeURIComponent(res.headers.get(SYSTEM_PROMPT_HEADER) || '');
const newChatId = decodeURIComponent(res.headers.get(NEW_CHATID_HEADER) || '');
let responseText = '';
const read = async () => {
try {
const { done, value } = await reader?.read();
if (done) {
if (res.status === 200) {
resolve({ responseText, systemPrompt, newChatId });
} else {
const parseError = JSON.parse(responseText);
reject(parseError?.message || '请求异常');
}
return;
}
return;
const text = decoder.decode(value).replace(/<br\/>/g, '\n');
responseText += text;
onMessage(text);
read();
} catch (err: any) {
if (err?.message === 'The user aborted a request.') {
return resolve({ responseText, systemPrompt, newChatId });
}
reject(typeof err === 'string' ? err : err?.message || '请求异常');
}
const text = decoder.decode(value).replace(/<br\/>/g, '\n');
res.status === 200 && onMessage(text);
responseText += text;
read();
} catch (err: any) {
if (err?.message === 'The user aborted a request.') {
return resolve(responseText);
}
reject(typeof err === 'string' ? err : err?.message || '请求异常');
}
};
read();
} catch (err: any) {
console.log(err, '====');
reject(typeof err === 'string' ? err : err?.message || '请求异常');
};
read();
} catch (err: any) {
console.log(err, '====');
reject(typeof err === 'string' ? err : err?.message || '请求异常');
}
}
});
);

View File

@@ -12,8 +12,7 @@ export const getMyModels = () => GET<ModelSchema[]>('/model/list');
/**
* 创建一个模型
*/
export const postCreateModel = (data: { name: string; serviceModelName: string }) =>
POST<ModelSchema>('/model/create', data);
export const postCreateModel = (data: { name: string }) => POST<string>('/model/create', data);
/**
* 根据 ID 删除模型

View File

@@ -7,7 +7,6 @@ export type InitChatResponse = {
name: string;
avatar: string;
intro: string;
chatModel: ModelSchema.service.chatModel; // 对话模型名
modelName: ModelSchema.service.modelName; // 底层模型
chatModel: ModelSchema['chat']['chatModel']; // 对话模型名
history: ChatItemType[];
};

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1682957006954" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2644" xmlns:xlink="http://www.w3.org/1999/xlink" width="48" height="48"><path d="M942.1 593.9c-22.6 0-41 18.3-41 41v204.8c0 22.6-18.4 41-41 41H163.8c-22.6 0-41-18.4-41-41V634.9c0-22.6-18.3-41-41-41s-41 18.3-41 41v204.8c0 67.8 55.1 122.9 122.9 122.9H860c67.8 0 122.9-55.1 122.9-122.9V634.9c0.1-22.6-18.2-41-40.8-41z" p-id="2645"></path><path d="M309.3 363L471 201.3v515.5c0 22.5 18.4 41 41 41 22.5 0 41-18.4 41-41V201.3L714.7 363c15.9 15.9 42 15.9 57.9 0 15.9-15.9 15.9-42 0-57.9L541.5 73.9c-0.2-0.2-0.3-0.4-0.4-0.5-5.7-5.7-12.7-9.3-20.1-10.9-0.2-0.1-0.5-0.2-0.7-0.2-2.7-0.5-5.4-0.8-8.1-0.8-2.7 0-5.5 0.3-8.1 0.8-0.3 0.1-0.5 0.2-0.7 0.2-7.4 1.6-14.4 5.2-20.1 10.9-0.2 0.2-0.3 0.4-0.4 0.5L251.4 305.1c-15.9 15.9-15.9 42 0 57.9 15.9 15.9 42 15.9 57.9 0z" p-id="2646"></path></svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -21,7 +21,8 @@ const map = {
stop: require('./icons/stop.svg').default,
shareMarket: require('./icons/shareMarket.svg').default,
collectionLight: require('./icons/collectionLight.svg').default,
collectionSolid: require('./icons/collectionSolid.svg').default
collectionSolid: require('./icons/collectionSolid.svg').default,
export: require('./icons/export.svg').default
};
export type IconName = keyof typeof map;

View File

@@ -39,7 +39,7 @@ const NavbarPhone = ({
px={7}
>
<Box onClick={onOpen}>
<MyIcon name="menu" width={'20px'} height={'20px'} color={'blackAlpha.600'}></MyIcon>
<MyIcon name="menu" width={'20px'} height={'20px'} color={'blackAlpha.700'}></MyIcon>
</Box>
</Flex>
<Drawer isOpen={isOpen} placement="left" size={'xs'} onClose={onClose}>

View File

@@ -17,7 +17,7 @@ const Markdown = ({ source, isChatting = false }: { source: string; isChatting?:
return (
<ReactMarkdown
className={`${styles.markdown} ${
className={`markdown ${styles.markdown} ${
isChatting ? (source === '' ? styles.waitingAnimation : styles.animation) : ''
}`}
remarkPlugins={[remarkMath]}
@@ -31,6 +31,7 @@ const Markdown = ({ source, isChatting = false }: { source: string; isChatting?:
return !inline || match ? (
<Box my={3} borderRadius={'md'} overflow={'hidden'} backgroundColor={'#222'}>
<Flex
className="code-header"
py={2}
px={5}
backgroundColor={useColorModeValue('#323641', 'gray.600')}

23
src/constants/chat.ts Normal file

File diff suppressed because one or more lines are too long

View File

@@ -4,3 +4,823 @@ export enum UserAuthTypeEnum {
}
export const PRICE_SCALE = 100000;
export const htmlTemplate = `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>FastGpt</title>
</head>
<style>
.markdown > :first-child {
margin-top: 0 !important;
}
.markdown > :last-child {
margin-bottom: 0 !important;
}
.markdown a.absent {
color: #cc0000;
}
.markdown a.anchor {
bottom: 0;
cursor: pointer;
display: block;
left: 0;
margin-left: -30px;
padding-left: 30px;
position: absolute;
top: 0;
}
.markdown h1,
.markdown h2,
.markdown h3,
.markdown h4,
.markdown h5,
.markdown h6 {
cursor: text;
font-weight: bold;
margin: 10px 0;
padding: 0;
position: relative;
}
.markdown h1 .mini-icon-link,
.markdown h2 .mini-icon-link,
.markdown h3 .mini-icon-link,
.markdown h4 .mini-icon-link,
.markdown h5 .mini-icon-link,
.markdown h6 .mini-icon-link {
display: none;
}
.markdown h1:hover a.anchor,
.markdown h2:hover a.anchor,
.markdown h3:hover a.anchor,
.markdown h4:hover a.anchor,
.markdown h5:hover a.anchor,
.markdown h6:hover a.anchor {
line-height: 1;
margin-left: -22px;
padding-left: 0;
text-decoration: none;
top: 15%;
}
.markdown h1:hover a.anchor .mini-icon-link,
.markdown h2:hover a.anchor .mini-icon-link,
.markdown h3:hover a.anchor .mini-icon-link,
.markdown h4:hover a.anchor .mini-icon-link,
.markdown h5:hover a.anchor .mini-icon-link,
.markdown h6:hover a.anchor .mini-icon-link {
display: inline-block;
}
.markdown h1 tt,
.markdown h1 code,
.markdown h2 tt,
.markdown h2 code,
.markdown h3 tt,
.markdown h3 code,
.markdown h4 tt,
.markdown h4 code,
.markdown h5 tt,
.markdown h5 code,
.markdown h6 tt,
.markdown h6 code {
font-size: inherit;
}
.markdown h1 {
font-size: 28px;
}
.markdown h2 {
font-size: 24px;
}
.markdown h3 {
font-size: 18px;
}
.markdown h4 {
font-size: 16px;
}
.markdown h5 {
font-size: 14px;
}
.markdown h6 {
font-size: 12px;
}
.markdown p,
.markdown blockquote,
.markdown ul,
.markdown ol,
.markdown dl,
.markdown table,
.markdown pre {
margin: 10px 0;
}
.markdown > h2:first-child,
.markdown > h1:first-child,
.markdown > h1:first-child + h2,
.markdown > h3:first-child,
.markdown > h4:first-child,
.markdown > h5:first-child,
.markdown > h6:first-child {
margin-top: 0;
padding-top: 0;
}
.markdown a:first-child,
.markdown > h1,
.markdown a:first-child,
.markdown > h2,
.markdown a:first-child,
.markdown > h3,
.markdown a:first-child,
.markdown > h4,
.markdown a:first-child,
.markdown > h5,
.markdown a:first-child,
.markdown > h6 {
margin-top: 0;
padding-top: 0;
}
.markdown h1 + p,
.markdown h2 + p,
.markdown h3 + p,
.markdown h4 + p,
.markdown h5 + p,
.markdown h6 + p {
margin-top: 0;
}
.markdown li p.first {
display: inline-block;
}
.markdown ul,
.markdown ol {
padding-left: 2em;
}
.markdown ul.no-list,
.markdown ol.no-list {
list-style-type: none;
padding: 0;
}
.markdown ul li > :first-child,
.markdown ol li > :first-child {
margin-top: 0;
}
.markdown ul ul,
.markdown ul ol,
.markdown ol ol,
.markdown ol ul {
margin-bottom: 0;
}
.markdown dl {
padding: 0;
}
.markdown dl dt {
font-size: 14px;
font-style: italic;
font-weight: bold;
margin: 15px 0 5px;
padding: 0;
}
.markdown dl dt:first-child {
padding: 0;
}
.markdown dl dt > :first-child {
margin-top: 0;
}
.markdown dl dt > :last-child {
margin-bottom: 0;
}
.markdown dl dd {
margin: 0 0 15px;
padding: 0 15px;
}
.markdown dl dd > *:first-child {
margin-top: 0;
}
.markdown dl dd > *:last-child {
margin-bottom: 0;
}
.markdown blockquote {
border-left: solid 4px #dddddd;
color: #777777;
padding-left: 15px;
}
.markdown blockquote > * :first-child {
margin-top: 0;
}
.markdown blockquote > * :last-child {
margin-bottom: 0;
}
.markdown table th {
font-weight: bold;
}
.markdown table th,
.markdown table td {
padding: 6px 13px;
}
.markdown table tr {
background-color: #ffffff;
}
.markdown table tr:nth-child(2n) {
background-color: #f0f0f0;
}
.markdown img {
max-width: 100%;
}
.markdown span.frame {
display: block;
overflow: hidden;
}
.markdown span.frame > span {
border: 1px solid #dddddd;
display: block;
float: left;
margin: 13px 0 0;
overflow: hidden;
padding: 7px;
width: auto;
}
.markdown span.frame span img {
display: block;
float: left;
}
.markdown span.frame span span {
clear: both;
color: #333333;
display: block;
padding: 5px;
}
.markdown span.align-center {
clear: both;
display: block;
overflow: hidden;
text-align: center;
}
.markdown span.align-center > span {
display: block;
margin: 13px auto;
overflow: hidden;
}
.markdown span.align-center span img {
margin: 0 auto;
text-align: center;
}
.markdown span.align-right {
clear: both;
display: block;
overflow: hidden;
}
.markdown span.align-right > span {
display: block;
margin: 13px auto;
overflow: hidden;
text-align: right;
}
.markdown span.align-right img {
margin: 0;
text-align: right;
}
.markdown span.float-left {
display: block;
float: left;
margin-right: 13px;
overflow: hidden;
}
.markdown span.float-left > span {
margin: 13px auto;
}
.markdown span.float-right {
display: block;
float: right;
margin-left: 13px;
overflow: hidden;
}
.markdown span.float-right > span {
display: block;
margin: 13px auto;
overflow: hidden;
text-align: right;
}
.markdown code,
.markdown tt {
border: 1px solid #eaeaea;
border-radius: 3px;
margin: 0 2px;
padding: 0 5px;
}
.markdown pre > code {
background-color: transparent;
border: none;
margin: 0;
padding: 0;
white-space: pre;
}
.markdown .highlight pre,
.markdown pre {
border: 1px solid #ccc;
border-radius: 3px;
font-size: max(0.9em, 14px);
line-height: 19px;
overflow: auto;
padding: 6px 10px;
}
.markdown pre code,
.markdown pre tt {
background-color: #f8f8f8;
border: none;
}
.markdown {
text-align: justify;
overflow-y: hidden;
tab-size: 4;
word-spacing: normal;
word-break: break-all;
}
.markdown pre {
display: block;
width: 100%;
padding: 15px;
margin: 0;
border: none;
border-radius: 0;
background-color: #222 !important;
overflow-x: auto;
color: #fff;
}
.markdown pre code {
background-color: #222 !important;
width: 100%;
}
.markdown a {
text-decoration: underline;
color: var(--chakra-colors-blue-600);
}
.markdown table {
border-collapse: separate;
border-spacing: 0;
color: #718096;
}
.markdown table thead tr:first-child th {
border-bottom-width: 1px;
border-left-width: 1px;
border-top-width: 1px;
border-color: #ccc;
background-color: #edf2f7;
overflow: hidden;
}
.markdown table thead tr:first-child th:first-child {
border-top-left-radius: 0.375rem;
}
.markdown table thead tr:first-child th:last-child {
border-right-width: 1px;
border-top-right-radius: 0.375rem;
}
.markdown td {
border-bottom-width: 1px;
border-left-width: 1px;
border-color: #ccc;
}
.markdown td:last-of-type {
border-right-width: 1px;
}
.markdown tbody tr:last-child {
overflow: hidden;
}
.markdown tbody tr:last-child td:first-child {
border-bottom-left-radius: 0.375rem;
}
.markdown tbody tr:last-child td:last-child {
border-bottom-right-radius: 0.375rem;
}
.markdown p {
text-align: justify;
white-space: pre-wrap;
}
code[class*='language-'] {
color: #d4d4d4;
text-shadow: none;
direction: ltr;
text-align: left;
white-space: pre;
word-spacing: normal;
word-break: normal;
line-height: 1.5;
-moz-tab-size: 4;
-o-tab-size: 4;
tab-size: 4;
-webkit-hyphens: none;
-moz-hyphens: none;
hyphens: none;
}
pre[class*='language-'] {
color: #d4d4d4;
text-shadow: none;
direction: ltr;
text-align: left;
white-space: pre;
word-spacing: normal;
word-break: normal;
line-height: 1.5;
-moz-tab-size: 4;
-o-tab-size: 4;
tab-size: 4;
-webkit-hyphens: none;
-moz-hyphens: none;
hyphens: none;
padding: 1em;
margin: 0.5em 0;
overflow: auto;
background: #1e1e1e;
}
code[class*='language-'] ::selection,
code[class*='language-']::selection,
pre[class*='language-'] ::selection,
pre[class*='language-']::selection {
text-shadow: none;
background: #264f78;
}
:not(pre) > code[class*='language-'] {
padding: 0.1em 0.3em;
border-radius: 0.3em;
color: #db4c69;
background: #1e1e1e;
}
.namespace {
opacity: 0.7;
}
.doctype.doctype-tag {
color: #569cd6;
}
.doctype.name {
color: #9cdcfe;
}
comment {
color: #6a9955;
}
prolog {
color: #6a9955;
}
.language-html .language-css .token.punctuation,
.language-html .language-javascript .token.punctuation {
color: #d4d4d4;
}
punctuation {
color: #d4d4d4;
}
boolean {
color: #569cd6;
}
constant {
color: #9cdcfe;
}
inserted {
color: #b5cea8;
}
number {
color: #b5cea8;
}
property {
color: #9cdcfe;
}
symbol {
color: #b5cea8;
}
tag {
color: #569cd6;
}
unit {
color: #b5cea8;
}
attr-name {
color: #9cdcfe;
}
builtin {
color: #ce9178;
}
char {
color: #ce9178;
}
deleted {
color: #ce9178;
}
selector {
color: #d7ba7d;
}
string {
color: #ce9178;
}
.language-css .token.string.url {
text-decoration: underline;
}
entity {
color: #569cd6;
}
operator {
color: #d4d4d4;
}
operator.arrow {
color: #569cd6;
}
atrule {
color: #ce9178;
}
atrule.rule {
color: #c586c0;
}
atrule.url {
color: #9cdcfe;
}
atrule.url.function {
color: #dcdcaa;
}
atrule.url.punctuation {
color: #d4d4d4;
}
keyword {
color: #569cd6;
}
keyword.control-flow {
color: #c586c0;
}
keyword.module {
color: #c586c0;
}
function {
color: #dcdcaa;
}
function.maybe-class-name {
color: #dcdcaa;
}
regex {
color: #d16969;
}
important {
color: #569cd6;
}
italic {
font-style: italic;
}
class-name {
color: #4ec9b0;
}
maybe-class-name {
color: #4ec9b0;
}
console {
color: #9cdcfe;
}
parameter {
color: #9cdcfe;
}
interpolation {
color: #9cdcfe;
}
punctuation.interpolation-punctuation {
color: #569cd6;
}
exports.maybe-class-name {
color: #9cdcfe;
}
imports.maybe-class-name {
color: #9cdcfe;
}
variable {
color: #9cdcfe;
}
escape {
color: #d7ba7d;
}
tag.punctuation {
color: grey;
}
cdata {
color: grey;
}
attr-value {
color: #ce9178;
}
attr-value.punctuation {
color: #ce9178;
}
attr-value.punctuation.attr-equals {
color: #d4d4d4;
}
namespace {
color: #4ec9b0;
}
code[class*='language-javascript'],
code[class*='language-jsx'],
code[class*='language-tsx'],
code[class*='language-typescript'] {
color: #9cdcfe;
}
pre[class*='language-javascript'],
pre[class*='language-jsx'],
pre[class*='language-tsx'],
pre[class*='language-typescript'] {
color: #9cdcfe;
}
code[class*='language-css'] {
color: #ce9178;
}
pre[class*='language-css'] {
color: #ce9178;
}
code[class*='language-html'] {
color: #d4d4d4;
}
pre[class*='language-html'] {
color: #d4d4d4;
}
.language-regex .token.anchor {
color: #dcdcaa;
}
.language-html .token.punctuation {
color: grey;
}
pre[class*='language-'] > code[class*='language-'] {
position: relative;
z-index: 1;
}
.line-highlight.line-highlight {
background: #f7ebc6;
box-shadow: inset 5px 0 0 #f7d87c;
z-index: 0;
}
* {
box-sizing: border-box;
}
body,
h1,
h2,
h3,
h4,
hr,
p,
blockquote,
dl,
dt,
dd,
ul,
ol,
li,
pre,
form,
fieldset,
legend,
button,
input,
textarea,
th,
td,
svg {
margin: 0;
}
body,
html {
font-size: 16px;
background-color: #fff;
color: rgba(0, 0, 0, 0.64);
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif,
'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol';
}
pre,
code,
kbd,
samp {
font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace;
font-size: 1em;
}
::-webkit-scrollbar,
::-webkit-scrollbar {
width: 10px;
height: 10px;
}
::-webkit-scrollbar-track,
::-webkit-scrollbar-track {
background: transparent;
border-radius: 2px;
}
::-webkit-scrollbar-thumb,
::-webkit-scrollbar-thumb {
background: #bfbfbf;
border-radius: 10px;
}
::-webkit-scrollbar-thumb:hover,
::-webkit-scrollbar-thumb:hover {
background: #999;
}
</style>
<style>
.chat-item {
display: flex;
align-items: flex-start;
padding: 20px 16px;
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
justify-content: center;
}
.chat-item img {
width: 30px;
max-height: 50px;
object-fit: contain;
margin-right: 10px;
}
.chat-item:nth-child(even) {
background-color: #f6f6f6;
}
.chat-item:nth-child(odd) {
background-color: #ffffff;
}
.markdown {
overflow-x: hidden;
max-width: 800px;
width: 100%;
}
@media (max-width: 900px) {
html {
font-size: 14px;
}
::-webkit-scrollbar,
::-webkit-scrollbar {
width: 2px;
height: 2px;
}
.chat-item img {
width: 20px;
max-height: 40px;
margin-right: 4px;
}
}
</style>
<body>{{CHAT_CONTENT}}</body>
</html>
`;

View File

@@ -1,51 +1,66 @@
import type { ModelSchema } from '@/types/mongoSchema';
export const embeddingModel = 'text-embedding-ada-002';
export enum ChatModelEnum {
export type EmbeddingModelType = 'text-embedding-ada-002';
export enum OpenAiChatEnum {
'GPT35' = 'gpt-3.5-turbo',
'GPT4' = 'gpt-4',
'GPT432k' = 'gpt-4-32k'
}
export enum ModelNameEnum {
GPT35 = 'gpt-3.5-turbo',
VECTOR_GPT = 'VECTOR_GPT'
export enum ClaudeEnum {
'Claude' = 'Claude'
}
export const Model2ChatModelMap: Record<`${ModelNameEnum}`, `${ChatModelEnum}`> = {
[ModelNameEnum.GPT35]: 'gpt-3.5-turbo',
[ModelNameEnum.VECTOR_GPT]: 'gpt-3.5-turbo'
};
export type ChatModelType = `${OpenAiChatEnum}` | `${ClaudeEnum}`;
export type ModelConstantsData = {
icon: 'model' | 'dbModel';
export type ChatModelItemType = {
chatModel: ChatModelType;
name: string;
model: `${ModelNameEnum}`;
trainName: string; // 空字符串代表不能训练
contextMaxToken: number;
systemMaxToken: number;
maxTemperature: number;
price: number; // 多少钱 / 1token单位: 0.00001元
price: number;
};
export const modelList: ModelConstantsData[] = [
{
icon: 'model',
name: 'chatGPT',
model: ModelNameEnum.GPT35,
trainName: '',
export const ChatModelMap = {
[OpenAiChatEnum.GPT35]: {
chatModel: OpenAiChatEnum.GPT35,
name: 'ChatGpt',
contextMaxToken: 4096,
systemMaxToken: 3000,
maxTemperature: 1.5,
price: 3
},
{
icon: 'dbModel',
name: '知识库',
model: ModelNameEnum.VECTOR_GPT,
trainName: 'vector',
contextMaxToken: 4096,
[OpenAiChatEnum.GPT4]: {
chatModel: OpenAiChatEnum.GPT4,
name: 'Gpt4',
contextMaxToken: 8000,
systemMaxToken: 4000,
maxTemperature: 1.5,
price: 30
},
[OpenAiChatEnum.GPT432k]: {
chatModel: OpenAiChatEnum.GPT432k,
name: 'Gpt4-32k',
contextMaxToken: 32000,
systemMaxToken: 4000,
maxTemperature: 1.5,
price: 30
},
[ClaudeEnum.Claude]: {
chatModel: ClaudeEnum.Claude,
name: 'Claude(免费体验)',
contextMaxToken: 9000,
systemMaxToken: 2500,
maxTemperature: 1,
price: 3
price: 0
}
};
export const chatModelList: ChatModelItemType[] = [
ChatModelMap[OpenAiChatEnum.GPT35],
ChatModelMap[ClaudeEnum.Claude]
];
export enum ModelStatusEnum {
@@ -115,14 +130,16 @@ export const ModelVectorSearchModeMap: Record<
export const defaultModel: ModelSchema = {
_id: 'modelId',
userId: 'userId',
name: 'modelName',
name: '模型名称',
avatar: '/icon/logo.png',
status: ModelStatusEnum.pending,
updateTime: Date.now(),
systemPrompt: '',
temperature: 5,
search: {
mode: ModelVectorSearchModeEnum.hightSimilarity
chat: {
useKb: false,
searchMode: ModelVectorSearchModeEnum.hightSimilarity,
systemPrompt: '',
temperature: 0,
chatModel: OpenAiChatEnum.GPT35
},
share: {
isShare: false,
@@ -130,10 +147,6 @@ export const defaultModel: ModelSchema = {
intro: '',
collection: 0
},
service: {
chatModel: ModelNameEnum.GPT35,
modelName: ModelNameEnum.GPT35
},
security: {
domain: ['*'],
contextMaxLen: 1,

View File

@@ -51,8 +51,9 @@ export default function App({ Component, pageProps }: AppProps) {
/>
<link rel="icon" href="/favicon.ico" />
</Head>
<Script src="/js/qrcode.min.js" strategy="afterInteractive"></Script>
<Script src="/js/pdf.js" strategy="afterInteractive"></Script>
<Script src="/js/qrcode.min.js" strategy="lazyOnload"></Script>
<Script src="/js/pdf.js" strategy="lazyOnload"></Script>
<Script src="/js/html2pdf.bundle.min.js" strategy="lazyOnload"></Script>
<QueryClientProvider client={queryClient}>
<ChakraProvider theme={theme}>
<ColorModeScript initialColorMode={theme.config.initialColorMode} />

133
src/pages/api/chat/chat.ts Normal file
View File

@@ -0,0 +1,133 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { searchKb } from '@/service/plugins/searchKb';
import { ChatRoleEnum } from '@/constants/chat';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
const stream = new PassThrough();
stream.on('error', () => {
console.log('error: ', 'stream error');
stream.destroy();
});
res.on('close', () => {
stream.destroy();
});
res.on('error', () => {
console.log('error: ', 'request error');
stream.destroy();
});
try {
const { chatId, prompt, modelId } = req.body as {
prompt: ChatItemSimpleType;
modelId: string;
chatId: '' | string;
};
const { authorization } = req.headers;
if (!modelId || !prompt) {
throw new Error('缺少参数');
}
await connectToDatabase();
let startTime = Date.now();
const { model, showModelDetail, content, userOpenAiKey, systemAuthKey, userId } =
await authChat({
modelId,
chatId,
authorization
});
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// 读取对话内容
const prompts = [...content, prompt];
// 使用了知识库搜索
if (model.chat.useKb) {
const { code, searchPrompt } = await searchKb({
userOpenAiKey,
prompts,
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity,
model,
userId
});
// search result is empty
if (code === 201) {
return res.send(searchPrompt?.value);
}
searchPrompt && prompts.unshift(searchPrompt);
} else {
// 没有用知识库搜索,仅用系统提示词
model.chat.systemPrompt &&
prompts.unshift({
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
});
}
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// 发出请求
const { streamResponse } = await modelServiceToolMap[model.chat.chatModel].chatCompletion({
apiKey: userOpenAiKey || systemAuthKey,
temperature: +temperature,
messages: prompts,
stream: true,
res,
chatId
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
step = 1;
const { totalTokens, finishMessages } = await resStreamResponse({
model: model.chat.chatModel,
res,
stream,
chatResponse: streamResponse,
prompts,
systemPrompt:
showModelDetail && prompts[0].obj === ChatRoleEnum.System ? prompts[0].value : ''
});
// 只有使用平台的 key 才计费
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
chatId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens
});
} catch (err: any) {
if (step === 1) {
// 直接结束流
console.log('error结束');
stream.destroy();
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}
}

View File

@@ -1,126 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { getOpenAIApi, authChat } from '@/service/utils/auth';
import { axiosConfig, openaiChatFilter } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import { modelList } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { gpt35StreamResponse } from '@/service/utils/openai';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
const stream = new PassThrough();
stream.on('error', () => {
console.log('error: ', 'stream error');
stream.destroy();
});
res.on('close', () => {
stream.destroy();
});
res.on('error', () => {
console.log('error: ', 'request error');
stream.destroy();
});
try {
const { chatId, prompt, modelId } = req.body as {
prompt: ChatItemType;
modelId: string;
chatId: '' | string;
};
const { authorization } = req.headers;
if (!modelId || !prompt) {
throw new Error('缺少参数');
}
await connectToDatabase();
let startTime = Date.now();
const { model, content, userApiKey, systemKey, userId } = await authChat({
modelId,
chatId,
authorization
});
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型加载异常');
}
// 读取对话内容
const prompts = [...content, prompt];
// 如果有系统提示词,自动插入
if (model.systemPrompt) {
prompts.unshift({
obj: 'SYSTEM',
value: model.systemPrompt
});
}
// 控制在 tokens 数量,防止超出
const filterPrompts = openaiChatFilter({
model: model.service.chatModel,
prompts,
maxTokens: modelConstantsData.contextMaxToken - 500
});
// 计算温度
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
// console.log(filterPrompts);
// 获取 chatAPI
const chatAPI = getOpenAIApi(userApiKey || systemKey);
// 发出请求
const chatResponse = await chatAPI.createChatCompletion(
{
model: model.service.chatModel,
temperature,
messages: filterPrompts,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream: true,
stop: ['.!?。']
},
{
timeout: 40000,
responseType: 'stream',
...axiosConfig
}
);
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
step = 1;
const { responseContent } = await gpt35StreamResponse({
res,
stream,
chatResponse
});
// 只有使用平台的 key 才计费
pushChatBill({
isPay: !userApiKey,
modelName: model.service.modelName,
userId,
chatId,
messages: filterPrompts.concat({ role: 'assistant', content: responseContent })
});
} catch (err: any) {
if (step === 1) {
// 直接结束流
console.log('error结束');
stream.destroy();
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}
}

View File

@@ -1,17 +1,17 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId, index } = req.query as { chatId: string; index: string };
const { chatId, contentId } = req.query as { chatId: string; contentId: string };
const { authorization } = req.headers;
if (!authorization) {
throw new Error('无权操作');
}
if (!chatId || !index) {
if (!chatId || !contentId) {
throw new Error('缺少参数');
}
@@ -26,30 +26,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('找不到对话');
}
// 重新计算 index跳过已经被删除的内容
let unDeleteIndex = +index;
let deletedIndex = 0;
for (deletedIndex = 0; deletedIndex < chatRecord.content.length; deletedIndex++) {
if (!chatRecord.content[deletedIndex].deleted) {
unDeleteIndex--;
if (unDeleteIndex < 0) {
break;
}
}
}
// 删除最一条数据库记录, 也就是预发送的那一条
// 删除一条数据库记录
await Chat.updateOne(
{
_id: chatId,
userId
},
{
$set: {
[`content.${deletedIndex}.deleted`]: true,
updateTime: Date.now()
}
}
{ $pull: { content: { _id: contentId } } }
);
jsonRes(res);

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
/* 获取历史记录 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Chat } from '@/service/mongo';
import type { InitChatResponse } from '@/api/response/chat';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { ChatItemType } from '@/types/chat';
import { authModel } from '@/service/utils/auth';
import mongoose from 'mongoose';
@@ -36,20 +36,23 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
userId: new mongoose.Types.ObjectId(userId)
}
},
{ $unwind: '$content' },
{ $match: { 'content.deleted': false } },
{ $sort: { 'content._id': -1 } },
{ $limit: 50 },
{
$project: {
id: '$content._id',
content: {
$slice: ['$content', -50] // 返回 content 数组的最后50个元素
}
}
},
{ $unwind: '$content' },
{
$project: {
_id: '$content._id',
obj: '$content.obj',
value: '$content.value'
value: '$content.value',
systemPrompt: '$content.systemPrompt'
}
}
]);
history.reverse();
}
jsonRes<InitChatResponse>(res, {
@@ -59,8 +62,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
name: model.name,
avatar: model.avatar,
intro: model.share.intro,
modelName: model.service.modelName,
chatModel: model.service.chatModel,
chatModel: model.chat.chatModel,
history
}
});

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { ChatItemType } from '@/types/chat';
import { connectToDatabase, Chat } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
/* 获取历史记录 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -3,12 +3,14 @@ import { jsonRes } from '@/service/response';
import { ChatItemType } from '@/types/chat';
import { connectToDatabase, Chat } from '@/service/mongo';
import { authModel } from '@/service/utils/auth';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import mongoose from 'mongoose';
/* 聊天内容存存储 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId, modelId, prompts } = req.body as {
const { chatId, modelId, prompts, newChatId } = req.body as {
newChatId: '' | string;
chatId: '' | string;
modelId: string;
prompts: ChatItemType[];
@@ -23,8 +25,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase();
const content = prompts.map((item) => ({
_id: new mongoose.Types.ObjectId(item._id),
obj: item.obj,
value: item.value
value: item.value,
systemPrompt: item.systemPrompt
}));
await authModel({ modelId, userId, authOwner: false });
@@ -32,6 +36,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
// 没有 chatId, 创建一个对话
if (!chatId) {
const { _id } = await Chat.create({
_id: newChatId ? new mongoose.Types.ObjectId(newChatId) : undefined,
userId,
modelId,
content,

View File

@@ -1,189 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authChat } from '@/service/utils/auth';
import { axiosConfig, systemPromptFilter, openaiChatFilter } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import {
modelList,
ModelVectorSearchModeMap,
ModelVectorSearchModeEnum,
ModelDataStatusEnum
} from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { openaiCreateEmbedding, gpt35StreamResponse } from '@/service/utils/openai';
import dayjs from 'dayjs';
import { PgClient } from '@/service/pg';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
const stream = new PassThrough();
stream.on('error', () => {
console.log('error: ', 'stream error');
stream.destroy();
});
res.on('close', () => {
stream.destroy();
});
res.on('error', () => {
console.log('error: ', 'request error');
stream.destroy();
});
try {
const { modelId, chatId, prompt } = req.body as {
modelId: string;
chatId: '' | string;
prompt: ChatItemType;
};
const { authorization } = req.headers;
if (!modelId || !prompt) {
throw new Error('缺少参数');
}
await connectToDatabase();
let startTime = Date.now();
const { model, content, userApiKey, systemKey, userId } = await authChat({
modelId,
chatId,
authorization
});
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型加载异常');
}
// 读取对话内容
const prompts = [...content, prompt];
// 获取提示词的向量
const { vector: promptVector, chatAPI } = await openaiCreateEmbedding({
isPay: !userApiKey,
apiKey: userApiKey || systemKey,
userId,
text: prompt.value
});
// 相似度搜素
const similarity = ModelVectorSearchModeMap[model.search.mode]?.similarity || 0.22;
const vectorSearch = await PgClient.select<{ id: string; q: string; a: string }>('modelData', {
fields: ['id', 'q', 'a'],
where: [
['status', ModelDataStatusEnum.ready],
'AND',
['model_id', model._id],
'AND',
`vector <=> '[${promptVector}]' < ${similarity}`
],
order: [{ field: 'vector', mode: `<=> '[${promptVector}]'` }],
limit: 20
});
const formatRedisPrompt: string[] = vectorSearch.rows.map((item) => `${item.q}\n${item.a}`);
/* 高相似度+退出,无法匹配时直接退出 */
if (
formatRedisPrompt.length === 0 &&
model.search.mode === ModelVectorSearchModeEnum.hightSimilarity
) {
return res.send('对不起,你的问题不在知识库中。');
}
/* 高相似度+无上下文,不添加额外知识 */
if (
formatRedisPrompt.length === 0 &&
model.search.mode === ModelVectorSearchModeEnum.noContext
) {
prompts.unshift({
obj: 'SYSTEM',
value: model.systemPrompt
});
} else {
// 有匹配情况下system 添加知识库内容。
// 系统提示词过滤,最多 2500 tokens
const systemPrompt = systemPromptFilter({
model: model.service.chatModel,
prompts: formatRedisPrompt,
maxTokens: 2500
});
prompts.unshift({
obj: 'SYSTEM',
value: `
${model.systemPrompt}
${
model.search.mode === ModelVectorSearchModeEnum.hightSimilarity
? `你只能从知识库选择内容回答.不在知识库内容拒绝回复`
: ''
}
知识库内容为: 当前时间为${dayjs().format('YYYY/MM/DD HH:mm:ss')}\n${systemPrompt}'
`
});
}
// 控制在 tokens 数量,防止超出
const filterPrompts = openaiChatFilter({
model: model.service.chatModel,
prompts,
maxTokens: modelConstantsData.contextMaxToken - 500
});
// console.log(filterPrompts);
// 计算温度
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
// 发出请求
const chatResponse = await chatAPI.createChatCompletion(
{
model: model.service.chatModel,
temperature,
messages: filterPrompts,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream: true,
stop: ['.!?。']
},
{
timeout: 40000,
responseType: 'stream',
...axiosConfig
}
);
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
step = 1;
const { responseContent } = await gpt35StreamResponse({
res,
stream,
chatResponse
});
// 只有使用平台的 key 才计费
pushChatBill({
isPay: !userApiKey,
modelName: model.service.modelName,
userId,
chatId,
messages: filterPrompts.concat({ role: 'assistant', content: responseContent })
});
// jsonRes(res);
} catch (err: any) {
if (step === 1) {
// 直接结束流
console.log('error结束');
stream.destroy();
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}
}

View File

@@ -2,15 +2,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { ModelStatusEnum, modelList, ModelNameEnum, Model2ChatModelMap } from '@/constants/model';
import { authToken } from '@/service/utils/auth';
import { ModelStatusEnum } from '@/constants/model';
import { Model } from '@/service/models/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { name, serviceModelName } = req.body as {
const { name } = req.body as {
name: string;
serviceModelName: `${ModelNameEnum}`;
};
const { authorization } = req.headers;
@@ -18,45 +17,32 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error('无权操作');
}
if (!name || !serviceModelName) {
if (!name) {
throw new Error('缺少参数');
}
// 凭证校验
const userId = await authToken(authorization);
const modelItem = modelList.find((item) => item.model === serviceModelName);
if (!modelItem) {
throw new Error('模型不存在');
}
await connectToDatabase();
// 上限校验
const authCount = await Model.countDocuments({
userId
});
if (authCount >= 20) {
throw new Error('上限 20 个模型');
if (authCount >= 30) {
throw new Error('上限 30 个模型');
}
// 创建模型
const response = await Model.create({
name,
userId,
status: ModelStatusEnum.running,
service: {
chatModel: Model2ChatModelMap[modelItem.model], // 聊天时用的模型
modelName: modelItem.model // 最底层的模型,不会变,用于计费等核心操作
}
status: ModelStatusEnum.running
});
// 根据 id 获取模型信息
const model = await Model.findById(response._id);
jsonRes(res, {
data: model
data: response._id
});
} catch (err) {
jsonRes(res, {

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import axios from 'axios';
import { axiosConfig } from '@/service/utils/tools';
@@ -22,7 +22,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const data = await axios
.get(url, {
httpsAgent: axiosConfig.httpsAgent
httpsAgent: axiosConfig().httpsAgent
})
.then((res) => res.data as string);

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import type { PgModelDataItemType } from '@/types/pg';
import { authModel } from '@/service/utils/auth';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, SplitData, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
/* 拆分数据成QA */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { generateVector } from '@/service/events/generateVector';
import { ModelDataStatusEnum } from '@/constants/model';
import { PgClient } from '@/service/pg';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { ModelDataSchema } from '@/types/mongoSchema';
import { generateVector } from '@/service/events/generateVector';
import { PgClient } from '@/service/pg';

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { ModelDataStatusEnum } from '@/constants/model';
import { generateVector } from '@/service/events/generateVector';
import { PgClient } from '@/service/pg';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, SplitData, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { generateVector } from '@/service/events/generateVector';
import { generateQA } from '@/service/events/generateQA';
import { PgClient } from '@/service/pg';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { Chat, Model, connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { authModel } from '@/service/utils/auth';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { authModel } from '@/service/utils/auth';
/* 获取我的模型 */

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { Model } from '@/service/models/model';
/* 获取模型列表 */

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Collection, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
/* 模型收藏切换 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Collection } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import type { ShareModelItem } from '@/types/model';
/* 获取模型列表 */

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Collection, Model } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import type { PagingData } from '@/types';
import type { ShareModelItem } from '@/types/model';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { Model } from '@/service/models/model';
import type { ModelUpdateParams } from '@/types/model';
import { authModel } from '@/service/utils/auth';
@@ -9,8 +9,7 @@ import { authModel } from '@/service/utils/auth';
/* 获取我的模型 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { name, avatar, search, share, service, security, systemPrompt, temperature } =
req.body as ModelUpdateParams;
const { name, avatar, chat, share, security } = req.body as ModelUpdateParams;
const { modelId } = req.query as { modelId: string };
const { authorization } = req.headers;
@@ -18,7 +17,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error('无权操作');
}
if (!name || !service || !security || !modelId) {
if (!name || !chat || !security || !modelId) {
throw new Error('参数错误');
}
@@ -41,12 +40,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
{
name,
avatar,
systemPrompt,
temperature,
chat,
'share.isShare': share.isShare,
'share.isShareDetail': share.isShareDetail,
'share.intro': share.intro,
search,
security
}
);

View File

@@ -0,0 +1,147 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authOpenApiKey, authModel } from '@/service/utils/auth';
import { modelServiceToolMap, resStreamResponse } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { searchKb } from '@/service/plugins/searchKb';
import { ChatRoleEnum } from '@/constants/chat';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
const stream = new PassThrough();
stream.on('error', () => {
console.log('error: ', 'stream error');
stream.destroy();
});
res.on('close', () => {
stream.destroy();
});
res.on('error', () => {
console.log('error: ', 'request error');
stream.destroy();
});
try {
const {
prompts,
modelId,
isStream = true
} = req.body as {
prompts: ChatItemSimpleType[];
modelId: string;
isStream: boolean;
};
if (!prompts || !modelId) {
throw new Error('缺少参数');
}
if (!Array.isArray(prompts)) {
throw new Error('prompts is not array');
}
if (prompts.length > 30 || prompts.length === 0) {
throw new Error('prompts length range 1-30');
}
await connectToDatabase();
let startTime = Date.now();
/* 凭证校验 */
const { apiKey, userId } = await authOpenApiKey(req);
const { model } = await authModel({
userId,
modelId
});
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// 使用了知识库搜索
if (model.chat.useKb) {
const similarity = ModelVectorSearchModeMap[model.chat.searchMode]?.similarity || 0.22;
const { code, searchPrompt } = await searchKb({
prompts,
similarity,
model,
userId
});
// search result is empty
if (code === 201) {
return res.send(searchPrompt?.value);
}
searchPrompt && prompts.unshift(searchPrompt);
} else {
// 没有用知识库搜索,仅用系统提示词
if (model.chat.systemPrompt) {
prompts.unshift({
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
});
}
}
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// 发出请求
const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({
apiKey,
temperature: +temperature,
messages: prompts,
stream: isStream
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
let textLen = 0;
let tokens = totalTokens;
if (isStream) {
step = 1;
const { finishMessages, totalTokens } = await resStreamResponse({
model: model.chat.chatModel,
res,
stream,
chatResponse: streamResponse,
prompts
});
textLen = finishMessages.map((item) => item.value).join('').length;
tokens = totalTokens;
} else {
textLen = responseMessages.map((item) => item.value).join('').length;
jsonRes(res, {
data: responseText
});
}
pushChatBill({
isPay: true,
chatModel: model.chat.chatModel,
userId,
textLen,
tokens
});
} catch (err: any) {
if (step === 1) {
// 直接结束流
console.log('error结束');
stream.destroy();
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}
}

View File

@@ -1,146 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo';
import { getOpenAIApi } from '@/service/utils/auth';
import { axiosConfig, openaiChatFilter, authOpenApiKey } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import { modelList } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { gpt35StreamResponse } from '@/service/utils/openai';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
const stream = new PassThrough();
stream.on('error', () => {
console.log('error: ', 'stream error');
stream.destroy();
});
res.on('close', () => {
stream.destroy();
});
res.on('error', () => {
console.log('error: ', 'request error');
stream.destroy();
});
try {
const {
prompts,
modelId,
isStream = true
} = req.body as {
prompts: ChatItemType[];
modelId: string;
isStream: boolean;
};
if (!prompts || !modelId) {
throw new Error('缺少参数');
}
if (!Array.isArray(prompts)) {
throw new Error('prompts is not array');
}
if (prompts.length > 30 || prompts.length === 0) {
throw new Error('prompts length range 1-30');
}
await connectToDatabase();
let startTime = Date.now();
const { apiKey, userId } = await authOpenApiKey(req);
const model = await Model.findOne({
_id: modelId,
userId
});
if (!model) {
throw new Error('无权使用该模型');
}
const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
if (!modelConstantsData) {
throw new Error('模型加载异常');
}
// 如果有系统提示词,自动插入
if (model.systemPrompt) {
prompts.unshift({
obj: 'SYSTEM',
value: model.systemPrompt
});
}
// 控制在 tokens 数量,防止超出
const filterPrompts = openaiChatFilter({
model: model.service.chatModel,
prompts,
maxTokens: modelConstantsData.contextMaxToken - 500
});
// console.log(filterPrompts);
// 计算温度
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
// 获取 chatAPI
const chatAPI = getOpenAIApi(apiKey);
// 发出请求
const chatResponse = await chatAPI.createChatCompletion(
{
model: model.service.chatModel,
temperature,
messages: filterPrompts,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream: isStream,
stop: ['.!?。']
},
{
timeout: 40000,
responseType: isStream ? 'stream' : 'json',
...axiosConfig
}
);
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
let responseContent = '';
if (isStream) {
step = 1;
const streamResponse = await gpt35StreamResponse({
res,
stream,
chatResponse
});
responseContent = streamResponse.responseContent;
} else {
responseContent = chatResponse.data.choices?.[0]?.message?.content || '';
jsonRes(res, {
data: responseContent
});
}
// 只有使用平台的 key 才计费
pushChatBill({
isPay: true,
modelName: model.service.modelName,
userId,
messages: filterPrompts.concat({ role: 'assistant', content: responseContent })
});
} catch (err: any) {
if (step === 1) {
// 直接结束流
console.log('error结束');
stream.destroy();
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}
}

View File

@@ -1,20 +1,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo';
import { getOpenAIApi } from '@/service/utils/auth';
import { authOpenApiKey } from '@/service/utils/tools';
import { axiosConfig, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat';
import { authOpenApiKey } from '@/service/utils/auth';
import { resStreamResponse, modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import {
ModelNameEnum,
modelList,
ModelVectorSearchModeMap,
ChatModelEnum
} from '@/constants/model';
import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { openaiCreateEmbedding, gpt35StreamResponse } from '@/service/utils/openai';
import { PgClient } from '@/service/pg';
import { searchKb } from '@/service/plugins/searchKb';
import { ChatRoleEnum } from '@/constants/chat';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -38,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
modelId,
isStream = true
} = req.body as {
prompt: ChatItemType;
prompt: ChatItemSimpleType;
modelId: string;
isStream: boolean;
};
@@ -59,26 +53,20 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('找不到模型');
}
const modelConstantsData = modelList.find((item) => item.model === ModelNameEnum.VECTOR_GPT);
if (!modelConstantsData) {
throw new Error('模型已下架');
}
const modelConstantsData = ChatModelMap[model.chat.chatModel];
console.log('laf gpt start');
// 获取 chatAPI
const chatAPI = getOpenAIApi(apiKey);
// 请求一次 chatgpt 拆解需求
const promptResponse = await chatAPI.createChatCompletion(
{
model: ChatModelEnum.GPT35,
temperature: 0,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
messages: [
{
role: 'system',
content: `服务端逻辑生成器.根据用户输入的需求,拆解成 laf 云函数实现的步骤,只返回步骤,按格式返回步骤: 1.\n2.\n3.\n ......
const { responseText: resolveText, totalTokens: resolveTokens } = await modelServiceToolMap[
model.chat.chatModel
].chatCompletion({
apiKey,
temperature: 0,
messages: [
{
obj: ChatRoleEnum.System,
value: `服务端逻辑生成器.根据用户输入的需求,拆解成 laf 云函数实现的步骤,只返回步骤,按格式返回步骤: 1.\n2.\n3.\n ......
下面是一些例子:
一个 hello world 例子
1. 返回字符串: "hello world"
@@ -111,111 +99,66 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
5. 获取当前时间,记录为 updateTime.
6. 更新数据库数据,表为"blogs",更新符合 blogId 的记录的内容为{blogText, tags, updateTime}.
7. 返回结果 "更新博客记录成功"`
},
{
role: 'user',
content: prompt.value
}
]
},
{
timeout: 180000,
...axiosConfig
}
);
const promptResolve = promptResponse.data.choices?.[0]?.message?.content || '';
if (!promptResolve) {
throw new Error('gpt 异常');
}
prompt.value += ` ${promptResolve}`;
console.log('prompt resolve success, time:', `${(Date.now() - startTime) / 1000}s`);
// 获取提示词的向量
const { vector: promptVector } = await openaiCreateEmbedding({
isPay: true,
apiKey,
userId,
text: prompt.value
},
{
obj: ChatRoleEnum.Human,
value: prompt.value
}
],
stream: false
});
prompt.value += ` ${resolveText}`;
console.log('prompt resolve success, time:', `${(Date.now() - startTime) / 1000}s`);
// 读取对话内容
const prompts = [prompt];
// 相似度搜索
const similarity = ModelVectorSearchModeMap[model.search.mode]?.similarity || 0.22;
const vectorSearch = await PgClient.select<{ id: string; q: string; a: string }>('modelData', {
fields: ['id', 'q', 'a'],
order: [{ field: 'vector', mode: `<=> '[${promptVector}]'` }],
where: [
['model_id', model._id],
'AND',
['user_id', userId],
'AND',
`vector <=> '[${promptVector}]' < ${similarity}`
],
limit: 30
});
const formatRedisPrompt: string[] = vectorSearch.rows.map((item) => `${item.q}\n${item.a}`);
// system 筛选,最多 2500 tokens
const systemPrompt = systemPromptFilter({
model: model.service.chatModel,
prompts: formatRedisPrompt,
maxTokens: 2500
});
prompts.unshift({
obj: 'SYSTEM',
value: `${model.systemPrompt} 知识库是最新的,下面是知识库内容:${systemPrompt}`
});
// 控制上下文 tokens 数量,防止超出
const filterPrompts = openaiChatFilter({
model: model.service.chatModel,
// 获取向量匹配到的提示词
const { searchPrompt } = await searchKb({
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity,
prompts,
maxTokens: modelConstantsData.contextMaxToken - 500
model,
userId
});
// console.log(filterPrompts);
// 计算温度
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
searchPrompt && prompts.unshift(searchPrompt);
// 发出请求
const chatResponse = await chatAPI.createChatCompletion(
{
model: model.service.chatModel,
temperature,
messages: filterPrompts,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream: isStream
},
{
timeout: 180000,
responseType: isStream ? 'stream' : 'json',
...axiosConfig
}
// 计算温度
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
console.log('code response. time:', `${(Date.now() - startTime) / 1000}s`);
// 发出请求
const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({
apiKey,
temperature: +temperature,
messages: prompts,
stream: isStream
});
let responseContent = '';
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
let textLen = resolveText.length;
let tokens = resolveTokens;
if (isStream) {
step = 1;
const streamResponse = await gpt35StreamResponse({
const { finishMessages, totalTokens } = await resStreamResponse({
model: model.chat.chatModel,
res,
stream,
chatResponse
chatResponse: streamResponse,
prompts
});
responseContent = streamResponse.responseContent;
textLen += finishMessages.map((item) => item.value).join('').length;
tokens += totalTokens;
} else {
responseContent = chatResponse.data.choices?.[0]?.message?.content || '';
textLen += responseMessages.map((item) => item.value).join('').length;
tokens += totalTokens;
jsonRes(res, {
data: responseContent
data: responseText
});
}
@@ -223,9 +166,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
pushChatBill({
isPay: true,
modelName: model.service.modelName,
chatModel: model.chat.chatModel,
userId,
messages: filterPrompts.concat({ role: 'assistant', content: responseContent })
textLen,
tokens
});
} catch (err: any) {
if (step === 1) {

View File

@@ -1,217 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo';
import {
axiosConfig,
systemPromptFilter,
authOpenApiKey,
openaiChatFilter
} from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream';
import {
modelList,
ModelVectorSearchModeMap,
ModelVectorSearchModeEnum,
ModelDataStatusEnum
} from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { openaiCreateEmbedding, gpt35StreamResponse } from '@/service/utils/openai';
import dayjs from 'dayjs';
import { PgClient } from '@/service/pg';
/* 发送提示词 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let step = 0; // step=1时表示开始了流响应
const stream = new PassThrough();
stream.on('error', () => {
console.log('error: ', 'stream error');
stream.destroy();
});
res.on('close', () => {
stream.destroy();
});
res.on('error', () => {
console.log('error: ', 'request error');
stream.destroy();
});
try {
const {
prompts,
modelId,
isStream = true
} = req.body as {
prompts: ChatItemType[];
modelId: string;
isStream: boolean;
};
if (!prompts || !modelId) {
throw new Error('缺少参数');
}
if (!Array.isArray(prompts)) {
throw new Error('prompts is not array');
}
if (prompts.length > 30 || prompts.length === 0) {
throw new Error('prompts length range 1-30');
}
await connectToDatabase();
let startTime = Date.now();
/* 凭证校验 */
const { apiKey, userId } = await authOpenApiKey(req);
const model = await Model.findOne({
_id: modelId,
userId
});
if (!model) {
throw new Error('无权使用该模型');
}
const modelConstantsData = modelList.find((item) => item.model === model?.service?.modelName);
if (!modelConstantsData) {
throw new Error('模型初始化异常');
}
// 获取提示词的向量
const { vector: promptVector, chatAPI } = await openaiCreateEmbedding({
isPay: true,
apiKey,
userId,
text: prompts[prompts.length - 1].value // 取最后一个
});
// 相似度搜素
const similarity = ModelVectorSearchModeMap[model.search.mode]?.similarity || 0.22;
const vectorSearch = await PgClient.select<{ id: string; q: string; a: string }>('modelData', {
fields: ['id', 'q', 'a'],
where: [
['status', ModelDataStatusEnum.ready],
'AND',
['model_id', model._id],
'AND',
`vector <=> '[${promptVector}]' < ${similarity}`
],
order: [{ field: 'vector', mode: `<=> '[${promptVector}]'` }],
limit: 20
});
const formatRedisPrompt: string[] = vectorSearch.rows.map((item) => `${item.q}\n${item.a}`);
// system 合并
if (prompts[0].obj === 'SYSTEM') {
formatRedisPrompt.unshift(prompts.shift()?.value || '');
}
/* 高相似度+退出,无法匹配时直接退出 */
if (
formatRedisPrompt.length === 0 &&
model.search.mode === ModelVectorSearchModeEnum.hightSimilarity
) {
return res.send('对不起,你的问题不在知识库中。');
}
/* 高相似度+无上下文,不添加额外知识 */
if (
formatRedisPrompt.length === 0 &&
model.search.mode === ModelVectorSearchModeEnum.noContext
) {
prompts.unshift({
obj: 'SYSTEM',
value: model.systemPrompt
});
} else {
// 有匹配或者低匹配度模式情况下,添加知识库内容。
// 系统提示词过滤,最多 2500 tokens
const systemPrompt = systemPromptFilter({
model: model.service.chatModel,
prompts: formatRedisPrompt,
maxTokens: 2500
});
prompts.unshift({
obj: 'SYSTEM',
value: `
${model.systemPrompt}
${
model.search.mode === ModelVectorSearchModeEnum.hightSimilarity
? `你只能从知识库选择内容回答.不在知识库内容拒绝回复`
: ''
}
知识库内容为: 当前时间为${dayjs().format('YYYY/MM/DD HH:mm:ss')}\n${systemPrompt}'
`
});
}
// 控制在 tokens 数量,防止超出
const filterPrompts = openaiChatFilter({
model: model.service.chatModel,
prompts,
maxTokens: modelConstantsData.contextMaxToken - 500
});
// console.log(filterPrompts);
// 计算温度
const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
// 发出请求
const chatResponse = await chatAPI.createChatCompletion(
{
model: model.service.chatModel,
temperature,
messages: filterPrompts,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream: isStream,
stop: ['.!?。']
},
{
timeout: 180000,
responseType: isStream ? 'stream' : 'json',
...axiosConfig
}
);
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
let responseContent = '';
if (isStream) {
step = 1;
const streamResponse = await gpt35StreamResponse({
res,
stream,
chatResponse
});
responseContent = streamResponse.responseContent;
} else {
responseContent = chatResponse.data.choices?.[0]?.message?.content || '';
jsonRes(res, {
data: responseContent
});
}
pushChatBill({
isPay: true,
modelName: model.service.modelName,
userId,
messages: filterPrompts.concat({ role: 'assistant', content: responseContent })
});
// jsonRes(res);
} catch (err: any) {
if (step === 1) {
// 直接结束流
console.log('error结束');
stream.destroy();
} else {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}
}

View File

@@ -2,7 +2,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, OpenApi } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {

View File

@@ -2,7 +2,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, OpenApi } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { UserOpenApiKey } from '@/types/openapi';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -2,7 +2,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, OpenApi } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890');

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, User, Pay } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { PaySchema, UserModelSchema } from '@/types/mongoSchema';
import dayjs from 'dayjs';
import { getPayResult } from '@/service/utils/wxpay';

View File

@@ -2,7 +2,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, Bill } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import type { BillSchema } from '@/types/mongoSchema';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { customAlphabet } from 'nanoid';
import { connectToDatabase, Pay } from '@/service/mongo';
import { PRICE_SCALE } from '@/constants/common';

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { connectToDatabase, Pay } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -2,7 +2,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, User, promotionRecord } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import mongoose from 'mongoose';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -2,7 +2,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, promotionRecord } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {

View File

@@ -3,7 +3,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { User } from '@/service/models/user';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {

View File

@@ -3,7 +3,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { User } from '@/service/models/user';
import { connectToDatabase } from '@/service/mongo';
import { authToken } from '@/service/utils/tools';
import { authToken } from '@/service/utils/auth';
import { UserUpdateParams } from '@/types/user';
/* 更新一些基本信息 */

View File

@@ -1,4 +1,4 @@
import React, { useRef, useEffect, useMemo } from 'react';
import React, { useRef, useEffect, useMemo, useCallback } from 'react';
import { AddIcon, ChatIcon, DeleteIcon, MoonIcon, SunIcon } from '@chakra-ui/icons';
import {
Box,
@@ -13,7 +13,11 @@ import {
IconButton,
useDisclosure,
useColorMode,
useColorModeValue
useColorModeValue,
Menu,
MenuButton,
MenuList,
MenuItem
} from '@chakra-ui/react';
import { useUserStore } from '@/store/user';
import { useMutation, useQuery } from '@tanstack/react-query';
@@ -23,16 +27,20 @@ import MyIcon from '@/components/Icon';
import WxConcat from '@/components/WxConcat';
import { getChatHistory, delChatHistoryById } from '@/api/chat';
import { getCollectionModels } from '@/api/model';
import { modelList } from '@/constants/model';
import type { ChatSiteItemType } from '../index';
import { fileDownload } from '@/utils/file';
import { htmlTemplate } from '@/constants/common';
const SlideBar = ({
chatId,
modelId,
history,
resetChat,
onClose
}: {
chatId: string;
modelId: string;
history: ChatSiteItemType[];
resetChat: (modelId?: string, chatId?: string) => void;
onClose: () => void;
}) => {
@@ -42,17 +50,25 @@ const SlideBar = ({
const { isOpen: isOpenWx, onOpen: onOpenWx, onClose: onCloseWx } = useDisclosure();
const preChatId = useRef('chatId'); // 用于校验上一次chatId的情况,判断是否需要刷新历史记录
const { isSuccess } = useQuery(['getMyModels'], getMyModels, {
cacheTime: 5 * 60 * 1000
const { isSuccess, refetch: fetchMyModels } = useQuery(['getMyModels'], getMyModels, {
cacheTime: 5 * 60 * 1000,
enabled: false
});
const { data: collectionModels = [] } = useQuery([getCollectionModels], getCollectionModels);
const { data: collectionModels = [], refetch: fetchCollectionModels } = useQuery(
[getCollectionModels],
getCollectionModels,
{
cacheTime: 5 * 60 * 1000,
enabled: false
}
);
const models = useMemo(() => {
const myModelList = myModels.map((item) => ({
id: item._id,
name: item.name,
icon: modelList.find((model) => model.model === item?.service?.modelName)?.icon || 'model'
icon: 'model' as any
}));
const collectionList = collectionModels
.map((item) => ({
@@ -80,9 +96,73 @@ const SlideBar = ({
// init history
useEffect(() => {
setTimeout(() => {
fetchMyModels();
fetchCollectionModels();
loadChatHistory();
}, 1000);
}, [loadChatHistory]);
}, [fetchCollectionModels, fetchMyModels, loadChatHistory]);
/**
* export md
*/
const onclickExportMd = useCallback(() => {
fileDownload({
text: history.map((item) => item.value).join('\n'),
type: 'text/markdown',
filename: 'chat.md'
});
}, [history]);
const getHistoryHtml = useCallback(() => {
const historyDom = document.getElementById('history');
if (!historyDom) return;
const dom = Array.from(historyDom.children).map((child, i) => {
const avatar = `<img src="${
child.querySelector<HTMLImageElement>('.avatar')?.src
}" alt="" />`;
const chatContent = child.querySelector<HTMLDivElement>('.markdown');
if (!chatContent) {
return '';
}
const chatContentClone = chatContent.cloneNode(true) as HTMLDivElement;
const codeHeader = chatContentClone.querySelectorAll('.code-header');
codeHeader.forEach((childElement: any) => {
childElement.remove();
});
return `<div class="chat-item">
${avatar}
${chatContentClone.outerHTML}
</div>`;
});
const html = htmlTemplate.replace('{{CHAT_CONTENT}}', dom.join('\n'));
return html;
}, []);
const onclickExportHtml = useCallback(() => {
const html = getHistoryHtml();
html &&
fileDownload({
text: html,
type: 'text/html',
filename: '聊天记录.html'
});
}, [getHistoryHtml]);
const onclickExportPdf = useCallback(() => {
const html = getHistoryHtml();
html &&
// @ts-ignore
html2pdf(html, {
margin: 0,
filename: `聊天记录.pdf`
});
}, [getHistoryHtml]);
const RenderHistory = () => (
<>
@@ -145,7 +225,7 @@ const SlideBar = ({
onClick: () => void;
children: JSX.Element | string;
}) => (
<Box px={3} mb={3}>
<Box px={3} mb={2}>
<Flex
alignItems={'center'}
p={2}
@@ -176,7 +256,7 @@ const SlideBar = ({
w={'90%'}
variant={'white'}
h={'40px'}
mb={4}
mb={2}
mx={'auto'}
leftIcon={<AddIcon />}
onClick={() => resetChat()}
@@ -238,7 +318,33 @@ const SlideBar = ({
</Accordion>
</Box>
<Divider my={4} colorScheme={useColorModeValue('gray', 'white')} />
<Divider my={3} colorScheme={useColorModeValue('gray', 'white')} />
{history.length > 0 && (
<Menu autoSelect={false}>
<MenuButton
mx={3}
mb={2}
p={2}
display={'flex'}
alignItems={'center'}
cursor={'pointer'}
borderRadius={'md'}
textAlign={'left'}
_hover={{
backgroundColor: 'rgba(255,255,255,0.2)'
}}
>
<MyIcon name="export" fill={'white'} w={'18px'} h={'18px'} mr={4} />
</MenuButton>
<MenuList fontSize={'sm'} color={'blackAlpha.800'}>
<MenuItem onClick={onclickExportHtml}>HTML格式</MenuItem>
<MenuItem onClick={onclickExportPdf}>PDF格式</MenuItem>
<MenuItem onClick={onclickExportMd}>Markdown格式</MenuItem>
</MenuList>
</Menu>
)}
<RenderButton onClick={() => router.push('/')}>
<>

View File

@@ -1,6 +1,5 @@
import React, { useCallback, useState, useRef, useMemo, useEffect } from 'react';
import { useRouter } from 'next/router';
import Image from 'next/image';
import { getInitChatSiteInfo, delChatRecordByIndex, postSaveChat } from '@/api/chat';
import type { InitChatResponse } from '@/api/response/chat';
import type { ChatItemType } from '@/types/chat';
@@ -16,32 +15,37 @@ import {
Menu,
MenuButton,
MenuList,
MenuItem
MenuItem,
Image,
Button,
Modal,
ModalOverlay,
ModalContent,
ModalBody,
ModalCloseButton
} from '@chakra-ui/react';
import { useToast } from '@/hooks/useToast';
import { useScreen } from '@/hooks/useScreen';
import { useQuery } from '@tanstack/react-query';
import { ModelNameEnum } from '@/constants/model';
import { OpenAiChatEnum } from '@/constants/model';
import dynamic from 'next/dynamic';
import { useGlobalStore } from '@/store/global';
import { useCopyData } from '@/utils/tools';
import { streamFetch } from '@/api/fetch';
import Icon from '@/components/Icon';
import MyIcon from '@/components/Icon';
import { throttle } from 'lodash';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 5);
import { Types } from 'mongoose';
import Markdown from '@/components/Markdown';
import { HUMAN_ICON, LOGO_ICON } from '@/constants/chat';
const SlideBar = dynamic(() => import('./components/SlideBar'));
const Empty = dynamic(() => import('./components/Empty'));
const Markdown = dynamic(() => import('@/components/Markdown'));
import styles from './index.module.scss';
const textareaMinH = '22px';
export type ChatSiteItemType = {
id: string;
status: 'loading' | 'finish';
} & ChatItemType;
@@ -65,12 +69,12 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
name: '',
avatar: '/icon/logo.png',
intro: '',
chatModel: '',
modelName: '',
chatModel: OpenAiChatEnum.GPT35,
history: []
}); // 聊天框整体数据
const [inputVal, setInputVal] = useState(''); // 输入的内容
const [inputVal, setInputVal] = useState(''); // user input prompt
const [showSystemPrompt, setShowSystemPrompt] = useState('');
const isChatting = useMemo(
() => chatData.history[chatData.history.length - 1]?.status === 'loading',
@@ -137,17 +141,15 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
setChatData({
...res,
history: res.history.map((item: any, i) => ({
obj: item.obj,
value: item.value,
id: item.id || `${nanoid()}-${i}`,
history: res.history.map((item) => ({
...item,
status: 'finish'
}))
});
if (isScroll && res.history.length > 0) {
setTimeout(() => {
scrollToBottom('auto');
}, 1200);
}, 1000);
}
} catch (e: any) {
toast({
@@ -192,27 +194,20 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
// gpt 对话
const gptChatPrompt = useCallback(
async (prompts: ChatSiteItemType) => {
const urlMap: Record<string, string> = {
[ModelNameEnum.GPT35]: '/api/chat/chatGpt',
[ModelNameEnum.VECTOR_GPT]: '/api/chat/vectorGpt'
};
if (!urlMap[chatData.modelName]) return Promise.reject('找不到模型');
async (prompts: ChatSiteItemType[]) => {
// create abort obj
const abortSignal = new AbortController();
controller.current = abortSignal;
isResetPage.current = false;
const prompt = {
obj: prompts.obj,
value: prompts.value
obj: prompts[0].obj,
value: prompts[0].value
};
// 流请求,获取数据
const responseText = await streamFetch({
url: urlMap[chatData.modelName],
let { responseText, systemPrompt, newChatId } = await streamFetch({
url: '/api/chat/chat',
data: {
prompt,
chatId,
@@ -239,17 +234,23 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
return;
}
let newChatId = '';
// 保存对话信息
// save chat record
try {
newChatId = await postSaveChat({
newChatId, // 如果有newChatId会自动以这个Id创建对话框
modelId,
chatId,
prompts: [
prompt,
{
_id: prompts[0]._id,
obj: 'Human',
value: prompt.value
},
{
_id: prompts[1]._id,
obj: 'AI',
value: responseText
value: responseText,
systemPrompt
}
]
});
@@ -273,12 +274,13 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
if (index !== state.history.length - 1) return item;
return {
...item,
status: 'finish'
status: 'finish',
systemPrompt
};
})
}));
},
[chatData.modelName, chatId, generatingMessage, modelId, router, toast]
[chatId, generatingMessage, modelId, router, toast]
);
/**
@@ -307,13 +309,13 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
const newChatList: ChatSiteItemType[] = [
...chatData.history,
{
id: nanoid(),
_id: String(new Types.ObjectId()),
obj: 'Human',
value: val,
status: 'finish'
},
{
id: nanoid(),
_id: String(new Types.ObjectId()),
obj: 'AI',
value: '',
status: 'loading'
@@ -333,7 +335,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
}, 100);
try {
await gptChatPrompt(newChatList[newChatList.length - 2]);
await gptChatPrompt(newChatList.slice(-2));
} catch (err: any) {
toast({
title: typeof err === 'string' ? err : err?.message || '聊天出错了~',
@@ -353,11 +355,11 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
// 删除一句话
const delChatRecord = useCallback(
async (index: number) => {
async (index: number, id: string) => {
setLoading(true);
try {
// 删除数据库最后一句
await delChatRecordByIndex(chatId, index);
await delChatRecordByIndex(chatId, id);
setChatData((state) => ({
...state,
@@ -393,7 +395,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
// 更新流中断对象
useEffect(() => {
return () => {
// eslint-disable-next-line react-hooks/exhaustive-deps
isResetPage.current = true;
controller.current?.abort();
};
}, []);
@@ -410,6 +412,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
resetChat={resetChat}
chatId={chatId}
modelId={modelId}
history={chatData.history}
onClose={onCloseSlider}
/>
</Box>
@@ -425,11 +428,11 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
px={7}
>
<Box onClick={onOpenSlider}>
<Icon
<MyIcon
name={'menu'}
w={'20px'}
h={'20px'}
fill={useColorModeValue('blackAlpha.700', 'white')}
color={useColorModeValue('blackAlpha.700', 'white')}
/>
</Box>
<Box>{chatData?.name}</Box>
@@ -441,6 +444,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
resetChat={resetChat}
chatId={chatId}
modelId={modelId}
history={chatData.history}
onClose={onCloseSlider}
/>
</DrawerContent>
@@ -454,10 +458,18 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
flexDirection={'column'}
>
{/* 聊天内容 */}
<Box ref={ChatBox} pb={[4, 0]} flex={'1 0 0'} h={0} w={'100%'} overflowY={'auto'}>
<Box
id={'history'}
ref={ChatBox}
pb={[4, 0]}
flex={'1 0 0'}
h={0}
w={'100%'}
overflowY={'auto'}
>
{chatData.history.map((item, index) => (
<Box
key={item.id}
key={item._id}
py={media(9, 6)}
px={media(4, 2)}
backgroundColor={
@@ -470,29 +482,43 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
<Menu autoSelect={false}>
<MenuButton as={Box} mr={media(4, 1)} cursor={'pointer'}>
<Image
src={
item.obj === 'Human'
? '/icon/human.png'
: chatData.avatar || '/icon/logo.png'
}
className="avatar"
src={item.obj === 'Human' ? HUMAN_ICON : chatData.avatar || LOGO_ICON}
alt="avatar"
width={media(30, 20)}
height={media(30, 20)}
w={['20px', '30px']}
maxH={'50px'}
objectFit={'contain'}
/>
</MenuButton>
<MenuList fontSize={'sm'}>
<MenuItem onClick={() => onclickCopy(item.value)}></MenuItem>
<MenuItem onClick={() => delChatRecord(index)}></MenuItem>
<MenuItem onClick={() => delChatRecord(index, item._id)}></MenuItem>
</MenuList>
</Menu>
<Box flex={'1 0 0'} w={0} overflow={'hidden'} id={`chat${index}`}>
<Box flex={'1 0 0'} w={0} overflow={'hidden'}>
{item.obj === 'AI' ? (
<Markdown
source={item.value}
isChatting={isChatting && index === chatData.history.length - 1}
/>
<>
<Markdown
source={item.value}
isChatting={isChatting && index === chatData.history.length - 1}
/>
{item.systemPrompt && (
<Button
size={'xs'}
mt={2}
fontWeight={'normal'}
colorScheme={'gray'}
variant={'outline'}
onClick={() => setShowSystemPrompt(item.systemPrompt || '')}
>
</Button>
)}
</>
) : (
<Box whiteSpace={'pre-wrap'}>{item.value}</Box>
<Box className="markdown" whiteSpace={'pre-wrap'}>
<Box as={'p'}>{item.value}</Box>
</Box>
)}
</Box>
{isPc && (
@@ -514,7 +540,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
_hover={{
color: 'red.600'
}}
onClick={() => delChatRecord(index)}
onClick={() => delChatRecord(index, item._id)}
/>
</Flex>
)}
@@ -585,7 +611,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
bottom={'15px'}
>
{isChatting ? (
<Icon
<MyIcon
className={styles.stopIcon}
width={['22px', '25px']}
height={['22px', '25px']}
@@ -597,7 +623,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
}}
/>
) : (
<Icon
<MyIcon
name={'chatSend'}
width={['18px', '20px']}
height={['18px', '20px']}
@@ -610,6 +636,19 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
</Box>
</Box>
</Flex>
{/* system prompt show modal */}
{
<Modal isOpen={!!showSystemPrompt} onClose={() => setShowSystemPrompt('')}>
<ModalOverlay />
<ModalContent maxW={'min(90vw, 600px)'} pr={2} maxH={'80vh'} overflowY={'auto'}>
<ModalCloseButton />
<ModalBody pt={10} fontSize={'sm'} whiteSpace={'pre-wrap'} textAlign={'justify'}>
{showSystemPrompt}
</ModalBody>
</ModalContent>
</Modal>
}
</Flex>
);
};

View File

@@ -22,9 +22,9 @@ const Home = () => {
<Card p={5} mt={4} textAlign={'center'}>
<Box>
{/* <Link href="https://beian.miit.gov.cn/" target="_blank">
浙B2-20080101
</Link> */}
<Link href="https://beian.miit.gov.cn/" target="_blank">
ICP备2023011255号-1
</Link>
</Box>
<Box>Made by FastGpt Team.</Box>
</Card>

View File

@@ -78,7 +78,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
FastGPT
</Box>
<form onSubmit={handleSubmit(onclickFindPassword)}>
<FormControl mt={8} isInvalid={!!errors.username}>
<FormControl mt={5} isInvalid={!!errors.username}>
<Input
placeholder="邮箱/手机号"
size={mediaLgMd}
@@ -95,7 +95,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.username && errors.username.message}
</FormErrorMessage>
</FormControl>
<FormControl mt={8} isInvalid={!!errors.username}>
<FormControl mt={5} isInvalid={!!errors.username}>
<Flex>
<Input
flex={1}
@@ -121,7 +121,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.code && errors.code.message}
</FormErrorMessage>
</FormControl>
<FormControl mt={8} isInvalid={!!errors.password}>
<FormControl mt={5} isInvalid={!!errors.password}>
<Input
type={'password'}
placeholder="新密码"
@@ -142,7 +142,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.password && errors.password.message}
</FormErrorMessage>
</FormControl>
<FormControl mt={8} isInvalid={!!errors.password2}>
<FormControl mt={5} isInvalid={!!errors.password2}>
<Input
type={'password'}
placeholder="确认密码"
@@ -168,7 +168,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
</Box>
<Button
type="submit"
mt={8}
mt={5}
w={'100%'}
size={mediaLgMd}
colorScheme="blue"

View File

@@ -8,6 +8,7 @@ import type { ResLogin } from '@/api/response/user';
import { useScreen } from '@/hooks/useScreen';
import { useToast } from '@/hooks/useToast';
import { useRouter } from 'next/router';
import { postCreateModel } from '@/api/model';
interface Props {
loginSuccess: (e: ResLogin) => void;
@@ -64,6 +65,10 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
title: `注册成功`,
status: 'success'
});
// aut register a model
postCreateModel({
name: '模型1'
});
} catch (error: any) {
toast({
title: error.message || '注册异常',
@@ -81,7 +86,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
FastGPT
</Box>
<form onSubmit={handleSubmit(onclickRegister)}>
<FormControl mt={8} isInvalid={!!errors.username}>
<FormControl mt={5} isInvalid={!!errors.username}>
<Input
placeholder="邮箱/手机号"
size={mediaLgMd}
@@ -98,7 +103,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.username && errors.username.message}
</FormErrorMessage>
</FormControl>
<FormControl mt={8} isInvalid={!!errors.username}>
<FormControl mt={5} isInvalid={!!errors.username}>
<Flex>
<Input
flex={1}
@@ -124,7 +129,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.code && errors.code.message}
</FormErrorMessage>
</FormControl>
<FormControl mt={8} isInvalid={!!errors.password}>
<FormControl mt={5} isInvalid={!!errors.password}>
<Input
type={'password'}
placeholder="密码"
@@ -145,7 +150,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.password && errors.password.message}
</FormErrorMessage>
</FormControl>
<FormControl mt={8} isInvalid={!!errors.password2}>
<FormControl mt={5} isInvalid={!!errors.password2}>
<Input
type={'password'}
placeholder="确认密码"
@@ -171,7 +176,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
</Box>
<Button
type="submit"
mt={8}
mt={5}
w={'100%'}
size={mediaLgMd}
colorScheme="blue"

View File

@@ -45,12 +45,19 @@ const Login = () => {
}, [router]);
return (
<Box className={styles.loginPage} h={'100%'} p={isPc ? '10vh 10vw' : 0}>
<Flex
alignItems={'center'}
justifyContent={'center'}
className={styles.loginPage}
h={'100%'}
px={[0, '10vw']}
>
<Flex
maxW={'1240px'}
m={'auto'}
backgroundColor={'#fff'}
height="100%"
w={'100%'}
maxW={'1240px'}
maxH={['auto', '660px']}
backgroundColor={'#fff'}
alignItems={'center'}
justifyContent={'center'}
p={10}
@@ -83,7 +90,7 @@ const Login = () => {
<DynamicComponent type={pageType} />
</Box>
</Flex>
</Box>
</Flex>
);
};

View File

@@ -118,11 +118,11 @@ const InputDataModal = ({
px={6}
pb={2}
>
<Box flex={2} mr={[0, 4]} mb={[4, 0]} h={['230px', '100%']}>
<Box flex={1} mr={[0, 4]} mb={[4, 0]} h={['230px', '100%']}>
<Box h={'30px'}>{'匹配的知识点'}</Box>
<Textarea
placeholder={'匹配的知识点。这部分内容会被搜索,请把控内容的质量。最多 1000 字。'}
maxLength={2000}
placeholder={'匹配的知识点。这部分内容会被搜索,请把控内容的质量。最多 1500 字。'}
maxLength={1500}
resize={'none'}
h={'calc(100% - 30px)'}
{...register(`q`, {
@@ -130,13 +130,13 @@ const InputDataModal = ({
})}
/>
</Box>
<Box flex={3} h={['330px', '100%']}>
<Box flex={1} h={['330px', '100%']}>
<Box h={'30px'}></Box>
<Textarea
placeholder={
'补充知识。这部分内容不会被搜索,但会作为"匹配的知识点"的内容补充,你可以讲一些细节的内容填写在这里。最多 2000 字。'
'补充知识。这部分内容不会被搜索,但会作为"匹配的知识点"的内容补充,你可以讲一些细节的内容填写在这里。最多 1500 字。'
}
maxLength={2000}
maxLength={1500}
resize={'none'}
h={'calc(100% - 30px)'}
{...register('a')}

View File

@@ -16,8 +16,10 @@ import {
MenuButton,
MenuList,
MenuItem,
Input
Input,
Tooltip
} from '@chakra-ui/react';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import type { BoxProps } from '@chakra-ui/react';
import type { ModelDataItemType } from '@/types/model';
import { ModelDataStatusMap } from '@/constants/model';
@@ -45,9 +47,10 @@ const ModelDataCard = ({ modelId, isOwner }: { modelId: string; isOwner: boolean
const [searchText, setSearchText] = useState('');
const tdStyles = useRef<BoxProps>({
fontSize: 'xs',
minW: '150px',
maxW: '500px',
whiteSpace: 'pre-wrap',
maxH: '250px',
whiteSpace: 'pre-wrap',
overflowY: 'auto'
});
const {
@@ -132,7 +135,7 @@ const ModelDataCard = ({ modelId, isOwner }: { modelId: string; isOwner: boolean
<>
<Flex>
<Box fontWeight={'bold'} fontSize={'lg'} flex={1} mr={2}>
: {total}
: {total}
</Box>
{isOwner && (
<>
@@ -207,7 +210,16 @@ const ModelDataCard = ({ modelId, isOwner }: { modelId: string; isOwner: boolean
<Table variant={'simple'} w={'100%'}>
<Thead>
<Tr>
<Th>{'匹配的知识点'}</Th>
<Th>
<Tooltip
label={
'对话时,会将用户的问题和知识库的 "匹配知识点" 进行比较,找到最相似的前 n 条记录,将这些记录的 "匹配知识点"+"补充知识点" 作为 chatgpt 的系统提示词。'
}
>
<QuestionOutlineIcon ml={1} />
</Tooltip>
</Th>
<Th></Th>
<Th></Th>
{isOwner && <Th></Th>}

View File

@@ -21,21 +21,19 @@ import {
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import type { ModelSchema } from '@/types/mongoSchema';
import { UseFormReturn } from 'react-hook-form';
import { modelList, ModelVectorSearchModeMap } from '@/constants/model';
import { ChatModelMap, ModelVectorSearchModeMap, chatModelList } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { useConfirm } from '@/hooks/useConfirm';
import { useSelectFile } from '@/hooks/useSelectFile';
import { useToast } from '@/hooks/useToast';
import { fileToBase64 } from '@/utils/file';
import { fileToBase64, compressImg } from '@/utils/file';
const ModelEditForm = ({
formHooks,
canTrain,
isOwner,
handleDelModel
}: {
formHooks: UseFormReturn<ModelSchema>;
canTrain: boolean;
isOwner: boolean;
handleDelModel: () => void;
}) => {
@@ -54,17 +52,19 @@ const ModelEditForm = ({
async (e: File[]) => {
const file = e[0];
if (!file) return;
try {
const base64 = await compressImg({
file
});
if (file.size > 100 * 1024) {
return toast({
title: '头像需小于 100kb',
setValue('avatar', base64);
setRefresh((state) => !state);
} catch (err: any) {
toast({
title: typeof err === 'string' ? err : '头像选择异常',
status: 'warning'
});
}
const base64 = (await fileToBase64(file)) as string;
setValue('avatar', base64);
setRefresh((state) => !state);
},
[setValue, toast]
);
@@ -73,6 +73,12 @@ const ModelEditForm = ({
<>
<Card p={4}>
<Box fontWeight={'bold'}></Box>
<Flex alignItems={'center'} mt={4}>
<Box flex={'0 0 80px'} w={0}>
modelId:
</Box>
<Box>{getValues('_id')}</Box>
</Flex>
<Flex mt={4} alignItems={'center'}>
<Box flex={'0 0 80px'} w={0}>
:
@@ -101,27 +107,25 @@ const ModelEditForm = ({
></Input>
</Flex>
</FormControl>
<Flex alignItems={'center'} mt={5}>
<Box flex={'0 0 80px'} w={0}>
modelId:
:
</Box>
<Box>{getValues('_id')}</Box>
</Flex>
<Flex alignItems={'center'} mt={5}>
<Box flex={'0 0 80px'} w={0}>
:
</Box>
<Box>{modelList.find((item) => item.model === getValues('service.modelName'))?.name}</Box>
<Select isDisabled={!isOwner} {...register('chat.chatModel')}>
{chatModelList.map((item) => (
<option key={item.chatModel} value={item.chatModel}>
{item.name}
</option>
))}
</Select>
</Flex>
<Flex alignItems={'center'} mt={5}>
<Box flex={'0 0 80px'} w={0}>
:
</Box>
<Box>
{formatPrice(
modelList.find((item) => item.model === getValues('service.modelName'))?.price || 0,
1000
)}
{formatPrice(ChatModelMap[getValues('chat.chatModel')]?.price, 1000)}
/1K tokens()
</Box>
</Flex>
@@ -163,15 +167,15 @@ const ModelEditForm = ({
min={0}
max={10}
step={1}
value={getValues('temperature')}
value={getValues('chat.temperature')}
isDisabled={!isOwner}
onChange={(e) => {
setValue('temperature', e);
setValue('chat.temperature', e);
setRefresh(!refresh);
}}
>
<SliderMark
value={getValues('temperature')}
value={getValues('chat.temperature')}
textAlign="center"
bg="blue.500"
color="white"
@@ -181,7 +185,7 @@ const ModelEditForm = ({
fontSize={'xs'}
transform={'translate(-50%, -200%)'}
>
{getValues('temperature')}
{getValues('chat.temperature')}
</SliderMark>
<SliderTrack>
<SliderFilledTrack />
@@ -190,35 +194,43 @@ const ModelEditForm = ({
</Slider>
</Flex>
</FormControl>
{canTrain && (
<FormControl mt={4}>
<Flex alignItems={'center'}>
<Box flex={'0 0 70px'}></Box>
<Select
isDisabled={!isOwner}
{...register('search.mode', { required: '搜索模式不能为空' })}
>
{Object.entries(ModelVectorSearchModeMap).map(([key, { text }]) => (
<option key={key} value={key}>
{text}
</option>
))}
</Select>
</Flex>
</FormControl>
<Flex mt={4} alignItems={'center'}>
<Box mr={4}></Box>
<Switch
isDisabled={!isOwner}
isChecked={getValues('chat.useKb')}
onChange={() => {
setValue('chat.useKb', !getValues('chat.useKb'));
setRefresh(!refresh);
}}
/>
</Flex>
{getValues('chat.useKb') && (
<Flex mt={4} alignItems={'center'}>
<Box mr={4} whiteSpace={'nowrap'}>
&emsp;
</Box>
<Select
isDisabled={!isOwner}
{...register('chat.searchMode', { required: '搜索模式不能为空' })}
>
{Object.entries(ModelVectorSearchModeMap).map(([key, { text }]) => (
<option key={key} value={key}>
{text}
</option>
))}
</Select>
</Flex>
)}
<Box mt={4}>
<Box mb={1}></Box>
<Textarea
rows={8}
maxLength={-1}
isDisabled={!isOwner}
placeholder={
canTrain
? '训练的模型会根据知识库内容,生成一部分系统提示词,因此在对话时需要消耗更多的 tokens。你可以增加提示词让效果更符合预期。例如: \n1. 请根据知识库内容回答用户问题。\n2. 知识库是电影《铃芽之旅》的内容,根据知识库内容回答。无关问题,拒绝回复!'
: '模型默认的 prompt 词,通过调整该内容,可以生成一个限定范围的模型。\n注意改功能会影响对话的整体朝向'
}
{...register('systemPrompt')}
placeholder={'模型默认的 prompt 词,通过调整该内容,可以引导模型聊天方向。'}
{...register('chat.systemPrompt')}
/>
</Box>
</Card>

View File

@@ -20,8 +20,7 @@ import { useMutation } from '@tanstack/react-query';
import { postModelDataSplitData } from '@/api/model';
import { formatPrice } from '@/utils/user';
import Radio from '@/components/Radio';
import { splitText } from '@/utils/file';
import { countChatTokens } from '@/utils/tools';
import { splitText_token } from '@/utils/file';
const fileExtension = '.txt,.doc,.docx,.pdf,.md';
@@ -49,7 +48,7 @@ const SelectFileModal = ({
onSuccess: () => void;
modelId: string;
}) => {
const [selecting, setSelecting] = useState(false);
const [btnLoading, setBtnLoading] = useState(false);
const { toast } = useToast();
const [prompt, setPrompt] = useState('');
const { File, onOpen } = useSelectFile({ fileType: fileExtension, multiple: true });
@@ -62,17 +61,21 @@ const SelectFileModal = ({
const { openConfirm, ConfirmChild } = useConfirm({
content: `确认导入该文件,需要一定时间进行拆解,该任务无法终止!如果余额不足,未完成的任务会被直接清除。一共 ${
splitRes.chunks.length
},大约 ${splitRes.tokens || '数量太多,未计算'} 个tokens, 约 ${formatPrice(
splitRes.tokens * modeMap[mode].price
)}`
}${
splitRes.tokens
? `大约 ${splitRes.tokens} 个tokens, 约 ${formatPrice(
splitRes.tokens * modeMap[mode].price
)}`
: ''
}`
});
const onSelectFile = useCallback(
async (e: File[]) => {
setSelecting(true);
async (files: File[]) => {
setBtnLoading(true);
try {
let promise = Promise.resolve();
e.map((file) => {
files.forEach((file) => {
promise = promise.then(async () => {
const extension = file?.name?.split('.')?.pop()?.toLowerCase();
let text = '';
@@ -101,7 +104,7 @@ const SelectFileModal = ({
status: 'error'
});
}
setSelecting(false);
setBtnLoading(false);
},
[toast]
);
@@ -131,46 +134,49 @@ const SelectFileModal = ({
}
});
const onclickImport = useCallback(() => {
const chunks = fileTextArr
const onclickImport = useCallback(async () => {
setBtnLoading(true);
let promise = Promise.resolve();
const splitRes = fileTextArr
.filter((item) => item)
.map((item) =>
splitText({
splitText_token({
text: item,
...modeMap[mode]
})
)
.flat();
let tokens: number[] = [];
// just count 100 sets of tokens
if (chunks.length < 100) {
tokens = chunks.map((item) =>
countChatTokens({ messages: [{ role: 'system', content: item }] })
);
}
setSplitRes({
tokens: tokens.reduce((sum, item) => sum + item, 0),
chunks
tokens: splitRes.reduce((sum, item) => sum + item.tokens, 0),
chunks: splitRes.map((item) => item.chunks).flat()
});
setBtnLoading(false);
await promise;
openConfirm(mutate)();
}, [fileTextArr, mode, mutate, openConfirm]);
return (
<Modal isOpen={true} onClose={onClose} isCentered>
<ModalOverlay />
<ModalContent maxW={'min(1000px, 90vw)'} m={0} position={'relative'} h={'90vh'}>
<ModalContent
display={'flex'}
maxW={'min(1000px, 90vw)'}
m={0}
position={'relative'}
h={'90vh'}
>
<ModalHeader></ModalHeader>
<ModalCloseButton />
<ModalBody
flex={1}
h={0}
display={'flex'}
flexDirection={'column'}
p={0}
h={'100%'}
alignItems={'center'}
justifyContent={'center'}
fontSize={'sm'}
@@ -232,7 +238,7 @@ const SelectFileModal = ({
</ModalBody>
<Flex px={6} pt={2} pb={4}>
<Button isLoading={selecting} onClick={onOpen}>
<Button isLoading={btnLoading} onClick={onOpen}>
</Button>
<Box flex={1}></Box>
@@ -240,8 +246,8 @@ const SelectFileModal = ({
</Button>
<Button
isLoading={isLoading}
isDisabled={selecting || fileTextArr[0] === ''}
isLoading={isLoading || btnLoading}
isDisabled={isLoading || btnLoading || fileTextArr[0] === ''}
onClick={onclickImport}
>

View File

@@ -5,7 +5,7 @@ import type { ModelSchema } from '@/types/mongoSchema';
import { Card, Box, Flex, Button, Tag, Grid } from '@chakra-ui/react';
import { useToast } from '@/hooks/useToast';
import { useForm } from 'react-hook-form';
import { formatModelStatus, modelList, defaultModel } from '@/constants/model';
import { formatModelStatus, defaultModel } from '@/constants/model';
import { useGlobalStore } from '@/store/global';
import { useScreen } from '@/hooks/useScreen';
import { useQuery } from '@tanstack/react-query';
@@ -27,11 +27,6 @@ const ModelDetail = ({ modelId }: { modelId: string }) => {
defaultValues: model
});
const canTrain = useMemo(() => {
const openai = modelList.find((item) => item.model === model?.service.modelName);
return !!(openai && openai.trainName);
}, [model]);
const isOwner = useMemo(() => model.userId === userInfo?._id, [model.userId, userInfo?._id]);
/* 加载模型数据 */
@@ -86,11 +81,8 @@ const ModelDetail = ({ modelId }: { modelId: string }) => {
await putModelById(data._id, {
name: data.name,
avatar: data.avatar || '/icon/logo.png',
systemPrompt: data.systemPrompt,
temperature: data.temperature,
search: data.search,
chat: data.chat,
share: data.share,
service: data.service,
security: data.security
});
toast({
@@ -171,11 +163,15 @@ const ModelDetail = ({ modelId }: { modelId: string }) => {
</Tag>
</Flex>
<Box mt={4} textAlign={'right'}>
<Button variant={'outline'} onClick={handlePreviewChat}>
<Button variant={'outline'} size={'sm'} onClick={handlePreviewChat}>
</Button>
{isOwner && (
<Button ml={4} onClick={formHooks.handleSubmit(saveSubmitSuccess, saveSubmitError)}>
<Button
ml={4}
size={'sm'}
onClick={formHooks.handleSubmit(saveSubmitSuccess, saveSubmitError)}
>
</Button>
)}
@@ -184,16 +180,11 @@ const ModelDetail = ({ modelId }: { modelId: string }) => {
)}
</Card>
<Grid mt={5} gridTemplateColumns={['1fr', '1fr 1fr']} gridGap={5}>
<ModelEditForm
formHooks={formHooks}
handleDelModel={handleDelModel}
canTrain={canTrain}
isOwner={isOwner}
/>
<ModelEditForm formHooks={formHooks} handleDelModel={handleDelModel} isOwner={isOwner} />
{canTrain && !!model._id && (
{modelId && (
<Card p={4} gridColumnStart={[1, 1]} gridColumnEnd={[2, 3]}>
<ModelDataCard modelId={model._id} isOwner={isOwner} />
<ModelDataCard modelId={modelId} isOwner={isOwner} />
</Card>
)}
</Grid>

View File

@@ -1,138 +0,0 @@
import React, { Dispatch, useState, useCallback, useMemo } from 'react';
import {
Modal,
ModalOverlay,
ModalContent,
ModalHeader,
ModalFooter,
ModalBody,
ModalCloseButton,
FormControl,
FormErrorMessage,
Button,
useToast,
Input,
Select,
Box
} from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { postCreateModel } from '@/api/model';
import type { ModelSchema } from '@/types/mongoSchema';
import { modelList } from '@/constants/model';
import { formatPrice } from '@/utils/user';
interface CreateFormType {
name: string;
serviceModelName: string;
}
const CreateModel = ({
setCreateModelOpen,
onSuccess
}: {
setCreateModelOpen: Dispatch<boolean>;
onSuccess: Dispatch<ModelSchema>;
}) => {
const [requesting, setRequesting] = useState(false);
const [refresh, setRefresh] = useState(false);
const toast = useToast({
duration: 2000,
position: 'top'
});
const {
getValues,
register,
handleSubmit,
formState: { errors }
} = useForm<CreateFormType>({
defaultValues: {
serviceModelName: modelList[0].model
}
});
const handleCreateModel = useCallback(
async (data: CreateFormType) => {
setRequesting(true);
try {
const res = await postCreateModel(data);
toast({
title: '创建成功',
status: 'success'
});
onSuccess(res);
setCreateModelOpen(false);
} catch (err: any) {
toast({
title: typeof err === 'string' ? err : err.message || '出现了意外',
status: 'error'
});
}
setRequesting(false);
},
[onSuccess, setCreateModelOpen, toast]
);
return (
<>
<Modal isOpen={true} onClose={() => setCreateModelOpen(false)}>
<ModalOverlay />
<ModalContent>
<ModalHeader></ModalHeader>
<ModalCloseButton />
<ModalBody>
<FormControl mb={8} isInvalid={!!errors.name}>
<Input
placeholder="模型名称"
{...register('name', {
required: '模型名不能为空'
})}
/>
<FormErrorMessage position={'absolute'} fontSize="xs">
{!!errors.name && errors.name.message}
</FormErrorMessage>
</FormControl>
<FormControl isInvalid={!!errors.serviceModelName}>
<Select
placeholder="选择基础模型类型"
{...register('serviceModelName', {
required: '底层模型不能为空',
onChange() {
setRefresh(!refresh);
}
})}
>
{modelList.map((item) => (
<option key={item.model} value={item.model}>
{item.name}
</option>
))}
</Select>
<FormErrorMessage position={'absolute'} fontSize="xs">
{!!errors.serviceModelName && errors.serviceModelName.message}
</FormErrorMessage>
</FormControl>
<Box mt={3} textAlign={'center'} fontSize={'sm'} color={'blackAlpha.600'}>
{formatPrice(
modelList.find((item) => item.model === getValues('serviceModelName'))?.price || 0,
1000
)}
/1K tokens()
</Box>
</ModalBody>
<ModalFooter>
<Button mr={3} colorScheme={'gray'} onClick={() => setCreateModelOpen(false)}>
</Button>
<Button isLoading={requesting} onClick={handleSubmit(handleCreateModel)}>
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</>
);
};
export default CreateModel;

View File

@@ -2,8 +2,8 @@ import React, { useEffect } from 'react';
import { Box, Button, Flex, Tag } from '@chakra-ui/react';
import type { ModelSchema } from '@/types/mongoSchema';
import { formatModelStatus } from '@/constants/model';
import dayjs from 'dayjs';
import { useRouter } from 'next/router';
import { ChatModelMap } from '@/constants/model';
const ModelPhoneList = ({
models,
@@ -42,12 +42,12 @@ const ModelPhoneList = ({
</Tag>
</Flex>
<Flex mt={5}>
<Box flex={'0 0 100px'}>: </Box>
<Box color={'blackAlpha.500'}>{dayjs(model.updateTime).format('YYYY-MM-DD HH:mm')}</Box>
<Box flex={'0 0 100px'}>: </Box>
<Box color={'blackAlpha.500'}>{ChatModelMap[model.chat.chatModel].name}</Box>
</Flex>
<Flex mt={5}>
<Box flex={'0 0 100px'}>AI模型: </Box>
<Box color={'blackAlpha.500'}>{model.service.modelName}</Box>
<Box flex={'0 0 100px'}>: </Box>
<Box color={'blackAlpha.500'}>{model.chat.temperature}</Box>
</Flex>
<Flex mt={5} justifyContent={'flex-end'}>
<Button

View File

@@ -13,10 +13,9 @@ import {
Box
} from '@chakra-ui/react';
import { formatModelStatus } from '@/constants/model';
import dayjs from 'dayjs';
import type { ModelSchema } from '@/types/mongoSchema';
import { useRouter } from 'next/router';
import { modelList } from '@/constants/model';
import { ChatModelMap } from '@/constants/model';
const ModelTable = ({
models = [],
@@ -33,18 +32,18 @@ const ModelTable = ({
dataIndex: 'name'
},
{
title: '模型类型',
title: '对话模型',
key: 'service',
render: (model: ModelSchema) => (
<Box fontWeight={'bold'} whiteSpace={'pre-wrap'} maxW={'200px'}>
{modelList.find((item) => item.model === model.service.modelName)?.name}
{ChatModelMap[model.chat.chatModel].name}
</Box>
)
},
{
title: '最后更新时间',
key: 'updateTime',
render: (item: ModelSchema) => dayjs(item.updateTime).format('YYYY-MM-DD HH:mm')
title: '温度',
key: 'temperature',
render: (model: ModelSchema) => <>{model.chat.temperature}</>
},
{
title: '状态',

View File

@@ -1,4 +1,4 @@
import React, { useState, useCallback } from 'react';
import React, { useCallback } from 'react';
import { Box, Button, Flex, Card } from '@chakra-ui/react';
import type { ModelSchema } from '@/types/mongoSchema';
import { useRouter } from 'next/router';
@@ -7,30 +7,37 @@ import ModelPhoneList from './components/ModelPhoneList';
import { useScreen } from '@/hooks/useScreen';
import { useQuery } from '@tanstack/react-query';
import { useLoading } from '@/hooks/useLoading';
import dynamic from 'next/dynamic';
import { useToast } from '@/hooks/useToast';
import { useUserStore } from '@/store/user';
const CreateModel = dynamic(() => import('./components/CreateModel'));
import { postCreateModel } from '@/api/model';
const modelList = () => {
const { toast } = useToast();
const { isPc } = useScreen();
const router = useRouter();
const { myModels, setMyModels, getMyModels } = useUserStore();
const [openCreateModel, setOpenCreateModel] = useState(false);
const { myModels, getMyModels } = useUserStore();
const { Loading, setIsLoading } = useLoading();
/* 加载模型 */
const { isLoading } = useQuery(['loadModels'], getMyModels);
/* 创建成功回调 */
const createModelSuccess = useCallback(
(data: ModelSchema) => {
setMyModels([data, ...myModels]);
},
[myModels, setMyModels]
);
const handleCreateModel = useCallback(async () => {
setIsLoading(true);
try {
const id = await postCreateModel({ name: `模型${myModels.length}` });
toast({
title: '创建成功',
status: 'success'
});
router.push(`/model/detail?modelId=${id}`);
} catch (err: any) {
toast({
title: typeof err === 'string' ? err : err.message || '出现了意外',
status: 'error'
});
}
setIsLoading(false);
}, [myModels.length, router, setIsLoading, toast]);
/* 点前往聊天预览页 */
const handlePreviewChat = useCallback(
@@ -61,7 +68,7 @@ const modelList = () => {
</Box>
<Button flex={'0 0 145px'} variant={'outline'} onClick={() => setOpenCreateModel(true)}>
<Button flex={'0 0 145px'} variant={'outline'} onClick={handleCreateModel}>
</Button>
</Flex>
@@ -74,10 +81,6 @@ const modelList = () => {
<ModelPhoneList models={myModels} handlePreviewChat={handlePreviewChat} />
)}
</Box>
{/* 创建弹窗 */}
{openCreateModel && (
<CreateModel setCreateModelOpen={setOpenCreateModel} onSuccess={createModelSuccess} />
)}
<Loading loading={isLoading} />
</Box>

View File

@@ -17,7 +17,8 @@ const ShareModelList = ({
return (
<>
{models.map((model) => (
<Box
<Flex
flexDirection={'column'}
key={model._id}
p={4}
border={'1px solid'}
@@ -36,7 +37,7 @@ const ShareModelList = ({
{model.name}
</Box>
</Flex>
<Box className={styles.intro} my={4} fontSize={'sm'} color={'blackAlpha.600'}>
<Box flex={1} className={styles.intro} my={4} fontSize={'sm'} color={'blackAlpha.600'}>
{model.share.intro || '这个模型没有介绍~'}
</Box>
<Flex justifyContent={'space-between'}>
@@ -74,7 +75,7 @@ const ShareModelList = ({
)}
</Box>
</Flex>
</Box>
</Flex>
))}
</>
);

View File

@@ -23,7 +23,7 @@ const modelList = () => {
});
const { data: collectionModels = [], refetch: refetchCollection } = useQuery(
[getCollectionModels],
['getCollectionModels'],
getCollectionModels
);

View File

@@ -85,25 +85,6 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
<ModalBody py={0}>
{!payId && (
<>
{/* 价格表 */}
{/* <TableContainer mb={4}>
<Table>
<Thead>
<Tr>
<Th>模型类型</Th>
<Th>价格(元/1K tokens包含所有上下文)</Th>
</Tr>
</Thead>
<Tbody>
{modelList.map((item, i) => (
<Tr key={item.model}>
<Td>{item.name}</Td>
<Td>{formatPrice(item.price, 1000)}</Td>
</Tr>
))}
</Tbody>
</Table>
</TableContainer> */}
<Grid gridTemplateColumns={'repeat(4,1fr)'} gridGap={5} mb={4}>
{[5, 10, 20, 50].map((item) => (
<Button

View File

@@ -1,5 +1,4 @@
import React, { useState } from 'react';
import Link from 'next/link';
import {
Card,
Box,
@@ -32,7 +31,7 @@ const OpenApi = () => {
data: apiKeys = [],
isLoading: isGetting,
refetch
} = useQuery([getOpenApiKeys], getOpenApiKeys);
} = useQuery(['getOpenApiKeys'], getOpenApiKeys);
const [apiKey, setApiKey] = useState('');
const { copyData } = useCopyData();

View File

@@ -47,7 +47,7 @@ const OpenApi = () => {
useQuery(['init'], initUserInfo);
const { data: { invitedAmount = 0, historyAmount = 0, residueAmount = 0 } = {} } = useQuery(
['getInvitedCountAmount'],
['getPromotionInitData'],
getPromotionInitData
);

View File

@@ -1,5 +0,0 @@
export enum OpenAiTuneStatusEnum {
cancelled = 'cancelled',
succeeded = 'succeeded',
pending = 'pending'
}

View File

@@ -23,7 +23,7 @@ export const openaiError: Record<string, string> = {
context_length_exceeded: '内容超长了,请重置对话',
Unauthorized: 'API-KEY 不合法',
rate_limit_reached: 'API被限制请稍后再试',
'Bad Request': 'Bad Request~ openai 异常',
'Bad Request': 'Bad Request~ 可能内容太多了',
'Bad Gateway': '网关异常,请重试'
};
export const openaiError2: Record<string, string> = {

View File

@@ -1,18 +1,21 @@
import { SplitData } from '@/service/mongo';
import { getOpenAIApi } from '@/service/utils/auth';
import { axiosConfig } from '@/service/utils/tools';
import { getOpenApiKey } from '../utils/openai';
import type { ChatCompletionRequestMessage } from 'openai';
import { ChatModelEnum } from '@/constants/model';
import { getApiKey } from '../utils/auth';
import { OpenAiChatEnum } from '@/constants/model';
import { pushSplitDataBill } from '@/service/events/pushBill';
import { generateVector } from './generateVector';
import { openaiError2 } from '../errorCode';
import { PgClient } from '@/service/pg';
import { ModelSplitDataSchema } from '@/types/mongoSchema';
import { modelServiceToolMap } from '../utils/chat';
import { ChatRoleEnum } from '@/constants/chat';
export async function generateQA(next = false): Promise<any> {
if (process.env.queueTask !== '1') {
fetch(process.env.parentUrl || '');
try {
fetch(process.env.parentUrl || '');
} catch (error) {
console.log('parentUrl fetch error', error);
}
return;
}
if (global.generatingQA === true && !next) return;
@@ -42,12 +45,12 @@ export async function generateQA(next = false): Promise<any> {
const textList: string[] = dataItem.textList.slice(-5);
// 获取 openapi Key
let userApiKey = '',
systemKey = '';
let userOpenAiKey = '',
systemAuthKey = '';
try {
const key = await getOpenApiKey(dataItem.userId);
userApiKey = key.userApiKey;
systemKey = key.systemKey;
const key = await getApiKey({ model: OpenAiChatEnum.GPT35, userId: dataItem.userId });
userOpenAiKey = key.userOpenAiKey;
systemAuthKey = key.systemAuthKey;
} catch (error: any) {
if (error?.code === 501) {
// 余额不够了, 清空该记录
@@ -65,55 +68,44 @@ export async function generateQA(next = false): Promise<any> {
const startTime = Date.now();
// 获取 openai 请求实例
const chatAPI = getOpenAIApi(userApiKey || systemKey);
const systemPrompt: ChatCompletionRequestMessage = {
role: 'system',
content: `你是出题人
// 请求 chatgpt 获取回答
const response = await Promise.allSettled(
textList.map((text) =>
modelServiceToolMap[OpenAiChatEnum.GPT35]
.chatCompletion({
apiKey: userOpenAiKey || systemAuthKey,
temperature: 0.8,
messages: [
{
obj: ChatRoleEnum.System,
value: `你是出题人
${dataItem.prompt || '下面是"一段长文本"'}
从中选出5至20个题目和答案,题目包含问答题,计算题,代码题等.答案详细.按格式返回: Q1:
从中选出5至20个题目和答案.答案详细.按格式返回: Q1:
A1:
Q2:
A2:
...`
};
// 请求 chatgpt 获取回答
const response = await Promise.allSettled(
textList.map((text) =>
chatAPI
.createChatCompletion(
{
model: ChatModelEnum.GPT35,
temperature: 0.8,
n: 1,
messages: [
systemPrompt,
{
role: 'user',
content: text
}
]
},
{
timeout: 180000,
...axiosConfig
}
)
.then((res) => {
const rawContent = res?.data.choices[0].message?.content || ''; // chatgpt 原本的回复
const result = splitText(res?.data.choices[0].message?.content || ''); // 格式化后的QA对
},
{
obj: 'Human',
value: text
}
],
stream: false
})
.then(({ totalTokens, responseText, responseMessages }) => {
const result = formatSplitText(responseText); // 格式化后的QA对
console.log(`split result length: `, result.length);
// 计费
pushSplitDataBill({
isPay: !userApiKey && result.length > 0,
isPay: !userOpenAiKey && result.length > 0,
userId: dataItem.userId,
type: 'QA',
text: systemPrompt.content + text + rawContent,
tokenLen: res.data.usage?.total_tokens || 0
textLen: responseMessages.map((item) => item.value).join('').length,
totalTokens
});
return {
rawContent,
rawContent: responseText,
result
};
})
@@ -186,7 +178,7 @@ A2:
/**
* 检查文本是否按格式返回
*/
function splitText(text: string) {
function formatSplitText(text: string) {
const regex = /Q\d+:(\s*)(.*)(\s*)A\d+:(\s*)([\s\S]*?)(?=Q|$)/g; // 匹配Q和A的正则表达式
const matches = text.matchAll(regex); // 获取所有匹配到的结果

View File

@@ -1,10 +1,15 @@
import { openaiCreateEmbedding, getOpenApiKey } from '../utils/openai';
import { openaiCreateEmbedding } from '../utils/chat/openai';
import { getApiKey } from '../utils/auth';
import { openaiError2 } from '../errorCode';
import { PgClient } from '@/service/pg';
export async function generateVector(next = false): Promise<any> {
if (process.env.queueTask !== '1') {
fetch(process.env.parentUrl || '');
try {
fetch(process.env.parentUrl || '');
} catch (error) {
console.log('parentUrl fetch error', error);
}
return;
}
@@ -36,11 +41,10 @@ export async function generateVector(next = false): Promise<any> {
dataId = dataItem.id;
// 获取 openapi Key
let userApiKey, systemKey;
let userOpenAiKey;
try {
const res = await getOpenApiKey(dataItem.userId);
userApiKey = res.userApiKey;
systemKey = res.systemKey;
const res = await getApiKey({ model: 'gpt-3.5-turbo', userId: dataItem.userId });
userOpenAiKey = res.userOpenAiKey;
} catch (error: any) {
if (error?.code === 501) {
await PgClient.delete('modelData', {
@@ -54,17 +58,16 @@ export async function generateVector(next = false): Promise<any> {
}
// 生成词向量
const { vector } = await openaiCreateEmbedding({
text: dataItem.q,
const { vectors } = await openaiCreateEmbedding({
textArr: [dataItem.q],
userId: dataItem.userId,
isPay: !userApiKey,
apiKey: userApiKey || systemKey
userOpenAiKey
});
// 更新 pg 向量和状态数据
await PgClient.update('modelData', {
values: [
{ key: 'vector', value: `[${vector}]` },
{ key: 'vector', value: `[${vectors[0]}]` },
{ key: 'status', value: `ready` }
],
where: [['id', dataId]]

View File

@@ -1,68 +1,54 @@
import { connectToDatabase, Bill, User } from '../mongo';
import {
modelList,
ChatModelEnum,
ModelNameEnum,
Model2ChatModelMap,
embeddingModel
} from '@/constants/model';
import { ChatModelMap, OpenAiChatEnum, ChatModelType, embeddingModel } from '@/constants/model';
import { BillTypeEnum } from '@/constants/user';
import { countChatTokens } from '@/utils/tools';
export const pushChatBill = async ({
isPay,
modelName,
chatModel,
userId,
chatId,
messages
textLen,
tokens
}: {
isPay: boolean;
modelName: `${ModelNameEnum}`;
chatModel: ChatModelType;
userId: string;
chatId?: '' | string;
messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
textLen: number;
tokens: number;
}) => {
console.log(`chat generate success. text len: ${textLen}. token len: ${tokens}. pay:${isPay}`);
if (!isPay) return;
let billId = '';
try {
// 计算 token 数量
const tokens = countChatTokens({ model: Model2ChatModelMap[modelName] as any, messages });
const text = messages.map((item) => item.content).join('');
await connectToDatabase();
console.log(
`chat generate success. text len: ${text.length}. token len: ${tokens}. pay:${isPay}`
);
// 计算价格
const unitPrice = ChatModelMap[chatModel]?.price || 5;
const price = unitPrice * tokens;
if (isPay) {
await connectToDatabase();
try {
// 插入 Bill 记录
const res = await Bill.create({
userId,
type: 'chat',
modelName: chatModel,
chatId: chatId ? chatId : undefined,
textLen,
tokenLen: tokens,
price
});
billId = res._id;
// 获取模型单价格
const modelItem = modelList.find((item) => item.model === modelName);
// 计算价格
const unitPrice = modelItem?.price || 5;
const price = unitPrice * tokens;
try {
// 插入 Bill 记录
const res = await Bill.create({
userId,
type: 'chat',
modelName,
chatId: chatId ? chatId : undefined,
textLen: text.length,
tokenLen: tokens,
price
});
billId = res._id;
// 账号扣费
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
// 账号扣费
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
} catch (error) {
console.log(error);
@@ -72,55 +58,49 @@ export const pushChatBill = async ({
export const pushSplitDataBill = async ({
isPay,
userId,
tokenLen,
text,
totalTokens,
textLen,
type
}: {
isPay: boolean;
userId: string;
tokenLen: number;
text: string;
totalTokens: number;
textLen: number;
type: `${BillTypeEnum}`;
}) => {
await connectToDatabase();
console.log(
`splitData generate success. text len: ${textLen}. token len: ${totalTokens}. pay:${isPay}`
);
if (!isPay) return;
let billId;
try {
console.log(
`splitData generate success. text len: ${text.length}. token len: ${tokenLen}. pay:${isPay}`
);
await connectToDatabase();
if (isPay) {
try {
// 获取模型单价格, 都是用 gpt35 拆分
const modelItem = modelList.find((item) => item.model === ChatModelEnum.GPT35);
const unitPrice = modelItem?.price || 3;
// 计算价格
const price = unitPrice * tokenLen;
// 获取模型单价格, 都是用 gpt35 拆分
const unitPrice = ChatModelMap[OpenAiChatEnum.GPT35].price || 3;
// 计算价格
const price = unitPrice * totalTokens;
// 插入 Bill 记录
const res = await Bill.create({
userId,
type,
modelName: ChatModelEnum.GPT35,
textLen: text.length,
tokenLen,
price
});
billId = res._id;
// 插入 Bill 记录
const res = await Bill.create({
userId,
type,
modelName: OpenAiChatEnum.GPT35,
textLen,
tokenLen: totalTokens,
price
});
billId = res._id;
// 账号扣费
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
}
// 账号扣费
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log(error);
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
};
@@ -135,41 +115,40 @@ export const pushGenerateVectorBill = async ({
text: string;
tokenLen: number;
}) => {
await connectToDatabase();
console.log(
`vector generate success. text len: ${text.length}. token len: ${tokenLen}. pay:${isPay}`
);
if (!isPay) return;
let billId;
try {
console.log(
`vector generate success. text len: ${text.length}. token len: ${tokenLen}. pay:${isPay}`
);
await connectToDatabase();
if (isPay) {
try {
const unitPrice = 0.4;
// 计算价格. 至少为1
let price = unitPrice * tokenLen;
price = price > 1 ? price : 1;
try {
const unitPrice = 0.4;
// 计算价格. 至少为1
let price = unitPrice * tokenLen;
price = price > 1 ? price : 1;
// 插入 Bill 记录
const res = await Bill.create({
userId,
type: BillTypeEnum.vector,
modelName: embeddingModel,
textLen: text.length,
tokenLen,
price
});
billId = res._id;
// 插入 Bill 记录
const res = await Bill.create({
userId,
type: BillTypeEnum.vector,
modelName: embeddingModel,
textLen: text.length,
tokenLen,
price
});
billId = res._id;
// 账号扣费
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
// 账号扣费
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
} catch (error) {
console.log(error);

View File

@@ -1,5 +1,5 @@
import { Schema, model, models, Model } from 'mongoose';
import { modelList } from '@/constants/model';
import { ChatModelMap } from '@/constants/model';
import { BillSchema as BillType } from '@/types/mongoSchema';
import { BillTypeMap } from '@/constants/user';
@@ -16,7 +16,7 @@ const BillSchema = new Schema({
},
modelName: {
type: String,
enum: [...modelList.map((item) => item.model), 'text-embedding-ada-002'],
enum: [...Object.keys(ChatModelMap), 'text-embedding-ada-002'],
required: true
},
chatId: {

View File

@@ -1,5 +1,6 @@
import { Schema, model, models, Model } from 'mongoose';
import { ChatSchema as ChatType } from '@/types/mongoSchema';
import { ChatRoleMap } from '@/constants/chat';
const ChatSchema = new Schema({
userId: {
@@ -36,15 +37,15 @@ const ChatSchema = new Schema({
obj: {
type: String,
required: true,
enum: ['Human', 'AI', 'SYSTEM']
enum: Object.keys(ChatRoleMap)
},
value: {
type: String,
required: true
},
deleted: {
type: Boolean,
default: false
systemPrompt: {
type: String,
default: ''
}
}
],

View File

@@ -1,6 +1,11 @@
import { Schema, model, models, Model as MongoModel } from 'mongoose';
import { ModelSchema as ModelType } from '@/types/mongoSchema';
import { ModelVectorSearchModeMap, ModelVectorSearchModeEnum } from '@/constants/model';
import {
ModelVectorSearchModeMap,
ModelVectorSearchModeEnum,
ChatModelMap,
OpenAiChatEnum
} from '@/constants/model';
const ModelSchema = new Schema({
userId: {
@@ -13,11 +18,6 @@ const ModelSchema = new Schema({
required: true
},
avatar: {
type: String,
default: '/icon/logo.png'
},
systemPrompt: {
// 系统提示词
type: String,
default: ''
},
@@ -30,17 +30,34 @@ const ModelSchema = new Schema({
type: Date,
default: () => new Date()
},
temperature: {
type: Number,
min: 0,
max: 10,
default: 4
},
search: {
mode: {
chat: {
useKb: {
// use knowledge base to search
type: Boolean,
default: false
},
searchMode: {
// knowledge base search mode
type: String,
enum: Object.keys(ModelVectorSearchModeMap),
default: ModelVectorSearchModeEnum.hightSimilarity
},
systemPrompt: {
// 系统提示词
type: String,
default: ''
},
temperature: {
type: Number,
min: 0,
max: 10,
default: 0
},
chatModel: {
// 聊天时使用的模型
type: String,
enum: Object.keys(ChatModelMap),
default: OpenAiChatEnum.GPT35
}
},
share: {
@@ -63,18 +80,6 @@ const ModelSchema = new Schema({
default: 0
}
},
service: {
chatModel: {
// 聊天时使用的模型
type: String,
required: true
},
modelName: {
// 底层模型的名称
type: String,
required: true
}
},
security: {
type: {
domain: {
@@ -100,8 +105,7 @@ const ModelSchema = new Schema({
default: -1
}
},
default: {},
required: true
default: {}
}
});

View File

@@ -0,0 +1,134 @@
import { PgClient } from '@/service/pg';
import { ModelDataStatusEnum, ModelVectorSearchModeEnum, ChatModelMap } from '@/constants/model';
import { ModelSchema } from '@/types/mongoSchema';
import { openaiCreateEmbedding } from '../utils/chat/openai';
import { ChatRoleEnum } from '@/constants/chat';
import { modelToolMap } from '@/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
/**
* use openai embedding search kb
*/
export const searchKb = async ({
userOpenAiKey,
prompts,
similarity = 0.2,
model,
userId
}: {
userOpenAiKey?: string;
prompts: ChatItemSimpleType[];
model: ModelSchema;
userId: string;
similarity?: number;
}): Promise<{
code: 200 | 201;
searchPrompt?: {
obj: `${ChatRoleEnum}`;
value: string;
};
}> => {
async function search(textArr: string[] = []) {
// 获取提示词的向量
const { vectors: promptVectors } = await openaiCreateEmbedding({
userOpenAiKey,
userId,
textArr
});
const searchRes = await Promise.all(
promptVectors.map((promptVector) =>
PgClient.select<{ id: string; q: string; a: string }>('modelData', {
fields: ['id', 'q', 'a'],
where: [
['status', ModelDataStatusEnum.ready],
'AND',
['model_id', model._id],
'AND',
`vector <=> '[${promptVector}]' < ${similarity}`
],
order: [{ field: 'vector', mode: `<=> '[${promptVector}]'` }],
limit: 20
}).then((res) => res.rows)
)
);
// Remove repeat record
const idSet = new Set<string>();
const filterSearch = searchRes.map((search) =>
search.filter((item) => {
if (idSet.has(item.id)) {
return false;
}
idSet.add(item.id);
return true;
})
);
return filterSearch.map((item) => item.map((item) => `${item.q}\n${item.a}`).join('\n'));
}
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// search three times
const userPrompts = prompts.filter((item) => item.obj === 'Human');
const searchArr: string[] = [
userPrompts[userPrompts.length - 1].value,
userPrompts[userPrompts.length - 2]?.value
].filter((item) => item);
const systemPrompts = await search(searchArr);
// filter system prompts.
const filterRateMap: Record<number, number[]> = {
1: [1],
2: [0.7, 0.3]
};
const filterRate = filterRateMap[systemPrompts.length] || filterRateMap[0];
const filterSystemPrompt = filterRate
.map((rate, i) =>
modelToolMap[model.chat.chatModel].sliceText({
text: systemPrompts[i],
length: Math.floor(modelConstantsData.systemMaxToken * rate)
})
)
.join('\n');
/* 高相似度+不回复 */
if (!filterSystemPrompt && model.chat.searchMode === ModelVectorSearchModeEnum.hightSimilarity) {
return {
code: 201,
searchPrompt: {
obj: ChatRoleEnum.AI,
value: '对不起,你的问题不在知识库中。'
}
};
}
/* 高相似度+无上下文,不添加额外知识,仅用系统提示词 */
if (!filterSystemPrompt && model.chat.searchMode === ModelVectorSearchModeEnum.noContext) {
return {
code: 200,
searchPrompt: model.chat.systemPrompt
? {
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
: undefined
};
}
/* 有匹配 */
return {
code: 200,
searchPrompt: {
obj: ChatRoleEnum.System,
value: `
${model.chat.systemPrompt}
${
model.chat.searchMode === ModelVectorSearchModeEnum.hightSimilarity ? '不回答知识库外的内容.' : ''
}
知识库内容为: '${filterSystemPrompt}'
`
}
};
};

View File

@@ -39,12 +39,7 @@ export const jsonRes = <T = any>(
} else if (openaiError[error?.response?.statusText]) {
msg = openaiError[error.response.statusText];
}
console.log(`error-> msg:${msg}`);
// request 时候报错
if (error?.response) {
console.log('statusText:', error?.response?.statusText);
console.log('openai error:', error?.response?.data?.error);
}
console.log(error);
}
res.json({

View File

@@ -1,18 +1,84 @@
import { Configuration, OpenAIApi } from 'openai';
import { Chat, Model } from '../mongo';
import type { NextApiRequest } from 'next';
import jwt from 'jsonwebtoken';
import { Chat, Model, OpenApi, User } from '../mongo';
import type { ModelSchema } from '@/types/mongoSchema';
import { authToken } from './tools';
import { getOpenApiKey } from './openai';
import type { ChatItemType } from '@/types/chat';
import type { ChatItemSimpleType } from '@/types/chat';
import mongoose from 'mongoose';
import { ClaudeEnum, defaultModel } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';
import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
export const getOpenAIApi = (apiKey: string) => {
const configuration = new Configuration({
apiKey,
basePath: process.env.OPENAI_BASE_URL
/* 校验 token */
export const authToken = (token?: string): Promise<string> => {
return new Promise((resolve, reject) => {
if (!token) {
reject('缺少登录凭证');
return;
}
const key = process.env.TOKEN_KEY as string;
jwt.verify(token, key, function (err, decoded: any) {
if (err || !decoded?.userId) {
reject('凭证无效');
return;
}
resolve(decoded.userId);
});
});
};
return new OpenAIApi(configuration);
/* 获取 api 请求的 key */
export const getApiKey = async ({ model, userId }: { model: ChatModelType; userId: string }) => {
const user = await User.findById(userId);
if (!user) {
return Promise.reject({
code: 501,
message: '找不到用户'
});
}
const keyMap = {
[OpenAiChatEnum.GPT35]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.OPENAIKEY as string
},
[OpenAiChatEnum.GPT4]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.OPENAIKEY as string
},
[OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.OPENAIKEY as string
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',
systemAuthKey: process.env.LAFKEY as string
}
};
// 有自己的key
if (keyMap[model].userOpenAiKey) {
return {
user,
userOpenAiKey: keyMap[model].userOpenAiKey,
systemAuthKey: ''
};
}
// 平台账号余额校验
if (formatPrice(user.balance) <= 0) {
return Promise.reject({
code: 501,
message: '账号余额不足'
});
}
return {
user,
userOpenAiKey: '',
systemAuthKey: keyMap[model].systemAuthKey
};
};
// 模型使用权校验
@@ -20,12 +86,14 @@ export const authModel = async ({
modelId,
userId,
authUser = true,
authOwner = true
authOwner = true,
reserveDetail = false
}: {
modelId: string;
userId: string;
authUser?: boolean;
authOwner?: boolean;
reserveDetail?: boolean; // focus reserve detail
}) => {
// 获取 model 数据
const model = await Model.findById<ModelSchema>(modelId);
@@ -33,18 +101,24 @@ export const authModel = async ({
return Promise.reject('模型不存在');
}
// 使用权限校验
/*
Access verification
1. authOwner=true or authUser = true , just owner can use
2. authUser = false and share, anyone can use
*/
if ((authOwner || (authUser && !model.share.isShare)) && userId !== String(model.userId)) {
return Promise.reject('无权操作该模型');
}
// detail 内容去除
if (!model.share.isShareDetail) {
model.systemPrompt = '';
model.temperature = 0;
// do not share detail info
if (!reserveDetail && !model.share.isShareDetail && userId !== String(model.userId)) {
model.chat = {
...defaultModel.chat,
chatModel: model.chat.chatModel
};
}
return { model };
return { model, showModelDetail: model.share.isShareDetail || userId === String(model.userId) };
};
// 获取对话校验
@@ -60,17 +134,28 @@ export const authChat = async ({
const userId = await authToken(authorization);
// 获取 model 数据
const { model } = await authModel({ modelId, userId, authOwner: false });
const { model, showModelDetail } = await authModel({
modelId,
userId,
authOwner: false,
reserveDetail: true
});
// 聊天内容
let content: ChatItemType[] = [];
let content: ChatItemSimpleType[] = [];
if (chatId) {
// 获取 chat 数据
content = await Chat.aggregate([
{ $match: { _id: new mongoose.Types.ObjectId(chatId) } },
{
$project: {
content: {
$slice: ['$content', -50] // 返回 content 数组的最后50个元素
}
}
},
{ $unwind: '$content' },
{ $match: { 'content.deleted': false } },
{
$project: {
obj: '$content.obj',
@@ -79,15 +164,53 @@ export const authChat = async ({
}
]);
}
// 获取 user 的 apiKey
const { userApiKey, systemKey } = await getOpenApiKey(userId);
const { userOpenAiKey, systemAuthKey } = await getApiKey({ model: model.chat.chatModel, userId });
return {
userApiKey,
systemKey,
userOpenAiKey,
systemAuthKey,
content,
userId,
model
model,
showModelDetail
};
};
/* 校验 open api key */
export const authOpenApiKey = async (req: NextApiRequest) => {
const { apikey: apiKey } = req.headers;
if (!apiKey) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
try {
const openApi = await OpenApi.findOne({ apiKey });
if (!openApi) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
const userId = String(openApi.userId);
// 余额校验
const user = await User.findById(userId);
if (!user) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
if (formatPrice(user.balance) <= 0) {
return Promise.reject(ERROR_ENUM.insufficientQuota);
}
// 更新使用的时间
await OpenApi.findByIdAndUpdate(openApi._id, {
lastUsedTime: new Date()
});
return {
apiKey: process.env.OPENAIKEY as string,
userId
};
} catch (error) {
return Promise.reject(error);
}
};

View File

@@ -0,0 +1,103 @@
import { modelToolMap } from '@/utils/chat';
import { ChatCompletionType, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
import axios from 'axios';
import mongoose from 'mongoose';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { ClaudeEnum } from '@/constants/model';
/* 模型对话 */
export const lafClaudChat = async ({
apiKey,
messages,
stream,
chatId,
res
}: ChatCompletionType) => {
const conversationId = chatId || String(new mongoose.Types.ObjectId());
// create a new chat
!chatId &&
messages.filter((item) => item.obj === 'Human').length === 1 &&
res?.setHeader(NEW_CHATID_HEADER, conversationId);
// get system prompt
const systemPrompt = messages
.filter((item) => item.obj === 'System')
.map((item) => item.value)
.join('\n');
const systemPromptText = systemPrompt ? `\n知识库内容:'${systemPrompt}'\n我的问题:` : '';
const prompt = systemPromptText + messages[messages.length - 1].value;
const lafResponse = await axios.post(
'https://hnvacz.laf.run/claude-gpt',
{
prompt,
stream,
conversationId
},
{
headers: {
Authorization: apiKey
},
timeout: stream ? 40000 : 240000,
responseType: stream ? 'stream' : 'json'
}
);
let responseText = '';
let totalTokens = 0;
if (!stream) {
responseText = lafResponse.data?.text || '';
}
return {
streamResponse: lafResponse,
responseMessages: messages.concat({ obj: ChatRoleEnum.AI, value: responseText }),
responseText,
totalTokens
};
};
/* openai stream response */
export const lafClaudStreamResponse = async ({
stream,
chatResponse,
prompts
}: StreamResponseType) => {
try {
let responseContent = '';
try {
const decoder = new TextDecoder();
for await (const chunk of chatResponse.data as any) {
if (stream.destroyed) {
// 流被中断了,直接忽略后面的内容
break;
}
const content = decoder.decode(chunk);
responseContent += content;
content && stream.push(content.replace(/\n/g, '<br/>'));
}
} catch (error) {
console.log('pipe error', error);
}
// count tokens
const finishMessages = prompts.concat({
obj: ChatRoleEnum.AI,
value: responseContent
});
const totalTokens = modelToolMap[ClaudeEnum.Claude].countTokens({
messages: finishMessages
});
return {
responseContent,
totalTokens,
finishMessages
};
} catch (error) {
return Promise.reject(error);
}
};

View File

@@ -0,0 +1,177 @@
import { ChatItemSimpleType } from '@/types/chat';
import { modelToolMap } from '@/utils/chat';
import type { ChatModelType } from '@/constants/model';
import { ChatRoleEnum, SYSTEM_PROMPT_HEADER } from '@/constants/chat';
import { OpenAiChatEnum, ClaudeEnum } from '@/constants/model';
import { chatResponse, openAiStreamResponse } from './openai';
import { lafClaudChat, lafClaudStreamResponse } from './claude';
import type { NextApiResponse } from 'next';
import type { PassThrough } from 'stream';
export type ChatCompletionType = {
apiKey: string;
temperature: number;
messages: ChatItemSimpleType[];
stream: boolean;
[key: string]: any;
};
export type ChatCompletionResponseType = {
streamResponse: any;
responseMessages: ChatItemSimpleType[];
responseText: string;
totalTokens: number;
};
export type StreamResponseType = {
stream: PassThrough;
chatResponse: any;
prompts: ChatItemSimpleType[];
res: NextApiResponse;
systemPrompt?: string;
[key: string]: any;
};
export type StreamResponseReturnType = {
responseContent: string;
totalTokens: number;
finishMessages: ChatItemSimpleType[];
};
// Builds the completion/stream handler pair for one concrete OpenAI model id.
const openAiService = (model: `${OpenAiChatEnum}`) => ({
  chatCompletion: (data: ChatCompletionType) => chatResponse({ model, ...data }),
  streamResponse: (data: StreamResponseType) => openAiStreamResponse({ model, ...data })
});

/** Maps each chat model to its completion and stream-response implementation. */
export const modelServiceToolMap: Record<
  ChatModelType,
  {
    chatCompletion: (data: ChatCompletionType) => Promise<ChatCompletionResponseType>;
    streamResponse: (data: StreamResponseType) => Promise<StreamResponseReturnType>;
  }
> = {
  [OpenAiChatEnum.GPT35]: openAiService(OpenAiChatEnum.GPT35),
  [OpenAiChatEnum.GPT4]: openAiService(OpenAiChatEnum.GPT4),
  [OpenAiChatEnum.GPT432k]: openAiService(OpenAiChatEnum.GPT432k),
  [ClaudeEnum.Claude]: {
    chatCompletion: lafClaudChat,
    streamResponse: lafClaudStreamResponse
  }
};
/* Strip noisy whitespace from a prompt string: collapse blank-line runs to
   one newline, squeeze horizontal whitespace runs to one space, trim ends. */
function simplifyStr(str: string): string {
  const singleNewlines = str.replace(/\n+/g, '\n'); // 连续空行
  const singleSpaces = singleNewlines.replace(/[^\S\r\n]+/g, ' '); // 连续空白内容
  return singleSpaces.trim();
}
/* Truncate chat context to fit a token budget.
 * Keeps the leading system prompt (if any) and as many of the most recent
 * messages as the budget allows. */
export const ChatContextFilter = ({
  model,
  prompts,
  maxTokens
}: {
  model: ChatModelType;
  prompts: ChatItemSimpleType[];
  maxTokens: number;
}) => {
  let rawTextLen = 0;
  const formatPrompts = prompts.map<ChatItemSimpleType>(({ obj, value }) => {
    const simplified = simplifyStr(value);
    rawTextLen += simplified.length;
    return { obj, value: simplified };
  });

  // Tiny conversations cannot exceed the budget — skip token counting.
  if (formatPrompts.length <= 2 || rawTextLen < maxTokens * 0.5) {
    return formatPrompts;
  }

  // Pull the system prompt aside so truncation never drops it.
  let systemPrompt: ChatItemSimpleType | null = null;
  if (formatPrompts[0].obj === ChatRoleEnum.System) {
    systemPrompt = formatPrompts.shift() ?? null;
  }

  const chats: ChatItemSimpleType[] = [];
  let messages: ChatItemSimpleType[] = [];

  // Walk backwards (newest message first) until the budget is exhausted.
  for (let i = formatPrompts.length - 1; i >= 0; i--) {
    chats.unshift(formatPrompts[i]);
    messages = systemPrompt ? [systemPrompt, ...chats] : chats;

    const tokens = modelToolMap[model].countTokens({ messages });

    if (tokens >= maxTokens) {
      // Over budget: drop the oldest message just added and stop.
      const kept = chats.slice(1);
      return systemPrompt ? [systemPrompt, ...kept] : kept;
    }
  }

  return messages;
};
/* Stream a model response to the client over SSE and return the aggregated
 * result ({ responseContent, totalTokens, finishMessages }).
 *
 * Fixes:
 * - Content-Type charset was malformed ("charset-utf-8"); it must be
 *   "charset=utf-8" or clients may mis-decode the stream.
 * - The PassThrough stream is now always closed (try/finally), even when the
 *   model's streamResponse rejects mid-way, so the HTTP response cannot hang.
 */
export const resStreamResponse = async ({
  model,
  res,
  stream,
  chatResponse,
  systemPrompt,
  prompts
}: StreamResponseType & {
  model: ChatModelType;
}) => {
  // SSE headers; X-Accel-Buffering disables proxy buffering (nginx).
  res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.setHeader('X-Accel-Buffering', 'no');
  res.setHeader('Cache-Control', 'no-cache, no-transform');
  systemPrompt && res.setHeader(SYSTEM_PROMPT_HEADER, encodeURIComponent(systemPrompt));

  stream.pipe(res);

  try {
    const { responseContent, totalTokens, finishMessages } = await modelServiceToolMap[
      model
    ].streamResponse({
      chatResponse,
      stream,
      prompts,
      res,
      systemPrompt
    });

    return { responseContent, totalTokens, finishMessages };
  } finally {
    // close stream — on success and on failure alike
    !stream.destroyed && stream.push(null);
    stream.destroy();
  }
};

View File

@@ -0,0 +1,174 @@
import { Configuration, OpenAIApi } from 'openai';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { axiosConfig } from '../tools';
import { ChatModelMap, embeddingModel, OpenAiChatEnum } from '@/constants/model';
import { pushGenerateVectorBill } from '../../events/pushBill';
import { adaptChatItem_openAI } from '@/utils/chat/openai';
import { modelToolMap } from '@/utils/chat';
import { ChatCompletionType, ChatContextFilter, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
export const getOpenAIApi = (apiKey: string) => {
const configuration = new Configuration({
apiKey,
basePath: process.env.OPENAI_BASE_URL
});
return new OpenAIApi(configuration);
};
/* Create embedding vectors for a batch of texts.
 * Uses the user's own key when provided, otherwise the platform key
 * (platform usage is billed via pushGenerateVectorBill). */
export const openaiCreateEmbedding = async ({
  userOpenAiKey,
  userId,
  textArr
}: {
  userOpenAiKey?: string;
  userId: string;
  textArr: string[];
}) => {
  const systemAuthKey = process.env.OPENAIKEY as string;

  // API client, authenticated with whichever key applies
  const chatAPI = getOpenAIApi(userOpenAiKey || systemAuthKey);

  // turn the input texts into vectors
  const response = await chatAPI.createEmbedding(
    {
      model: embeddingModel,
      input: textArr
    },
    {
      timeout: 60000,
      ...axiosConfig()
    }
  );

  const tokenLen = response.data.usage.total_tokens || 0;
  const vectors = response.data.data.map((item) => item.embedding);

  pushGenerateVectorBill({
    isPay: !userOpenAiKey,
    userId,
    text: textArr.join(''),
    tokenLen
  });

  return {
    vectors,
    chatAPI
  };
};
/* 模型对话 */
export const chatResponse = async ({
model,
apiKey,
temperature,
messages,
stream
}: ChatCompletionType & { model: `${OpenAiChatEnum}` }) => {
const filterMessages = ChatContextFilter({
model,
prompts: messages,
maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.9)
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages });
const chatAPI = getOpenAIApi(apiKey);
const response = await chatAPI.createChatCompletion(
{
model,
temperature: Number(temperature) || 0,
messages: adaptMessages,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream,
stop: ['.!?。']
},
{
timeout: stream ? 40000 : 240000,
responseType: stream ? 'stream' : 'json',
...axiosConfig()
}
);
let responseText = '';
let totalTokens = 0;
// adapt data
if (!stream) {
responseText = response.data.choices[0].message?.content || '';
totalTokens = response.data.usage?.total_tokens || 0;
}
return {
streamResponse: response,
responseMessages: filterMessages.concat({ obj: 'AI', value: responseText }),
responseText,
totalTokens
};
};
/* openai stream response.
 * Consumes the OpenAI SSE stream: forwards each delta to the client stream,
 * accumulates the full answer, then counts tokens of the finished chat. */
export const openAiStreamResponse = async ({
  model,
  stream,
  chatResponse,
  prompts
}: StreamResponseType & {
  model: `${OpenAiChatEnum}`;
}) => {
  try {
    let responseContent = '';

    // handle one parsed SSE event: extract the content delta and forward it
    const onParse = async (event: ParsedEvent | ReconnectInterval) => {
      if (event.type !== 'event') return;
      if (event.data === '[DONE]') return;
      try {
        const json = JSON.parse(event.data);
        const content: string = json?.choices?.[0].delta.content || '';
        responseContent += content;
        if (!stream.destroyed && content) {
          stream.push(content.replace(/\n/g, '<br/>'));
        }
      } catch (error) {
        error; // malformed event payload: skip it
      }
    };

    try {
      const decoder = new TextDecoder();
      const parser = createParser(onParse);
      for await (const chunk of chatResponse.data as any) {
        if (stream.destroyed) {
          // 流被中断了,直接忽略后面的内容 (client aborted: drop the rest)
          break;
        }
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    } catch (error) {
      console.log('pipe error', error);
    }

    // token accounting over the finished conversation
    const finishMessages = prompts.concat({
      obj: ChatRoleEnum.AI,
      value: responseContent
    });
    const totalTokens = modelToolMap[model].countTokens({
      messages: finishMessages
    });

    return {
      responseContent,
      totalTokens,
      finishMessages
    };
  } catch (error) {
    return Promise.reject(error);
  }
};

View File

@@ -1,170 +0,0 @@
import type { NextApiResponse } from 'next';
import type { PassThrough } from 'stream';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { getOpenAIApi } from '@/service/utils/auth';
import { axiosConfig } from './tools';
import { User } from '../models/user';
import { formatPrice } from '@/utils/user';
import { embeddingModel } from '@/constants/model';
import { pushGenerateVectorBill } from '../events/pushBill';
/* Get an OpenAI client backed by the user's own API key.
 * Rejects when the user has not configured a personal key. */
export const getUserApiOpenai = async (userId: string) => {
  const user = await User.findById(userId);
  const userApiKey = user?.openaiKey;
  if (!userApiKey) {
    // user has no personal key configured
    return Promise.reject('缺少ApiKey, 无法请求');
  }
  return {
    user,
    openai: getOpenAIApi(userApiKey),
    apiKey: userApiKey
  };
};
/* Resolve which OpenAI key to use for a user: their own key when set,
 * otherwise the platform key (caller must bill platform usage).
 * Rejects with code 501 when the user is missing or has no balance. */
export const getOpenApiKey = async (userId: string) => {
  const user = await User.findById(userId);
  if (!user) {
    return Promise.reject({
      code: 501,
      message: '找不到用户'
    });
  }
  const userApiKey = user?.openaiKey;
  // user supplied their own key: no platform billing needed
  if (userApiKey) {
    return {
      user,
      userApiKey,
      systemKey: ''
    };
  }
  // platform account: verify the user still has balance
  if (formatPrice(user.balance) <= 0) {
    return Promise.reject({
      code: 501,
      message: '账号余额不足'
    });
  }
  return {
    user,
    userApiKey: '',
    systemKey: process.env.OPENAIKEY as string
  };
};
/* Create an embedding vector for a single text (legacy single-input API).
 * Bills the platform when `isPay` is set. */
export const openaiCreateEmbedding = async ({
  isPay,
  userId,
  apiKey,
  text
}: {
  isPay: boolean;
  userId: string;
  apiKey: string;
  text: string;
}) => {
  // API client for the given key
  const chatAPI = getOpenAIApi(apiKey);
  // turn the input text into a vector
  const res = await chatAPI
    .createEmbedding(
      {
        model: embeddingModel,
        input: text
      },
      {
        timeout: 60000,
        // NOTE: axiosConfig is a plain object here (legacy), not a function
        ...axiosConfig
      }
    )
    .then((res) => ({
      tokenLen: res.data.usage.total_tokens || 0,
      vector: res?.data?.data?.[0]?.embedding || []
    }));
  pushGenerateVectorBill({
    isPay,
    userId,
    text,
    tokenLen: res.tokenLen
  });
  return {
    vector: res.vector,
    chatAPI
  };
};
/* gpt-3.5 stream response (legacy): sets SSE headers, pipes parsed OpenAI
 * deltas into `stream`, and resolves with the accumulated text. */
export const gpt35StreamResponse = ({
  res,
  stream,
  chatResponse
}: {
  res: NextApiResponse;
  stream: PassThrough;
  chatResponse: any;
}) =>
  // NOTE(review): async executor inside `new Promise` — safe here only
  // because the whole body is wrapped in try/catch.
  new Promise<{ responseContent: string }>(async (resolve, reject) => {
    try {
      // SSE response headers; X-Accel-Buffering disables proxy buffering
      res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
      res.setHeader('Access-Control-Allow-Origin', '*');
      res.setHeader('X-Accel-Buffering', 'no');
      res.setHeader('Cache-Control', 'no-cache, no-transform');
      stream.pipe(res);
      let responseContent = '';
      // handle one parsed SSE event: extract the delta and forward it
      const onParse = async (event: ParsedEvent | ReconnectInterval) => {
        if (event.type !== 'event') return;
        const data = event.data;
        if (data === '[DONE]') return;
        try {
          const json = JSON.parse(data);
          const content: string = json?.choices?.[0].delta.content || '';
          responseContent += content;
          if (!stream.destroyed && content) {
            stream.push(content.replace(/\n/g, '<br/>'));
          }
        } catch (error) {
          error; // malformed event payload: skip it
        }
      };
      const decoder = new TextDecoder();
      try {
        const parser = createParser(onParse);
        for await (const chunk of chatResponse.data as any) {
          if (stream.destroyed) {
            // 流被中断了,直接忽略后面的内容 (client aborted: drop the rest)
            break;
          }
          parser.feed(decoder.decode(chunk, { stream: true }));
        }
      } catch (error) {
        console.log('pipe error', error);
      }
      // close stream
      !stream.destroyed && stream.push(null);
      stream.destroy();
      resolve({
        responseContent
      });
    } catch (error) {
      reject(error);
    }
  });

View File

@@ -1,6 +1,5 @@
import * as nodemailer from 'nodemailer';
import { UserAuthTypeEnum } from '@/constants/common';
import dayjs from 'dayjs';
import Dysmsapi, * as dysmsapi from '@alicloud/dysmsapi20170525';
// @ts-ignore
import * as OpenApi from '@alicloud/openapi-client';
@@ -48,25 +47,6 @@ export const sendEmailCode = (email: string, code: string, type: `${UserAuthType
});
};
/* Email the user that their model has finished training. */
export const sendTrainSucceed = (email: string, modelName: string) => {
  return new Promise((resolve, reject) => {
    const options = {
      from: `"FastGPT" ${myEmail}`,
      to: email,
      subject: '模型训练完成通知',
      html: `你的模型 ${modelName} 已于 ${dayjs().format('YYYY-MM-DD HH:mm')} 训练完成!`
    };
    mailTransport.sendMail(options, function (err, msg) {
      if (err) {
        // mail transport failure: log and reject with a user-facing message
        console.log('send email error->', err);
        reject('邮箱异常');
      } else {
        resolve('');
      }
    });
  });
};
export const sendPhoneCode = async (phone: string, code: string) => {
const accessKeyId = process.env.aliAccessKeyId;
const accessKeySecret = process.env.aliAccessKeySecret;

View File

@@ -1,13 +1,5 @@
import type { NextApiRequest } from 'next';
import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import { ChatItemType } from '@/types/chat';
import { OpenApi, User } from '../mongo';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';
import { countChatTokens } from '@/utils/tools';
import { ChatCompletionRequestMessageRoleEnum, ChatCompletionRequestMessage } from 'openai';
import { ChatModelEnum } from '@/constants/model';
/* 密码加密 */
export const hashPassword = (psw: string) => {
@@ -27,163 +19,10 @@ export const generateToken = (userId: string) => {
return token;
};
/* Verify a login JWT and resolve with the embedded userId. */
export const authToken = (token?: string): Promise<string> => {
  return new Promise((resolve, reject) => {
    if (!token) {
      // no credential supplied
      reject('缺少登录凭证');
      return;
    }
    const key = process.env.TOKEN_KEY as string;
    jwt.verify(token, key, function (err, decoded: any) {
      if (err || !decoded?.userId) {
        // invalid/expired token, or payload missing userId
        reject('凭证无效');
        return;
      }
      resolve(decoded.userId);
    });
  });
};
/* Validate an open-api key from the request headers.
 * Checks key existence, owning user and balance, stamps last-used time,
 * and returns the platform key plus the resolved userId. */
export const authOpenApiKey = async (req: NextApiRequest) => {
  const { apikey: apiKey } = req.headers;
  if (!apiKey) {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }
  try {
    const openApi = await OpenApi.findOne({ apiKey });
    if (!openApi) {
      return Promise.reject(ERROR_ENUM.unAuthorization);
    }
    const userId = String(openApi.userId);
    // balance check on the owning account
    const user = await User.findById(userId);
    if (!user) {
      return Promise.reject(ERROR_ENUM.unAuthorization);
    }
    if (formatPrice(user.balance) <= 0) {
      return Promise.reject('Insufficient account balance');
    }
    // record when this key was last used
    await OpenApi.findByIdAndUpdate(openApi._id, {
      lastUsedTime: new Date()
    });
    return {
      // requests are served with the platform key; the user key is auth only
      apiKey: process.env.OPENAIKEY as string,
      userId
    };
  } catch (error) {
    return Promise.reject(error);
  }
};
/* openai axios config */
export const axiosConfig = {
export const axiosConfig = () => ({
httpsAgent: global.httpsAgent,
headers: {
auth: process.env.OPENAI_BASE_URL_AUTH || ''
}
};
/* delete invalid symbols: collapse blank lines / whitespace runs, trim ends */
const simplifyStr = (str: string) =>
  str
    .replace(/\n+/g, '\n') // collapse consecutive blank lines
    .replace(/[^\S\r\n]+/g, ' ') // collapse consecutive horizontal whitespace
    .trim();
/* Truncate chat history to a token budget (legacy OpenAI-only version).
 * Keeps the system prompt and the most recent messages. */
export const openaiChatFilter = ({
  model,
  prompts,
  maxTokens
}: {
  model: `${ChatModelEnum}`;
  prompts: ChatItemType[];
  maxTokens: number;
}) => {
  // map internal roles to OpenAI roles
  const map = {
    Human: ChatCompletionRequestMessageRoleEnum.User,
    AI: ChatCompletionRequestMessageRoleEnum.Assistant,
    SYSTEM: ChatCompletionRequestMessageRoleEnum.System
  };
  let rawTextLen = 0;
  const formatPrompts = prompts.map((item) => {
    const val = simplifyStr(item.value);
    rawTextLen += val.length;
    return {
      role: map[item.obj],
      content: val
    };
  });
  // short content cannot exceed the budget: skip token counting
  if (rawTextLen < maxTokens * 0.5) {
    return formatPrompts;
  }
  // truncate by counted tokens
  const chats: ChatCompletionRequestMessage[] = [];
  let systemPrompt: ChatCompletionRequestMessage | null = null;
  // keep the leading system prompt aside so truncation never drops it
  if (formatPrompts[0]?.role === 'system') {
    systemPrompt = formatPrompts.shift() as ChatCompletionRequestMessage;
  }
  let messages: { role: ChatCompletionRequestMessageRoleEnum; content: string }[] = [];
  // walk backwards (newest first) until the budget is exhausted
  for (let i = formatPrompts.length - 1; i >= 0; i--) {
    chats.unshift(formatPrompts[i]);
    messages = systemPrompt ? [systemPrompt, ...chats] : chats;
    const tokens = countChatTokens({
      model,
      messages
    });
    /* stop once the accumulated messages exceed the budget */
    if (tokens >= maxTokens) {
      break;
    }
  }
  return messages;
};
/* Truncate system/knowledge-base content to a token budget.
 * `prompts` is expected ordered by similarity, high to low; content is taken
 * front-to-back until the budget is reached. */
export const systemPromptFilter = ({
  model,
  prompts,
  maxTokens
}: {
  model: 'gpt-4' | 'gpt-4-32k' | 'gpt-3.5-turbo';
  prompts: string[];
  maxTokens: number;
}) => {
  let splitText = '';
  // take prompts from the front (highest similarity first)
  for (let i = 0; i < prompts.length; i++) {
    const prompt = simplifyStr(prompts[i]);
    splitText += `${prompt}\n`;
    const tokens = countChatTokens({ model, messages: [{ role: 'system', content: splitText }] });
    if (tokens >= maxTokens) {
      break;
    }
  }
  // drop the trailing newline
  return splitText.slice(0, splitText.length - 1);
};
});

11
src/types/chat.d.ts vendored
View File

@@ -1,5 +1,10 @@
export type ChatItemType = {
obj: 'Human' | 'AI' | 'SYSTEM';
import { ChatRoleEnum } from '@/constants/chat';
export type ChatItemSimpleType = {
obj: `${ChatRoleEnum}`;
value: string;
deleted?: boolean;
systemPrompt?: string;
};
export type ChatItemType = {
_id: string;
} & ChatItemSimpleType;

View File

@@ -1,13 +1,11 @@
import { ModelStatusEnum } from '@/constants/model';
import type { ModelSchema } from './mongoSchema';
export interface ModelUpdateParams {
name: string;
avatar: string;
systemPrompt: string;
temperature: number;
search: ModelSchema['search'];
chat: ModelSchema['chat'];
share: ModelSchema['share'];
service: ModelSchema['service'];
security: ModelSchema['security'];
}

View File

@@ -3,7 +3,7 @@ import {
ModelStatusEnum,
ModelNameEnum,
ModelVectorSearchModeEnum,
ChatModelEnum
ChatModelType
} from '@/constants/model';
import type { DataType } from './data';
@@ -31,15 +31,17 @@ export interface AuthCodeSchema {
export interface ModelSchema {
_id: string;
userId: string;
name: string;
avatar: string;
systemPrompt: string;
userId: string;
status: `${ModelStatusEnum}`;
updateTime: number;
temperature: number;
search: {
mode: `${ModelVectorSearchModeEnum}`;
chat: {
useKb: boolean;
searchMode: `${ModelVectorSearchModeEnum}`;
systemPrompt: string;
temperature: number;
chatModel: ChatModelType; // 聊天时用的模型,训练后就是训练的模型
};
share: {
isShare: boolean;
@@ -47,10 +49,6 @@ export interface ModelSchema {
intro: string;
collection: number;
};
service: {
chatModel: `${ChatModelEnum}`; // 聊天时用的模型,训练后就是训练的模型
modelName: `${ModelNameEnum}`; // 底层模型名称,不会变
};
security: {
domain: string[];
contextMaxLen: number;

3
src/utils/chat/claude.ts Normal file
View File

@@ -0,0 +1,3 @@
/** Claude text slicer: no tokenizer available, so `length` is treated as a
 *  plain character count. */
export function ClaudeSliceTextByToken({ text, length }: { text: string; length: number }) {
  return text.slice(0, length);
}

30
src/utils/chat/index.ts Normal file
View File

@@ -0,0 +1,30 @@
import { ClaudeEnum, OpenAiChatEnum } from '@/constants/model';
import type { ChatModelType } from '@/constants/model';
import type { ChatItemSimpleType } from '@/types/chat';
import { countOpenAIToken, openAiSliceTextByToken } from './openai';
import { ClaudeSliceTextByToken } from './claude';
/* Per-model token utilities (client-safe).
 * Fix: GPT4 / GPT4-32k `sliceText` previously passed OpenAiChatEnum.GPT35
 * (copy-paste slip — the adjacent countTokens lines used the right model);
 * each model now slices with its own model id. */
export const modelToolMap: Record<
  ChatModelType,
  {
    countTokens: (data: { messages: ChatItemSimpleType[] }) => number;
    sliceText: (data: { text: string; length: number }) => string;
  }
> = {
  [OpenAiChatEnum.GPT35]: {
    countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT35, messages }),
    sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data })
  },
  [OpenAiChatEnum.GPT4]: {
    countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT4, messages }),
    sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT4, ...data })
  },
  [OpenAiChatEnum.GPT432k]: {
    countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT432k, messages }),
    sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT432k, ...data })
  },
  [ClaudeEnum.Claude]: {
    // NOTE(review): Claude token counting is not implemented; returning 0
    // effectively disables token-based truncation for this model.
    countTokens: () => 0,
    sliceText: ClaudeSliceTextByToken
  }
};

121
src/utils/chat/openai.ts Normal file
View File

@@ -0,0 +1,121 @@
import { encoding_for_model, type Tiktoken } from '@dqbd/tiktoken';
import type { ChatItemSimpleType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
import { OpenAiChatEnum } from '@/constants/model';
import Graphemer from 'graphemer';
const textDecoder = new TextDecoder();
const graphemer = new Graphemer();
/** Convert internal chat items into OpenAI ChatCompletion request messages. */
export const adaptChatItem_openAI = ({
  messages
}: {
  messages: ChatItemSimpleType[];
}): ChatCompletionRequestMessage[] => {
  const roleMap = {
    [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
    [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
    [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
  };
  return messages.map(({ obj, value }) => ({
    // unmapped roles fall back to system
    role: roleMap[obj] || ChatCompletionRequestMessageRoleEnum.System,
    content: value || ''
  }));
};
/* count openai chat token */
// Lazily-built cache of tiktoken encoders (construction is expensive).
let OpenAiEncMap: Record<string, Tiktoken>;
/** Return the shared encoder map, building it once on first use. */
export const getOpenAiEncMap = () => {
  if (OpenAiEncMap) return OpenAiEncMap;
  // all three models share the same ChatML special-token ids
  const buildEncoder = (model: 'gpt-3.5-turbo' | 'gpt-4' | 'gpt-4-32k') =>
    encoding_for_model(model, {
      '<|im_start|>': 100264,
      '<|im_end|>': 100265,
      '<|im_sep|>': 100266
    });
  OpenAiEncMap = {
    'gpt-3.5-turbo': buildEncoder('gpt-3.5-turbo'),
    'gpt-4': buildEncoder('gpt-4'),
    'gpt-4-32k': buildEncoder('gpt-4-32k')
  };
  return OpenAiEncMap;
};
/* Count the billed tokens of a chat message list for an OpenAI model. */
export function countOpenAIToken({
  messages,
  model
}: {
  messages: ChatItemSimpleType[];
  model: `${OpenAiChatEnum}`;
}) {
  // Render messages in the ChatML-style framing the models are billed on.
  function getChatGPTEncodingText(
    messages: { role: 'system' | 'user' | 'assistant'; content: string; name?: string }[],
    model: 'gpt-3.5-turbo' | 'gpt-4' | 'gpt-4-32k'
  ) {
    const isGpt3 = model === 'gpt-3.5-turbo';
    // gpt-3.5 separates with newlines; gpt-4 uses <|im_sep|>
    const msgSep = isGpt3 ? '\n' : '';
    const roleSep = isGpt3 ? '\n' : '<|im_sep|>';
    return [
      messages
        .map(({ name = '', role, content }) => {
          return `<|im_start|>${name || role}${roleSep}${content}<|im_end|>`;
        })
        .join(msgSep),
      `<|im_start|>assistant${roleSep}`
    ].join(msgSep);
  }
  // Count tokens while re-aligning decoded bytes to grapheme boundaries so
  // tokens spanning multi-byte characters are attributed consistently.
  function text2TokensLen(encoder: Tiktoken, inputText: string) {
    const encoding = encoder.encode(inputText, 'all');
    const segments: { text: string; tokens: { id: number; idx: number }[] }[] = [];
    let byteAcc: number[] = [];
    let tokenAcc: { id: number; idx: number }[] = [];
    let inputGraphemes = graphemer.splitGraphemes(inputText);
    for (let idx = 0; idx < encoding.length; idx++) {
      const token = encoding[idx]!;
      byteAcc.push(...encoder.decode_single_token_bytes(token));
      tokenAcc.push({ id: token, idx });
      // flush a segment once the accumulated bytes decode to whole graphemes
      const segmentText = textDecoder.decode(new Uint8Array(byteAcc));
      const graphemes = graphemer.splitGraphemes(segmentText);
      if (graphemes.every((item, idx) => inputGraphemes[idx] === item)) {
        segments.push({ text: segmentText, tokens: tokenAcc });
        byteAcc = [];
        tokenAcc = [];
        inputGraphemes = inputGraphemes.slice(graphemes.length);
      }
    }
    // total token count across all flushed segments
    return segments.reduce((memo, i) => memo + i.tokens.length, 0) ?? 0;
  }
  const adaptMessages = adaptChatItem_openAI({ messages });
  return text2TokensLen(getOpenAiEncMap()[model], getChatGPTEncodingText(adaptMessages, model));
}
/** Slice `text` to at most `length` tokens under the given model's encoding. */
export const openAiSliceTextByToken = ({
  model = 'gpt-3.5-turbo',
  text,
  length
}: {
  model: `${OpenAiChatEnum}`;
  text: string;
  length: number;
}) => {
  const encoder = getOpenAiEncMap()[model];
  const tokenIds = encoder.encode(text).slice(0, length);
  return new TextDecoder().decode(encoder.decode(tokenIds));
};

View File

@@ -1,6 +1,6 @@
import mammoth from 'mammoth';
import Papa from 'papaparse';
import { countChatTokens } from './tools';
import { getOpenAiEncMap } from './chat/openai';
/**
* 读取 txt 文件内容
@@ -145,7 +145,7 @@ export const fileDownload = ({
* slideLen - The size of the before and after Text
* maxLen > slideLen
*/
export const splitText = ({
export const splitText_token = ({
text,
maxLen,
slideLen
@@ -154,39 +154,32 @@ export const splitText = ({
maxLen: number;
slideLen: number;
}) => {
const textArr =
text.split(/(?<=[。!?\.!\?\n])/g)?.filter((item) => {
const text = item.replace(/(\\n)/g, '\n').trim();
if (text && text !== '\n') return true;
return false;
}) || [];
const enc = getOpenAiEncMap()['gpt-3.5-turbo'];
// filter empty text. encode sentence
const encodeText = enc.encode(text);
const chunks: { sum: number; arr: string[] }[] = [{ sum: 0, arr: [] }];
const chunks: string[] = [];
let tokens = 0;
for (let i = 0; i < textArr.length; i++) {
const tokenLen = countChatTokens({ messages: [{ role: 'system', content: textArr[i] }] });
chunks[chunks.length - 1].sum += tokenLen;
chunks[chunks.length - 1].arr.push(textArr[i]);
let startIndex = 0;
let endIndex = Math.min(startIndex + maxLen, encodeText.length);
let chunkEncodeArr = encodeText.slice(startIndex, endIndex);
// current length is over maxLen. create new chunk
if (chunks[chunks.length - 1].sum + tokenLen >= maxLen) {
// get slide len text as the initial value
const chunk: { sum: number; arr: string[] } = { sum: 0, arr: [] };
for (let j = chunks[chunks.length - 1].arr.length - 1; j >= 0; j--) {
const chunkText = chunks[chunks.length - 1].arr[j];
const tokenLen = countChatTokens({ messages: [{ role: 'system', content: chunkText }] });
chunk.sum += tokenLen;
chunk.arr.unshift(chunkText);
const decoder = new TextDecoder();
if (chunk.sum >= slideLen) {
break;
}
}
chunks.push(chunk);
}
while (startIndex < encodeText.length) {
tokens += chunkEncodeArr.length;
chunks.push(decoder.decode(enc.decode(chunkEncodeArr)));
startIndex += maxLen - slideLen;
endIndex = Math.min(startIndex + maxLen, encodeText.length);
chunkEncodeArr = encodeText.slice(Math.min(encodeText.length - slideLen, startIndex), endIndex);
}
const result = chunks.map((item) => item.arr.join(''));
return result;
return {
chunks,
tokens
};
};
export const fileToBase64 = (file: File) => {
@@ -197,3 +190,67 @@ export const fileToBase64 = (file: File) => {
reader.onerror = (error) => reject(error);
});
};
/**
 * Compress an image file via canvas scaling and return it as a base64 data URL.
 * Scales the longer side down to maxW/maxH, preserving aspect ratio.
 * @param maxSize The max size of the compressed image (compared against the
 *                data-URL string length — roughly 4/3 of the byte size)
 */
export const compressImg = ({
  file,
  maxW = 200,
  maxH = 200,
  maxSize = 1024 * 100
}: {
  file: File;
  maxW?: number;
  maxH?: number;
  maxSize?: number;
}) =>
  new Promise<string>((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = () => {
      const img = new Image();
      // @ts-ignore
      img.src = reader.result;
      img.onload = () => {
        let width = img.width;
        let height = img.height;
        // scale the longer side down to the limit, keeping aspect ratio
        if (width > height) {
          if (width > maxW) {
            height *= maxW / width;
            width = maxW;
          }
        } else {
          if (height > maxH) {
            width *= maxH / height;
            height = maxH;
          }
        }
        const canvas = document.createElement('canvas');
        canvas.width = width;
        canvas.height = height;
        const ctx = canvas.getContext('2d');
        if (!ctx) {
          return reject('压缩图片异常');
        }
        ctx.drawImage(img, 0, 0, width, height);
        // NOTE(review): quality 1 = maximum quality, so only the resize
        // reduces size; quality only applies to lossy types (jpeg/webp)
        const compressedDataUrl = canvas.toDataURL(file.type, 1);
        // remove the temporary canvas element
        canvas.remove();
        if (compressedDataUrl.length > maxSize) {
          return reject('图片太大了');
        }
        resolve(compressedDataUrl);
      };
    };
    reader.onerror = (err) => {
      console.log(err);
      reject('压缩图片异常');
    };
  });

Some files were not shown because too many files have changed in this diff Show More