feat: ai proxy v1 (#3898)
* feat: ai proxy v1 * perf: ai proxy channel crud * feat: ai proxy logs * feat: channel test * doc * update lock
This commit is contained in:
@@ -7,13 +7,17 @@ import { useUserStore } from '@/web/support/user/useUserStore';
|
||||
import FillRowTabs from '@fastgpt/web/components/common/Tabs/FillRowTabs';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import dynamic from 'next/dynamic';
|
||||
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
||||
|
||||
const ModelConfigTable = dynamic(() => import('@/pageComponents/account/model/ModelConfigTable'));
|
||||
const ChannelTable = dynamic(() => import('@/pageComponents/account/model/Channel'));
|
||||
const ChannelLog = dynamic(() => import('@/pageComponents/account/model/Log'));
|
||||
|
||||
type TabType = 'model' | 'config' | 'channel';
|
||||
type TabType = 'model' | 'config' | 'channel' | 'channel_log';
|
||||
|
||||
const ModelProvider = () => {
|
||||
const { t } = useTranslation();
|
||||
const { feConfigs } = useSystemStore();
|
||||
|
||||
const [tab, setTab] = useState<TabType>('model');
|
||||
|
||||
@@ -22,21 +26,29 @@ const ModelProvider = () => {
|
||||
<FillRowTabs<TabType>
|
||||
list={[
|
||||
{ label: t('account:active_model'), value: 'model' },
|
||||
{ label: t('account:config_model'), value: 'config' }
|
||||
// { label: t('account:channel'), value: 'channel' }
|
||||
{ label: t('account:config_model'), value: 'config' },
|
||||
// @ts-ignore
|
||||
...(feConfigs?.show_aiproxy
|
||||
? [
|
||||
{ label: t('account:channel'), value: 'channel' },
|
||||
{ label: t('account_model:log'), value: 'channel_log' }
|
||||
]
|
||||
: [])
|
||||
]}
|
||||
value={tab}
|
||||
py={1}
|
||||
onChange={setTab}
|
||||
/>
|
||||
);
|
||||
}, [t, tab]);
|
||||
}, [feConfigs.show_aiproxy, t, tab]);
|
||||
|
||||
return (
|
||||
<AccountContainer>
|
||||
<Flex h={'100%'} flexDirection={'column'} gap={4} py={4} px={6}>
|
||||
{tab === 'model' && <ValidModelTable Tab={Tab} />}
|
||||
{tab === 'config' && <ModelConfigTable Tab={Tab} />}
|
||||
{tab === 'channel' && <ChannelTable Tab={Tab} />}
|
||||
{tab === 'channel_log' && <ChannelLog Tab={Tab} />}
|
||||
</Flex>
|
||||
</AccountContainer>
|
||||
);
|
||||
@@ -45,7 +57,7 @@ const ModelProvider = () => {
|
||||
export async function getServerSideProps(content: any) {
|
||||
return {
|
||||
props: {
|
||||
...(await serviceSideProps(content, ['account']))
|
||||
...(await serviceSideProps(content, ['account', 'account_model']))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
72
projects/app/src/pages/api/aiproxy/[...path].ts
Normal file
72
projects/app/src/pages/api/aiproxy/[...path].ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
||||
import { jsonRes } from '@fastgpt/service/common/response';
|
||||
import { request } from 'https';
|
||||
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
|
||||
|
||||
// AI proxy upstream configuration, injected via environment variables.
// Both must be set for this route to work (validated per-request below).
const baseUrl = process.env.AIPROXY_API_ENDPOINT;
const token = process.env.AIPROXY_API_TOKEN;
|
||||
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
try {
|
||||
await authSystemAdmin({ req });
|
||||
|
||||
if (!baseUrl || !token) {
|
||||
throw new Error('AIPROXY_API_ENDPOINT or AIPROXY_API_TOKEN is not set');
|
||||
}
|
||||
|
||||
const { path = [], ...query } = req.query as any;
|
||||
|
||||
if (!path.length) {
|
||||
throw new Error('url is empty');
|
||||
}
|
||||
|
||||
const queryStr = new URLSearchParams(query).toString();
|
||||
const requestPath = queryStr
|
||||
? `/${path?.join('/')}?${new URLSearchParams(query).toString()}`
|
||||
: `/${path?.join('/')}`;
|
||||
|
||||
const parsedUrl = new URL(baseUrl);
|
||||
delete req.headers?.cookie;
|
||||
delete req.headers?.host;
|
||||
delete req.headers?.origin;
|
||||
|
||||
const requestResult = request({
|
||||
protocol: parsedUrl.protocol,
|
||||
hostname: parsedUrl.hostname,
|
||||
port: parsedUrl.port,
|
||||
path: requestPath,
|
||||
method: req.method,
|
||||
headers: {
|
||||
...req.headers,
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
timeout: 30000
|
||||
});
|
||||
|
||||
req.pipe(requestResult);
|
||||
|
||||
requestResult.on('response', (response) => {
|
||||
Object.keys(response.headers).forEach((key) => {
|
||||
// @ts-ignore
|
||||
res.setHeader(key, response.headers[key]);
|
||||
});
|
||||
response.statusCode && res.writeHead(response.statusCode);
|
||||
response.pipe(res);
|
||||
});
|
||||
requestResult.on('error', (e) => {
|
||||
res.send(e);
|
||||
res.end();
|
||||
});
|
||||
} catch (error) {
|
||||
jsonRes(res, {
|
||||
code: 500,
|
||||
error
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Next.js route config: disable the default body parser so the raw request
// stream can be piped through to the AI proxy unchanged.
export const config = {
  api: {
    bodyParser: false
  }
};
|
||||
33
projects/app/src/pages/api/aiproxy/api/createChannel.ts
Normal file
33
projects/app/src/pages/api/aiproxy/api/createChannel.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
|
||||
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
|
||||
import axios from 'axios';
|
||||
import { getErrText } from '@fastgpt/global/common/error/utils';
|
||||
|
||||
// AI proxy upstream configuration, injected via environment variables.
// Both must be set for this route to work (validated per-request below).
const baseUrl = process.env.AIPROXY_API_ENDPOINT;
const token = process.env.AIPROXY_API_TOKEN;
|
||||
|
||||
async function handler(req: ApiRequestProps, res: ApiResponseType<any>) {
|
||||
try {
|
||||
await authSystemAdmin({ req });
|
||||
|
||||
if (!baseUrl || !token) {
|
||||
return Promise.reject('AIPROXY_API_ENDPOINT or AIPROXY_API_TOKEN is not set');
|
||||
}
|
||||
|
||||
const { data } = await axios.post(`${baseUrl}/api/channel/`, req.body, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`
|
||||
}
|
||||
});
|
||||
|
||||
res.json(data);
|
||||
} catch (error) {
|
||||
res.json({
|
||||
success: false,
|
||||
message: getErrText(error),
|
||||
data: error
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default handler;
|
||||
@@ -60,6 +60,7 @@ const testLLMModel = async (model: LLMModelItemType) => {
|
||||
const ai = getAIApi({
|
||||
timeout: 10000
|
||||
});
|
||||
|
||||
const requestBody = llmCompletionsBodyFormat(
|
||||
{
|
||||
model: model.model,
|
||||
|
||||
@@ -218,7 +218,7 @@ const MyApps = () => {
|
||||
size="md"
|
||||
Button={
|
||||
<Button variant={'primary'} leftIcon={<AddIcon />}>
|
||||
<Box>{t('common:common.Create New')}</Box>
|
||||
<Box>{t('common:new_create')}</Box>
|
||||
</Button>
|
||||
}
|
||||
menuList={[
|
||||
|
||||
@@ -147,7 +147,7 @@ const Dataset = () => {
|
||||
<Button variant={'primary'} px="0">
|
||||
<Flex alignItems={'center'} px={5}>
|
||||
<AddIcon mr={2} />
|
||||
<Box>{t('common:common.Create New')}</Box>
|
||||
<Box>{t('common:new_create')}</Box>
|
||||
</Flex>
|
||||
</Button>
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user