update for custom model, ollama

duanfuxiang
2025-03-20 12:44:53 +08:00
parent 76ecca0da9
commit 570e8d9564
9 changed files with 155 additions and 115 deletions


@@ -49,15 +49,35 @@ class LLMManager implements LLMManagerInterface {
 	constructor(settings: InfioSettings) {
 		this.infioProvider = new InfioProvider(settings.infioProvider.apiKey)
-		this.openrouterProvider = new OpenAICompatibleProvider(settings.openrouterProvider.apiKey, OPENROUTER_BASE_URL)
-		this.siliconflowProvider = new OpenAICompatibleProvider(settings.siliconflowProvider.apiKey, SILICONFLOW_BASE_URL)
-		this.alibabaQwenProvider = new OpenAICompatibleProvider(settings.alibabaQwenProvider.apiKey, ALIBABA_QWEN_BASE_URL)
-		this.deepseekProvider = new OpenAICompatibleProvider(settings.deepseekProvider.apiKey, DEEPSEEK_BASE_URL)
+		this.openrouterProvider = new OpenAICompatibleProvider(
+			settings.openrouterProvider.apiKey,
+			settings.openrouterProvider.baseUrl && settings.openrouterProvider.useCustomUrl ?
+				settings.openrouterProvider.baseUrl
+				: OPENROUTER_BASE_URL
+		)
+		this.siliconflowProvider = new OpenAICompatibleProvider(
+			settings.siliconflowProvider.apiKey,
+			settings.siliconflowProvider.baseUrl && settings.siliconflowProvider.useCustomUrl ?
+				settings.siliconflowProvider.baseUrl
+				: SILICONFLOW_BASE_URL
+		)
+		this.alibabaQwenProvider = new OpenAICompatibleProvider(
+			settings.alibabaQwenProvider.apiKey,
+			settings.alibabaQwenProvider.baseUrl && settings.alibabaQwenProvider.useCustomUrl ?
+				settings.alibabaQwenProvider.baseUrl
+				: ALIBABA_QWEN_BASE_URL
+		)
+		this.deepseekProvider = new OpenAICompatibleProvider(
+			settings.deepseekProvider.apiKey,
+			settings.deepseekProvider.baseUrl && settings.deepseekProvider.useCustomUrl ?
+				settings.deepseekProvider.baseUrl
+				: DEEPSEEK_BASE_URL
+		)
 		this.openaiProvider = new OpenAIAuthenticatedProvider(settings.openaiProvider.apiKey)
 		this.anthropicProvider = new AnthropicProvider(settings.anthropicProvider.apiKey)
 		this.googleProvider = new GeminiProvider(settings.googleProvider.apiKey)
 		this.groqProvider = new GroqProvider(settings.groqProvider.apiKey)
-		this.ollamaProvider = new OllamaProvider(settings.groqProvider.baseUrl)
+		this.ollamaProvider = new OllamaProvider(settings.ollamaProvider.baseUrl)
 		this.openaiCompatibleProvider = new OpenAICompatibleProvider(settings.openaicompatibleProvider.apiKey, settings.openaicompatibleProvider.baseUrl)
 		this.isInfioEnabled = !!settings.infioProvider.apiKey
 	}
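
Note: the four OpenAI-compatible providers above now repeat the same base-URL fallback (use the custom URL only when one is set and useCustomUrl is enabled), and the hunk also fixes a copy-paste bug where the Ollama provider read its base URL from settings.groqProvider. A minimal sketch of the shared pattern, using a hypothetical resolveBaseUrl helper that is not part of this commit:

function resolveBaseUrl(
	provider: { baseUrl?: string; useCustomUrl?: boolean },
	defaultUrl: string,
): string {
	// Use the custom URL only when one is set AND the user opted in;
	// otherwise fall back to the provider's default endpoint.
	return provider.baseUrl && provider.useCustomUrl ? provider.baseUrl : defaultUrl
}

// e.g. new OpenAICompatibleProvider(
//   settings.deepseekProvider.apiKey,
//   resolveBaseUrl(settings.deepseekProvider, DEEPSEEK_BASE_URL))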
@@ -125,6 +145,8 @@ class LLMManager implements LLMManagerInterface {
 					request,
 					options,
 				)
+			case ApiProvider.OpenAICompatible:
+				return await this.openaiCompatibleProvider.generateResponse(model, request, options)
 			default:
 				throw new Error(`Unsupported model provider: ${model.provider}`)
 		}
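
Before this change, models configured against a user-defined OpenAI-compatible endpoint fell through to the default branch and threw. For orientation, a sketch of the dispatch this hunk extends (the method shape and the LLMModel/LLMRequest/LLMOptions type names are assumptions, not copied from the file):

async generateResponse(model: LLMModel, request: LLMRequest, options?: LLMOptions) {
	switch (model.provider) {
		// ...cases for the built-in providers...
		case ApiProvider.OpenAICompatible:
			return await this.openaiCompatibleProvider.generateResponse(model, request, options)
		default:
			throw new Error(`Unsupported model provider: ${model.provider}`)
	}
}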


@@ -68,7 +68,7 @@ export class OllamaProvider implements BaseLLMProvider {
 		const client = new NoStainlessOpenAI({
 			baseURL: `${this.baseUrl}/v1`,
-			apiKey: '',
+			apiKey: 'ollama',
 			dangerouslyAllowBrowser: true,
 		})
 		return this.adapter.generateResponse(client, request, options)
@@ -87,7 +87,7 @@ export class OllamaProvider implements BaseLLMProvider {
 		const client = new NoStainlessOpenAI({
 			baseURL: `${this.baseUrl}/v1`,
-			apiKey: '',
+			apiKey: 'ollama',
 			dangerouslyAllowBrowser: true,
 		})
 		return this.adapter.streamResponse(client, request, options)
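
Ollama's OpenAI-compatible endpoint ignores the API key, but sending a non-empty placeholder avoids an empty `Authorization: Bearer` header, which some clients and reverse proxies reject; Ollama's own OpenAI-compatibility examples use 'ollama' as the placeholder. A minimal sketch of the resulting client setup (the local address and model name below are assumptions for illustration; NoStainlessOpenAI is the wrapper shown above):

const client = new NoStainlessOpenAI({
	baseURL: 'http://localhost:11434/v1', // Ollama's default local address (assumed here)
	apiKey: 'ollama', // must be non-empty, but Ollama ignores the value
	dangerouslyAllowBrowser: true,
})
const res = await client.chat.completions.create({
	model: 'llama3.1', // any locally pulled model
	messages: [{ role: 'user', content: 'hello' }],
})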