simple model config

This commit is contained in:
duanfuxiang
2025-02-17 13:06:22 +08:00
parent bf29a42baa
commit 025dc85c59
34 changed files with 12098 additions and 708 deletions

View File

@@ -2,7 +2,7 @@ import { SerializedEditorState } from 'lexical'
import { SelectVector } from '../database/schema'
import { CustomLLMModel } from './llm/model'
import { LLMModel } from './llm/model'
import { ContentPart } from './llm/request'
import { ResponseUsage } from './llm/response'
import { Mentionable, SerializedMentionable } from './mentionable'
@@ -17,15 +17,17 @@ export type ChatUserMessage = {
similarity: number
})[]
}
export type ChatAssistantMessage = {
role: 'assistant'
content: string
id: string
metadata?: {
usage?: ResponseUsage
model?: CustomLLMModel
model?: LLMModel
}
}
export type ChatMessage = ChatUserMessage | ChatAssistantMessage
export type SerializedChatUserMessage = {
@@ -38,15 +40,17 @@ export type SerializedChatUserMessage = {
similarity: number
})[]
}
export type SerializedChatAssistantMessage = {
role: 'assistant'
content: string
id: string
metadata?: {
usage?: ResponseUsage
model?: CustomLLMModel
model?: LLMModel
}
}
export type SerializedChatMessage =
| SerializedChatUserMessage
| SerializedChatAssistantMessage

View File

@@ -1,4 +1,6 @@
import { CustomLLMModel } from './llm/model'
import { LLMModel } from './llm/model'
import { EmbeddingModelInfo } from '../utils/api'
export type EmbeddingModelId =
| 'text-embedding-3-small'
@@ -10,12 +12,12 @@ export type EmbeddingModelId =
export type EmbeddingModelOption = {
id: EmbeddingModelId
name: string
model: CustomLLMModel
model: LLMModel
dimension: number
}
export type EmbeddingModel = {
id: EmbeddingModelId
id: string
dimension: number
getEmbedding: (text: string) => Promise<number[]>
}

View File

@@ -1,3 +1,22 @@
// Every LLM API provider the plugin can talk to.
// The string values are persisted and validated at runtime (the settings
// schema in this file checks them via z.nativeEnum), so renaming a value
// is a breaking change for stored configurations.
export enum ApiProvider {
Infio = "Infio",
OpenRouter = "OpenRouter",
SiliconFlow = "SiliconFlow",
AlibabaQwen = "AlibabaQwen",
Anthropic = "Anthropic",
Deepseek = "Deepseek",
OpenAI = "OpenAI",
Google = "Google",
Groq = "Groq",
Ollama = "Ollama",
OpenAICompatible = "OpenAICompatible",
}
// A concrete model selection: which provider serves it, plus that
// provider's own model identifier string.
export type LLMModel = {
provider: ApiProvider;
modelId: string;
}
// Model Providers
export enum ModelProviders {
OPENAI = "openai",

View File

@@ -1,6 +1,5 @@
import { z } from 'zod';
import { DEFAULT_MODELS } from '../constants';
import {
MAX_DELAY,
@@ -11,10 +10,143 @@ import {
modelOptionsSchema
} from '../settings/versions/shared';
import { DEFAULT_AUTOCOMPLETE_SETTINGS } from "../settings/versions/v1/v1";
import { ApiProvider } from '../types/llm/model';
import { isRegexValid, isValidIgnorePattern } from '../utils/auto-complete';
export const SETTINGS_SCHEMA_VERSION = 0.1
// Connection settings for the Infio provider. Field-level .catch() repairs
// individual bad values; the trailing object-level .catch() swaps a
// wholly-invalid entry for this fallback.
const infioProviderFallback = {
  name: 'Infio',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const InfioProviderSchema = z
  .object({
    name: z.literal(infioProviderFallback.name),
    apiKey: z.string().catch(infioProviderFallback.apiKey),
    baseUrl: z.string().catch(infioProviderFallback.baseUrl),
    useCustomUrl: z.boolean().catch(infioProviderFallback.useCustomUrl),
  })
  .catch(infioProviderFallback)
// Connection settings for the OpenRouter provider; invalid stored entries
// collapse to this blank fallback via the object-level .catch().
const openRouterProviderFallback = {
  name: 'OpenRouter',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const OpenRouterProviderSchema = z
  .object({
    name: z.literal(openRouterProviderFallback.name),
    apiKey: z.string().catch(openRouterProviderFallback.apiKey),
    baseUrl: z.string().catch(openRouterProviderFallback.baseUrl),
    useCustomUrl: z.boolean().catch(openRouterProviderFallback.useCustomUrl),
  })
  .catch(openRouterProviderFallback)
// Connection settings for the SiliconFlow provider; invalid stored entries
// collapse to this blank fallback via the object-level .catch().
const siliconFlowProviderFallback = {
  name: 'SiliconFlow',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const SiliconFlowProviderSchema = z
  .object({
    name: z.literal(siliconFlowProviderFallback.name),
    apiKey: z.string().catch(siliconFlowProviderFallback.apiKey),
    baseUrl: z.string().catch(siliconFlowProviderFallback.baseUrl),
    useCustomUrl: z.boolean().catch(siliconFlowProviderFallback.useCustomUrl),
  })
  .catch(siliconFlowProviderFallback)
// Connection settings for the Alibaba Qwen provider; invalid stored entries
// collapse to this blank fallback via the object-level .catch().
const alibabaQwenProviderFallback = {
  name: 'AlibabaQwen',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const AlibabaQwenProviderSchema = z
  .object({
    name: z.literal(alibabaQwenProviderFallback.name),
    apiKey: z.string().catch(alibabaQwenProviderFallback.apiKey),
    baseUrl: z.string().catch(alibabaQwenProviderFallback.baseUrl),
    useCustomUrl: z.boolean().catch(alibabaQwenProviderFallback.useCustomUrl),
  })
  .catch(alibabaQwenProviderFallback)
// Connection settings for the Anthropic provider.
// NOTE(review): baseUrl is .optional() here while most sibling schemas use
// z.string().catch('') — presumably deliberate (no custom URL required for
// this provider); confirm before unifying.
const anthropicProviderFallback = {
  name: 'Anthropic',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const AnthropicProviderSchema = z
  .object({
    name: z.literal(anthropicProviderFallback.name),
    apiKey: z.string().catch(anthropicProviderFallback.apiKey),
    baseUrl: z.string().optional(),
    useCustomUrl: z.boolean().catch(anthropicProviderFallback.useCustomUrl),
  })
  .catch(anthropicProviderFallback)
// Connection settings for the DeepSeek provider; invalid stored entries
// collapse to the blank fallback via the object-level .catch().
// FIX: the name literal was 'DeepSeek', but the ApiProvider enum declares
// Deepseek = "Deepseek" — with the mismatched casing an entry named per the
// enum could never satisfy z.literal and always degraded to the fallback.
// The literal and the fallback now match the enum value exactly.
const DeepSeekProviderSchema = z.object({
  name: z.literal('Deepseek'),
  apiKey: z.string().catch(''),
  baseUrl: z.string().catch(''),
  useCustomUrl: z.boolean().catch(false)
}).catch({
  name: 'Deepseek',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false
})
// Connection settings for the Google provider; invalid stored entries
// collapse to this blank fallback via the object-level .catch().
const googleProviderFallback = {
  name: 'Google',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const GoogleProviderSchema = z
  .object({
    name: z.literal(googleProviderFallback.name),
    apiKey: z.string().catch(googleProviderFallback.apiKey),
    baseUrl: z.string().catch(googleProviderFallback.baseUrl),
    useCustomUrl: z.boolean().catch(googleProviderFallback.useCustomUrl),
  })
  .catch(googleProviderFallback)
// Connection settings for the OpenAI provider.
// NOTE(review): baseUrl is .optional() here (like Anthropic) while most
// siblings use z.string().catch('') — confirm the difference is intentional.
const openAIProviderFallback = {
  name: 'OpenAI',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const OpenAIProviderSchema = z
  .object({
    name: z.literal(openAIProviderFallback.name),
    apiKey: z.string().catch(openAIProviderFallback.apiKey),
    baseUrl: z.string().optional(),
    useCustomUrl: z.boolean().catch(openAIProviderFallback.useCustomUrl),
  })
  .catch(openAIProviderFallback)
// Connection settings for a generic OpenAI-compatible endpoint.
// Unlike the other providers, useCustomUrl defaults to true — a compatible
// endpoint only makes sense with a user-supplied base URL.
const openAICompatibleProviderFallback = {
  name: 'OpenAICompatible',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: true,
} as const
const OpenAICompatibleProviderSchema = z
  .object({
    name: z.literal(openAICompatibleProviderFallback.name),
    apiKey: z.string().catch(openAICompatibleProviderFallback.apiKey),
    baseUrl: z.string().optional(),
    useCustomUrl: z.boolean().catch(openAICompatibleProviderFallback.useCustomUrl),
  })
  .catch(openAICompatibleProviderFallback)
// Connection settings for the Ollama provider; invalid stored entries
// collapse to this blank fallback via the object-level .catch().
const ollamaProviderFallback = {
  name: 'Ollama',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const OllamaProviderSchema = z
  .object({
    name: z.literal(ollamaProviderFallback.name),
    apiKey: z.string().catch(ollamaProviderFallback.apiKey),
    baseUrl: z.string().catch(ollamaProviderFallback.baseUrl),
    useCustomUrl: z.boolean().catch(ollamaProviderFallback.useCustomUrl),
  })
  .catch(ollamaProviderFallback)
// Connection settings for the Groq provider; invalid stored entries
// collapse to this blank fallback via the object-level .catch().
const groqProviderFallback = {
  name: 'Groq',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
} as const
const GroqProviderSchema = z
  .object({
    name: z.literal(groqProviderFallback.name),
    apiKey: z.string().catch(groqProviderFallback.apiKey),
    baseUrl: z.string().catch(groqProviderFallback.baseUrl),
    useCustomUrl: z.boolean().catch(groqProviderFallback.useCustomUrl),
  })
  .catch(groqProviderFallback)
const ollamaModelSchema = z.object({
baseUrl: z.string().catch(''),
model: z.string().catch(''),
@@ -61,7 +193,34 @@ export const InfioSettingsSchema = z.object({
// Version
version: z.literal(SETTINGS_SCHEMA_VERSION).catch(SETTINGS_SCHEMA_VERSION),
// activeModels
// Provider
defaultProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.OpenRouter),
infioProvider: InfioProviderSchema,
openrouterProvider: OpenRouterProviderSchema,
siliconflowProvider: SiliconFlowProviderSchema,
alibabaQwenProvider: AlibabaQwenProviderSchema,
anthropicProvider: AnthropicProviderSchema,
deepseekProvider: DeepSeekProviderSchema,
openaiProvider: OpenAIProviderSchema,
googleProvider: GoogleProviderSchema,
ollamaProvider: OllamaProviderSchema,
groqProvider: GroqProviderSchema,
openaicompatibleProvider: OpenAICompatibleProviderSchema,
// Chat Model
chatModelProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.OpenRouter),
chatModelId: z.string().catch(''),
// Apply Model
applyModelProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.OpenRouter),
applyModelId: z.string().catch(''),
// Embedding Model
embeddingModelProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.Google),
embeddingModelId: z.string().catch(''),
/// [compatible]
// activeModels [compatible]
activeModels: z.array(
z.object({
name: z.string(),
@@ -74,17 +233,17 @@ export const InfioSettingsSchema = z.object({
dimension: z.number().optional(),
})
).catch(DEFAULT_MODELS),
// API Keys
// API Keys [compatible]
infioApiKey: z.string().catch(''),
openAIApiKey: z.string().catch(''),
anthropicApiKey: z.string().catch(''),
geminiApiKey: z.string().catch(''),
groqApiKey: z.string().catch(''),
deepseekApiKey: z.string().catch(''),
// DEFAULT Chat Model
chatModelId: z.string().catch('deepseek-chat'),
ollamaEmbeddingModel: ollamaModelSchema.catch({
baseUrl: '',
model: '',
}),
ollamaChatModel: ollamaModelSchema.catch({
baseUrl: '',
model: '',
@@ -94,9 +253,6 @@ export const InfioSettingsSchema = z.object({
apiKey: '',
model: '',
}),
// DEFAULT Apply Model
applyModelId: z.string().catch('deepseek-chat'),
ollamaApplyModel: ollamaModelSchema.catch({
baseUrl: '',
model: '',
@@ -107,15 +263,6 @@ export const InfioSettingsSchema = z.object({
model: '',
}),
// DEFAULT Embedding Model
embeddingModelId: z.string().catch(
'text-embedding-004',
),
ollamaEmbeddingModel: ollamaModelSchema.catch({
baseUrl: '',
model: '',
}),
// System Prompt
systemPrompt: z.string().catch(''),
@@ -132,10 +279,13 @@ export const InfioSettingsSchema = z.object({
// autocomplete options
autocompleteEnabled: z.boolean(),
advancedMode: z.boolean(),
// [compatible]
apiProvider: z.enum(['azure', 'openai', "ollama"]),
azureOAIApiSettings: z.string().catch(''),
openAIApiSettings: z.string().catch(''),
ollamaApiSettings: z.string().catch(''),
triggers: z.array(triggerSchema),
delay: z.number().int().min(MIN_DELAY, { message: "Delay must be between 0ms and 2000ms" }).max(MAX_DELAY, { message: "Delay must be between 0ms and 2000ms" }),
modelOptions: modelOptionsSchema,