diff --git a/src/shared/providers/definitions/llmapi.ts b/src/shared/providers/definitions/llmapi.ts
new file mode 100644
index 000000000..e406078a2
--- /dev/null
+++ b/src/shared/providers/definitions/llmapi.ts
@@ -0,0 +1,32 @@
+import { ModelProviderEnum, ModelProviderType } from '../../types'
+import { defineProvider } from '../registry'
+import LLMApi from './models/llmapi'
+
+export const llmapiProvider = defineProvider({
+  id: ModelProviderEnum.LLMApi,
+  name: 'LLM API',
+  type: ModelProviderType.OpenAI,
+  urls: {
+    website: 'https://llmapi.ai/',
+  },
+  defaultSettings: {
+    apiHost: 'https://api.llmapi.ai',
+    models: [],
+  },
+  createModel: (config) => {
+    return new LLMApi(
+      {
+        apiKey: config.providerSetting.apiKey || '',
+        model: config.model,
+        temperature: config.settings.temperature,
+        topP: config.settings.topP,
+        maxOutputTokens: config.settings.maxTokens,
+        stream: config.settings.stream,
+      },
+      config.dependencies
+    )
+  },
+  getDisplayName: (modelId, providerSettings) => {
+    return `LLM API (${providerSettings?.models?.find((m) => m.modelId === modelId)?.nickname || modelId})`
+  },
+})
diff --git a/src/shared/providers/definitions/models/llmapi.ts b/src/shared/providers/definitions/models/llmapi.ts
new file mode 100644
index 000000000..efc365457
--- /dev/null
+++ b/src/shared/providers/definitions/models/llmapi.ts
@@ -0,0 +1,28 @@
+import OpenAICompatible, { type OpenAICompatibleSettings } from '../../../models/openai-compatible'
+import type { ModelDependencies } from '../../../types/adapters'
+
+interface Options extends OpenAICompatibleSettings {}
+
+export default class LLMApi extends OpenAICompatible {
+  public name = 'LLM API'
+  public options: Options
+  constructor(options: Omit<Options, 'apiHost'>, dependencies: ModelDependencies) {
+    const apiHost = 'https://api.llmapi.ai/v1'
+    super(
+      {
+        apiKey: options.apiKey,
+        apiHost,
+        model: options.model,
+        temperature: options.temperature,
+        topP: options.topP,
+        maxOutputTokens: options.maxOutputTokens,
+        stream: options.stream,
+      },
+      dependencies
+    )
+    this.options = {
+      ...options,
+      apiHost,
+    }
+  }
+}
diff --git a/src/shared/providers/index.ts b/src/shared/providers/index.ts
index e1707ad13..13624fbef 100644
--- a/src/shared/providers/index.ts
+++ b/src/shared/providers/index.ts
@@ -16,6 +16,7 @@ import './definitions/lmstudio'
 import './definitions/azure'
 import './definitions/groq'
 import './definitions/xai'
+import './definitions/llmapi'
 import './definitions/mistral-ai'
 import './definitions/perplexity'
 import './definitions/volcengine'
diff --git a/src/shared/types/provider.ts b/src/shared/types/provider.ts
index dc052d79f..5579cd22e 100644
--- a/src/shared/types/provider.ts
+++ b/src/shared/types/provider.ts
@@ -18,6 +18,7 @@ export enum ModelProviderEnum {
   LMStudio = 'lm-studio',
   Perplexity = 'perplexity',
   XAI = 'xAI',
+  LLMApi = 'llmapi',
   OpenRouter = 'openrouter',
   Custom = 'custom',
 }
diff --git a/test/integration/model-provider/model-provider.test.ts b/test/integration/model-provider/model-provider.test.ts
index ea66bd31d..8afcf67f5 100644
--- a/test/integration/model-provider/model-provider.test.ts
+++ b/test/integration/model-provider/model-provider.test.ts
@@ -70,6 +70,7 @@ const PROVIDER_TEST_MODELS: Record = {
     { modelId: 'anthropic/claude-haiku-4.5', capabilities: ['tool_use'] },
     { modelId: 'deepseek/deepseek-v3.2', capabilities: ['tool_use', 'reasoning'] },
   ],
+  [ModelProviderEnum.LLMApi]: [],
   [ModelProviderEnum.Perplexity]: [],
   [ModelProviderEnum.Custom]: [],
 }