Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions src/renderer/components/icons/ProviderIcon.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,13 @@ export default function ProviderIcon(props: { className?: string; size?: number;
<path d="M9.27 15.29l7.978-5.897c.391-.29.95-.177 1.137.272.98 2.369.542 5.215-1.41 7.169-1.951 1.954-4.667 2.382-7.149 1.406l-2.711 1.257c3.889 2.661 8.611 2.003 11.562-.953 2.341-2.344 3.066-5.539 2.388-8.42l.006.007c-.983-4.232.242-5.924 2.75-9.383.06-.082.12-.164.179-.248l-3.301 3.305v-.01L9.267 15.292M7.623 16.723c-2.792-2.67-2.31-6.801.071-9.184 1.761-1.763 4.647-2.483 7.166-1.425l2.705-1.25a7.808 7.808 0 00-1.829-1A8.975 8.975 0 005.984 5.83c-2.533 2.536-3.33 6.436-1.962 9.764 1.022 2.487-.653 4.246-2.34 6.022-.599.63-1.199 1.259-1.682 1.925l7.62-6.815"></path>
</>
)}
{provider === ModelProviderEnum.ModelScope && (
  <>
    <path d="M2.667 5.3H8v2.667H5.333v2.666H2.667V8.467H.5v2.166h2.167V13.3H0V7.967h2.667V5.3zM2.667 13.3h2.666v2.667H8v2.666H2.667V13.3zM8 10.633h2.667V13.3H8v-2.667zM13.333 13.3v2.667h-2.666V13.3h2.666zM13.333 13.3v-2.667H16V13.3h-2.667z"></path>
    {/* React requires camelCase SVG props: `clip-rule` triggers an "Invalid DOM property" dev warning. */}
    {/* NOTE(review): clipRule only has an effect inside a <clipPath>; fillRule="evenodd" may have been intended — confirm against the source icon. */}
    <path clipRule="evenodd" d="M21.333 13.3v-2.667h-2.666V7.967H16V5.3h5.333v2.667H24V13.3h-2.667zm0-2.667H23.5V8.467h-2.167v2.166z"></path>
    <path d="M21.333 13.3v5.333H16v-2.666h2.667V13.3h2.666z"></path>
  </>
)}
</svg>
)
}
2 changes: 2 additions & 0 deletions src/renderer/packages/model-setting-utils/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import PerplexitySettingUtil from './perplexity-setting-util'
import SiliconFlowSettingUtil from './siliconflow-setting-util'
import VolcEngineSettingUtil from './volcengine-setting-util'
import XAISettingUtil from './xai-setting-util'
import ModelScopeSettingUtil from './modelscope-setting-util'

export function getModelSettingUtil(
aiProvider: ModelProvider,
Expand All @@ -50,6 +51,7 @@ export function getModelSettingUtil(
[ModelProviderEnum.LMStudio]: LMStudioSettingUtil,
[ModelProviderEnum.Perplexity]: PerplexitySettingUtil,
[ModelProviderEnum.XAI]: XAISettingUtil,
[ModelProviderEnum.ModelScope]: ModelScopeSettingUtil,
[ModelProviderEnum.Custom]: CustomModelSettingUtil,
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import ModelScope from 'src/shared/models/modelscope'
import { type ModelProvider, ModelProviderEnum, type ProviderSettings, type SessionType } from 'src/shared/types'
import { createModelDependencies } from '@/adapters'
import BaseConfig from './base-config'
import type { ModelSettingUtil } from './interface'

/**
 * Settings helper for the ModelScope provider: supplies the UI display name
 * for the selected model and fetches the remote model list.
 */
export default class ModelScopeSettingUtil extends BaseConfig implements ModelSettingUtil {
  public provider: ModelProvider = ModelProviderEnum.ModelScope

  /**
   * Returns the label shown in the UI for the current model, preferring the
   * user-configured nickname over the raw model id, e.g. `ModelScope API (Qwen/QwQ-32B)`.
   */
  async getCurrentModelDisplayName(
    model: string,
    sessionType: SessionType,
    providerSettings?: ProviderSettings
  ): Promise<string> {
    const nickname = providerSettings?.models?.find((m) => m.modelId === model)?.nickname
    return `ModelScope API (${nickname || model})`
  }

  /**
   * Queries ModelScope for the available models. The placeholder `model`
   * config satisfies the constructor but is not used by listModels().
   */
  protected async listProviderModels(settings: ProviderSettings) {
    const dependencies = await createModelDependencies()
    // `|| ''` instead of a non-null assertion: consistent with how getModel()
    // constructs ModelScope, and avoids lying to the type checker when the
    // key is genuinely unset.
    const modelscope = new ModelScope(
      { apiKey: settings.apiKey || '', model: { modelId: '', capabilities: [] } },
      dependencies
    )
    return modelscope.listModels()
  }
}
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
11 changes: 11 additions & 0 deletions src/renderer/stores/migration.ts
Original file line number Diff line number Diff line change
Expand Up @@ -466,6 +466,10 @@ async function migrate_9_to_10(dataStore: MigrateStore): Promise<boolean> {
xAIKey,
xAIModel,

// modelscope
modelscopeApiKey,
modelscopeModel,

// custom provider
selectedCustomProviderId, // 选中的自定义提供者 ID,仅当 aiProvider 为 custom 时有效
customProviders: oldCustomProviders,
Expand Down Expand Up @@ -581,6 +585,12 @@ async function migrate_9_to_10(dataStore: MigrateStore): Promise<boolean> {
}
log.info('migrate chatglm settings done')
}
if (modelscopeApiKey) {
providers[ModelProviderEnum.ModelScope] = {
apiKey: modelscopeApiKey,
}
log.info('migrate modelscope settings done')
}

try {
if (oldCustomProviders) {
Expand Down Expand Up @@ -649,6 +659,7 @@ async function migrate_9_to_10(dataStore: MigrateStore): Promise<boolean> {
[ModelProviderEnum.Perplexity]: 'perplexityModel',
[ModelProviderEnum.Groq]: 'groqModel',
[ModelProviderEnum.ChatGLM6B]: 'chatglmModel',
[ModelProviderEnum.ModelScope]: 'modelscopeModel',
[ModelProviderEnum.Custom]: 'model',
}[sessionProvider]
const modelId: string = oldSessionSettings[modelKey!] ?? oldSettings[modelKey!]
Expand Down
83 changes: 83 additions & 0 deletions src/shared/defaults.ts
Original file line number Diff line number Diff line change
Expand Up @@ -821,4 +821,87 @@ export const SystemProviders: ProviderBaseInfo[] = [
],
},
},
{
id: ModelProviderEnum.ModelScope,
name: 'ModelScope',
type: ModelProviderType.OpenAI,
urls: {
website: 'https://modelscope.cn',
},
defaultSettings: {
apiHost: 'https://api-inference.modelscope.cn/v1',
models: [
{
modelId: 'deepseek-ai/DeepSeek-V3.2-Exp',
capabilities: ['tool_use'],
contextWindow: 160_000,
},
{
modelId: 'deepseek-ai/DeepSeek-V3.1',
capabilities: ['tool_use'],
contextWindow: 64_000,
},
{
modelId: 'deepseek-ai/DeepSeek-R1-0528',
capabilities: ['reasoning', 'tool_use'],
contextWindow: 64_000,
},
{
modelId: 'Qwen/Qwen2.5-7B-Instruct',
capabilities: ['tool_use'],
contextWindow: 128_000,
},
{
modelId: 'Qwen/Qwen2.5-14B-Instruct',
capabilities: ['tool_use'],
contextWindow: 128_000,
},
{
modelId: 'Qwen/Qwen2.5-32B-Instruct',
capabilities: ['tool_use'],
contextWindow: 128_000,
},
{
modelId: 'Qwen/Qwen2.5-72B-Instruct',
capabilities: ['tool_use'],
contextWindow: 128_000,
},
{
modelId: 'Qwen/Qwen2.5-VL-32B-Instruct',
capabilities: ['vision'],
contextWindow: 128_000,
},
{
modelId: 'Qwen/Qwen2.5-VL-72B-Instruct',
capabilities: ['vision'],
contextWindow: 128_000,
},
{
modelId: 'Qwen/QVQ-72B-Preview',
capabilities: ['vision'],
contextWindow: 128_000,
},
{
modelId: 'Qwen/QwQ-32B',
capabilities: ['tool_use'],
contextWindow: 32_000,
},
{
modelId: 'Qwen/Qwen3-8B',
capabilities: ['tool_use'],
contextWindow: 32_000,
},
{
modelId: 'Qwen/Qwen3-VL-8B-Instruct',
capabilities: ['vision'],
contextWindow: 32_000,
},
{
modelId: 'Qwen/Qwen3-235B-A22B-Instruct-2507',
capabilities: ['tool_use'],
contextWindow: 256_000,
},
],
},
},
]
19 changes: 19 additions & 0 deletions src/shared/models/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ import SiliconFlow from './siliconflow'
import type { ModelInterface } from './types'
import VolcEngine from './volcengine'
import XAI from './xai'
import ModelScope from './modelscope'

export function getProviderSettings(setting: SessionSettings, globalSettings: Settings) {
console.debug('getModel', setting.provider, setting.modelId)
Expand Down Expand Up @@ -302,6 +303,18 @@ export function getModel(
},
dependencies
)
case ModelProviderEnum.ModelScope:
return new ModelScope(
{
apiKey: providerSetting.apiKey || '',
model,
temperature: settings.temperature,
topP: settings.topP,
maxOutputTokens: settings.maxTokens,
stream: settings.stream,
},
dependencies
)
case ModelProviderEnum.OpenAIResponses:
return new CustomOpenAIResponses(
{
Expand Down Expand Up @@ -403,6 +416,7 @@ export const aiProviderNameHash: Record<ModelProvider, string> = {
[ModelProviderEnum.Perplexity]: 'Perplexity API',
[ModelProviderEnum.XAI]: 'xAI API',
[ModelProviderEnum.OpenRouter]: 'OpenRouter API',
[ModelProviderEnum.ModelScope]: 'ModelScope API',
[ModelProviderEnum.Custom]: 'Custom Provider',
}

Expand Down Expand Up @@ -488,6 +502,11 @@ export const AIModelProviderMenuOptionList = [
label: aiProviderNameHash[ModelProviderEnum.ChatGLM6B],
disabled: false,
},
{
value: ModelProviderEnum.ModelScope,
label: aiProviderNameHash[ModelProviderEnum.ModelScope],
disabled: false,
},
// {
// value: 'hunyuan',
// label: '腾讯混元',
Expand Down
28 changes: 28 additions & 0 deletions src/shared/models/modelscope.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import type { ModelDependencies } from '../types/adapters'
import OpenAICompatible, { type OpenAICompatibleSettings } from './openai-compatible'

interface Options extends OpenAICompatibleSettings {}

/**
 * ModelScope provider: a thin adapter over the generic OpenAI-compatible
 * client that pins the API host to ModelScope's inference gateway, so callers
 * only supply credentials, the model, and generation settings.
 */
export default class ModelScope extends OpenAICompatible {
  public name = 'ModelScope'
  public options: Options

  constructor(options: Omit<Options, 'apiHost'>, dependencies: ModelDependencies) {
    // The endpoint is fixed; the constructor signature deliberately omits
    // apiHost so callers cannot override it.
    const resolved: Options = {
      ...options,
      apiHost: 'https://api-inference.modelscope.cn/v1',
    }
    super(resolved, dependencies)
    this.options = resolved
  }
}
1 change: 1 addition & 0 deletions src/shared/types/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ export enum ModelProviderEnum {
Perplexity = 'perplexity',
XAI = 'xAI',
OpenRouter = 'openrouter',
ModelScope = 'modelscope',
Custom = 'custom',
}

Expand Down
1 change: 1 addition & 0 deletions src/shared/utils/llm_utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,7 @@ export function isOpenAICompatible(providerId: string, _modelId: string) {
ModelProviderEnum.Groq,
ModelProviderEnum.DeepSeek,
ModelProviderEnum.LMStudio,
ModelProviderEnum.ModelScope,
].includes(providerId as ModelProviderEnum) || providerId.startsWith('custom-provider-')
)
}