feat: enable cherryin provider

kangfenmao
2025-09-23 19:18:42 +08:00
parent 3b34efd33a
commit 37218eef4f
40 changed files with 173 additions and 117 deletions


@@ -98,7 +98,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@@ -115,7 +115,7 @@ jobs:
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@@ -127,7 +127,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}


@@ -85,7 +85,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@@ -103,7 +103,7 @@ jobs:
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}
@@ -115,7 +115,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_OPTIONS: --max-old-space-size=8192
MAIN_VITE_CHERRYIN_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYIN_CLIENT_SECRET }}
MAIN_VITE_CHERRYAI_CLIENT_SECRET: ${{ secrets.MAIN_VITE_CHERRYAI_CLIENT_SECRET }}
MAIN_VITE_MINERU_API_KEY: ${{ vars.MAIN_VITE_MINERU_API_KEY }}
RENDERER_VITE_AIHUBMIX_SECRET: ${{ vars.RENDERER_VITE_AIHUBMIX_SECRET }}
RENDERER_VITE_PPIO_APP_SECRET: ${{ vars.RENDERER_VITE_PPIO_APP_SECRET }}


@@ -15,7 +15,7 @@
".gitignore",
"scripts/cloudflare-worker.js",
"src/main/integration/nutstore/sso/lib/**",
"src/main/integration/cherryin/index.js",
"src/main/integration/cherryai/index.js",
"src/main/integration/nutstore/sso/lib/**",
"src/renderer/src/ui/**",
"packages/**/dist",


@@ -59,7 +59,7 @@ export default defineConfig([
'.gitignore',
'scripts/cloudflare-worker.js',
'src/main/integration/nutstore/sso/lib/**',
'src/main/integration/cherryin/index.js',
'src/main/integration/cherryai/index.js',
'src/main/integration/nutstore/sso/lib/**',
'src/renderer/src/ui/**',
'packages/**/dist'


@@ -330,6 +330,6 @@ export enum IpcChannel {
// OCR
OCR_ocr = 'ocr:ocr',
// Cherryin
Cherryin_GetSignature = 'cherryin:get-signature'
// CherryAI
Cherryai_GetSignature = 'cherryai:get-signature'
}


@@ -21,4 +21,4 @@ export const titleBarOverlayLight = {
symbolColor: '#000'
}
global.CHERRYIN_CLIENT_SECRET = import.meta.env.MAIN_VITE_CHERRYIN_CLIENT_SECRET
global.CHERRYAI_CLIENT_SECRET = import.meta.env.MAIN_VITE_CHERRYAI_CLIENT_SECRET


@@ -0,0 +1 @@
var _0xe15d9a;const crypto=require("\u0063\u0072\u0079\u0070\u0074\u006F");_0xe15d9a=(988194^988194)+(417607^417603);var _0x9b_0x742=(247379^247387)+(371889^371892);const CLIENT_ID="\u0063\u0068\u0065\u0072\u0072\u0079\u002D\u0073\u0074\u0075\u0064\u0069\u006F";_0x9b_0x742=(202849^202856)+(796590^796585);var _0xa971e=(422203^422203)+(167917^167919);const CLIENT_SECRET_SUFFIX="\u0047\u0076\u0049\u0036\u0049\u0035\u005A\u0072\u0045\u0048\u0063\u0047\u004F\u0057\u006A\u004F\u0035\u0041\u004B\u0068\u004A\u004B\u0047\u006D\u006E\u0077\u0077\u0047\u0066\u004D\u0036\u0032\u0058\u004B\u0070\u0057\u0071\u006B\u006A\u0068\u0076\u007A\u0052\u0055\u0032\u004E\u005A\u0049\u0069\u006E\u004D\u0037\u0037\u0061\u0054\u0047\u0049\u0071\u0068\u0071\u0079\u0073\u0030\u0067";_0xa971e=(607707^607705)+(127822^127823);const CLIENT_SECRET=global['\u0043\u0048\u0045\u0052\u0052\u0059\u0041\u0049\u005F\u0043\u004C\u0049\u0045\u004E\u0054\u005F\u0053\u0045\u0043\u0052\u0045\u0054']+"\u002E"+CLIENT_SECRET_SUFFIX;class SignatureClient{constructor(clientId,clientSecret){this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064']=clientId||CLIENT_ID;this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']=clientSecret||CLIENT_SECRET;this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']=this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']['\u0062\u0069\u006E\u0064'](this);}generateSignature(options){const{'\u006D\u0065\u0074\u0068\u006F\u0064':method,'\u0070\u0061\u0074\u0068':path,'\u0071\u0075\u0065\u0072\u0079':query='','\u0062\u006F\u0064\u0079':body=''}=options;var _0x99a7f=(735625^735624)+(520507^520508);const timestamp=Math['\u0066\u006C\u006F\u006F\u0072'](Date['\u006E\u006F\u0077']()/(351300^352172))['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();_0x99a7f=376728^376729;var _0x733a=(876666^876671)+(658949^658944);let bodyString='';_0x733a="kgclcd".split("").reverse().join("");if(body){if(typeof body==="tcejbo".split("").reverse().join("")){bodyString=JSON['\u0073\u0074\u0072\u0069\u006E\u0067\u0069\u0066\u0079'](body);}else{bodyString=body['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();}}var _0xd8edff;const signatureParts=[method['\u0074\u006F\u0055\u0070\u0070\u0065\u0072\u0043\u0061\u0073\u0065'](),path,query,this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],timestamp,bodyString];_0xd8edff=(929945^929951)+(569907^569915);var _0x9g3c3b=(705579^705579)+(981211^981209);const signatureString=signatureParts['\u006A\u006F\u0069\u006E']("\u000A");_0x9g3c3b=527497^527499;var _0x95b35f=(811203^811200)+(628072^628076);const hmac=crypto['\u0063\u0072\u0065\u0061\u0074\u0065\u0048\u006D\u0061\u0063']("\u0073\u0068\u0061\u0032\u0035\u0036",this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']);_0x95b35f=104120^104112;hmac['\u0075\u0070\u0064\u0061\u0074\u0065'](signatureString);var _0xd0f6g;const signature=hmac['\u0064\u0069\u0067\u0065\u0073\u0074']("xeh".split("").reverse().join(""));_0xd0f6g=(615019^615018)+(266997^266992);return{'X-Client-ID':this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],"\u0058\u002D\u0054\u0069\u006D\u0065\u0073\u0074\u0061\u006D\u0070":timestamp,'X-Signature':signature};}}const signatureClient=new SignatureClient();const 
generateSignature=signatureClient['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065'];module['\u0065\u0078\u0070\u006F\u0072\u0074\u0073']={'\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065\u0043\u006C\u0069\u0065\u006E\u0074':SignatureClient,"generateSignature":generateSignature};
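
For readability, here is a deobfuscated sketch of what the added signing module above computes. This is my reading of the escaped strings, not code from the commit, and the hard-coded secret suffix is deliberately omitted:

// Assumed reconstruction of the obfuscated signer: HMAC-SHA256 request signing.
import { createHmac } from 'node:crypto'

interface SignOptions {
  method: string
  path: string
  query?: string
  body?: unknown
}

const CLIENT_ID = 'cherry-studio'
// The real module appends a hard-coded suffix to the injected secret; a placeholder is used here.
const CLIENT_SECRET = `${(globalThis as any).CHERRYAI_CLIENT_SECRET}.<suffix>`

export function generateSignature({ method, path, query = '', body = '' }: SignOptions) {
  const timestamp = Math.floor(Date.now() / 1000).toString() // Unix seconds
  const bodyString = body && typeof body === 'object' ? JSON.stringify(body) : String(body ?? '')
  // Sign METHOD, path, query, client id, timestamp and body, joined by newlines
  const payload = [method.toUpperCase(), path, query, CLIENT_ID, timestamp, bodyString].join('\n')
  const signature = createHmac('sha256', CLIENT_SECRET).update(payload).digest('hex')
  return { 'X-Client-ID': CLIENT_ID, 'X-Timestamp': timestamp, 'X-Signature': signature }
}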


@@ -1 +0,0 @@
var _0x6gg;const crypto=require("\u0063\u0072\u0079\u0070\u0074\u006F");_0x6gg='\u006D\u006F\u006C\u006A\u0065\u0065';var _0x111cbe;const CLIENT_ID="oiduts-yrrehc".split("").reverse().join("");_0x111cbe=(977158^977167)+(164595^164594);var _0x6d6adc=(756649^756650)+(497587^497587);const CLIENT_SECRET_SUFFIX="\u0047\u0076\u0049\u0036\u0049\u0035\u005A\u0072\u0045\u0048\u0063\u0047\u004F\u0057\u006A\u004F\u0035\u0041\u004B\u0068\u004A\u004B\u0047\u006D\u006E\u0077\u0077\u0047\u0066\u004D\u0036\u0032\u0058\u004B\u0070\u0057\u0071\u006B\u006A\u0068\u0076\u007A\u0052\u0055\u0032\u004E\u005A\u0049\u0069\u006E\u004D\u0037\u0037\u0061\u0054\u0047\u0049\u0071\u0068\u0071\u0079\u0073\u0030\u0067";_0x6d6adc=233169^233176;const CLIENT_SECRET=global['\u0043\u0048\u0045\u0052\u0052\u0059\u0049\u004E\u005F\u0043\u004C\u0049\u0045\u004E\u0054\u005F\u0053\u0045\u0043\u0052\u0045\u0054']+"\u002E"+CLIENT_SECRET_SUFFIX;class SignatureClient{constructor(clientId,clientSecret){this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064']=clientId||CLIENT_ID;this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']=clientSecret||CLIENT_SECRET;this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']=this['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065']['\u0062\u0069\u006E\u0064'](this);}generateSignature(options){const{"method":method,"path":path,"query":query='',"body":body=''}=options;const timestamp=Math['\u0066\u006C\u006F\u006F\u0072'](Date['\u006E\u006F\u0077']()/(110765^111429))['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();var _0xe08cc=(212246^212244)+(773521^773523);let bodyString='';_0xe08cc=(606778^606776)+(962748^962740);if(body){if(typeof body==="\u006F\u0062\u006A\u0065\u0063\u0074"){bodyString=JSON['\u0073\u0074\u0072\u0069\u006E\u0067\u0069\u0066\u0079'](body);}else{bodyString=body['\u0074\u006F\u0053\u0074\u0072\u0069\u006E\u0067']();}}const signatureParts=[method['\u0074\u006F\u0055\u0070\u0070\u0065\u0072\u0043\u0061\u0073\u0065'](),path,query,this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],timestamp,bodyString];var _0x5693g=(936664^936668)+(685268^685277);const signatureString=signatureParts['\u006A\u006F\u0069\u006E']("\u000A");_0x5693g=(266582^266576)+(337322^337315);const hmac=crypto['\u0063\u0072\u0065\u0061\u0074\u0065\u0048\u006D\u0061\u0063']("\u0073\u0068\u0061\u0032\u0035\u0036",this['\u0063\u006C\u0069\u0065\u006E\u0074\u0053\u0065\u0063\u0072\u0065\u0074']);hmac['\u0075\u0070\u0064\u0061\u0074\u0065'](signatureString);var _0x5fba=(354480^354481)+(537437^537434);const signature=hmac['\u0064\u0069\u0067\u0065\u0073\u0074']("\u0068\u0065\u0078");_0x5fba=(249614^249610)+(915906^915914);return{'X-Client-ID':this['\u0063\u006C\u0069\u0065\u006E\u0074\u0049\u0064'],'X-Timestamp':timestamp,'X-Signature':signature};}}const signatureClient=new SignatureClient();const generateSignature=signatureClient['\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065'];module['\u0065\u0078\u0070\u006F\u0072\u0074\u0073']={'\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065\u0043\u006C\u0069\u0065\u006E\u0074':SignatureClient,'\u0067\u0065\u006E\u0065\u0072\u0061\u0074\u0065\u0053\u0069\u0067\u006E\u0061\u0074\u0075\u0072\u0065':generateSignature};


@@ -4,7 +4,7 @@ import path from 'node:path'
import { loggerService } from '@logger'
import { isLinux, isMac, isPortable, isWin } from '@main/constant'
import { generateSignature } from '@main/integration/cherryin'
import { generateSignature } from '@main/integration/cherryai'
import anthropicService from '@main/services/AnthropicService'
import { getBinaryPath, isBinaryExists, runInstallScript } from '@main/utils/process'
import { handleZoomFactor } from '@main/utils/zoom'
@@ -841,6 +841,6 @@ export function registerIpc(mainWindow: BrowserWindow, app: Electron.App) {
ocrService.ocr(file, provider)
)
// CherryIN
ipcMain.handle(IpcChannel.Cherryin_GetSignature, (_, params) => generateSignature(params))
// CherryAI
ipcMain.handle(IpcChannel.Cherryai_GetSignature, (_, params) => generateSignature(params))
}


@@ -455,9 +455,9 @@ const api = {
ocr: (file: SupportedOcrFile, provider: OcrProvider): Promise<OcrResult> =>
ipcRenderer.invoke(IpcChannel.OCR_ocr, file, provider)
},
cherryin: {
cherryai: {
generateSignature: (params: { method: string; path: string; query: string; body: Record<string, any> }) =>
ipcRenderer.invoke(IpcChannel.Cherryin_GetSignature, params)
ipcRenderer.invoke(IpcChannel.Cherryai_GetSignature, params)
},
windowControls: {
minimize: (): Promise<void> => ipcRenderer.invoke(IpcChannel.Windows_Minimize),


@@ -5,7 +5,7 @@ import { AihubmixAPIClient } from './aihubmix/AihubmixAPIClient'
import { AnthropicAPIClient } from './anthropic/AnthropicAPIClient'
import { AwsBedrockAPIClient } from './aws/AwsBedrockAPIClient'
import { BaseApiClient } from './BaseApiClient'
import { CherryinAPIClient } from './cherryin/CherryinAPIClient'
import { CherryAiAPIClient } from './cherryai/CherryAiAPIClient'
import { GeminiAPIClient } from './gemini/GeminiAPIClient'
import { VertexAPIClient } from './gemini/VertexAPIClient'
import { NewAPIClient } from './newapi/NewAPIClient'
@@ -34,8 +34,8 @@ export class ApiClientFactory {
let instance: BaseApiClient
// Check special Provider IDs first
if (provider.id === 'cherryin') {
instance = new CherryinAPIClient(provider) as BaseApiClient
if (provider.id === 'cherryai') {
instance = new CherryAiAPIClient(provider) as BaseApiClient
return instance
}


@@ -35,10 +35,16 @@ vi.mock('@renderer/config/models', () => ({
findTokenLimit: vi.fn().mockReturnValue(4096),
isFunctionCallingModel: vi.fn().mockReturnValue(false),
DEFAULT_MAX_TOKENS: 4096,
qwen38bModel: {
id: 'Qwen/Qwen3-8B',
name: 'Qwen3-8B',
provider: 'cherryai',
group: 'Qwen'
},
glm45FlashModel: {
id: 'glm-4.5-flash',
name: 'GLM-4.5-Flash',
provider: 'cherryin',
provider: 'cherryai',
group: 'GLM-4.5'
}
}))


@@ -4,7 +4,7 @@ import OpenAI from 'openai'
import { OpenAIAPIClient } from '../openai/OpenAIApiClient'
export class CherryinAPIClient extends OpenAIAPIClient {
export class CherryAiAPIClient extends OpenAIAPIClient {
constructor(provider: Provider) {
super(provider)
}
@@ -17,7 +17,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
options = options || {}
options.headers = options.headers || {}
const signature = await window.api.cherryin.generateSignature({
const signature = await window.api.cherryai.generateSignature({
method: 'POST',
path: '/chat/completions',
query: '',
@@ -34,7 +34,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
}
override getClientCompatibilityType(): string[] {
return ['CherryinAPIClient']
return ['CherryAiAPIClient']
}
public async listModels(): Promise<OpenAI.Models.Model[]> {
@@ -43,7 +43,7 @@ export class CherryinAPIClient extends OpenAIAPIClient {
const created = Date.now()
return models.map((id) => ({
id,
owned_by: 'cherryin',
owned_by: 'cherryai',
object: 'model' as const,
created
}))
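
Taken together, the override above amounts to the following per-request flow; a condensed sketch in which the final header merge and the requestBody name are assumptions based on this hunk, not lines from the commit:

// Each chat completion request is signed through the preload bridge before it is sent.
const signature = await window.api.cherryai.generateSignature({
  method: 'POST',
  path: '/chat/completions',
  query: '',
  body: requestBody // the serialized chat payload; variable name is illustrative
})
// The returned X-Client-ID / X-Timestamp / X-Signature headers are merged into the request options.
options.headers = { ...options.headers, ...signature }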


@@ -1,6 +1,6 @@
import { loggerService } from '@logger'
import { isZhipuModel } from '@renderer/config/models'
import store from '@renderer/store'
import { getStoreProviders } from '@renderer/hooks/useStore'
import { Chunk } from '@renderer/types/chunk'
import { CompletionsParams, CompletionsResult } from '../schemas'
@@ -87,7 +87,7 @@ function handleError(error: any, params: CompletionsParams): any {
* 2. For the image generation feature (enableGenerateImage set to true), use the generic error handling
*/
function handleZhipuError(error: any): any {
const provider = store.getState().llm.providers.find((p) => p.id === 'zhipu')
const provider = getStoreProviders().find((p) => p.id === 'zhipu')
const logger = loggerService.withContext('handleZhipuError')
// 定义错误模式映射


@@ -250,10 +250,10 @@ export async function prepareSpecialProviderConfig(
config.options.apiKey = token
break
}
case 'cherryin': {
case 'cherryai': {
config.options.fetch = async (url, options) => {
// Sign the final request parameters here
const signature = await window.api.cherryin.generateSignature({
const signature = await window.api.cherryai.generateSignature({
method: 'POST',
path: '/chat/completions',
query: '',

Binary image file changed (not shown): 40 KiB before, 10 KiB after.


@@ -15,7 +15,7 @@ interface Props {
}
export const FreeTrialModelTag: FC<Props> = ({ model, showLabel = true }) => {
if (model.provider !== 'cherryin') {
if (model.provider !== 'cherryai') {
return null
}


@@ -1,5 +1,6 @@
import { PushpinOutlined } from '@ant-design/icons'
import { FreeTrialModelTag } from '@renderer/components/FreeTrialModelTag'
import { HStack } from '@renderer/components/Layout'
import ModelTagsWithLabel from '@renderer/components/ModelTagsWithLabel'
import { TopView } from '@renderer/components/TopView'
import { DynamicVirtualList, type DynamicVirtualListRef } from '@renderer/components/VirtualList'
@@ -102,16 +103,18 @@ const PopupContainer: React.FC<Props> = ({ model, filter: baseFilter, showTagFil
(model: Model, provider: Provider, isPinned: boolean): FlatListModel => {
const modelId = getModelUniqId(model)
const groupName = getFancyProviderName(provider)
const isCherryin = provider.id === 'cherryin'
const isCherryAi = provider.id === 'cherryai'
return {
key: isPinned ? `${modelId}_pinned` : modelId,
type: 'model',
name: (
<ModelName>
{model.name}
{isPinned && <span style={{ color: 'var(--color-text-3)' }}> | {groupName}</span>}
{isCherryin && <FreeTrialModelTag model={model} showLabel={false} />}
<HStack alignItems="center">
{model.name}
{isPinned && <span style={{ color: 'var(--color-text-3)' }}> | {groupName}</span>}
</HStack>
{isCherryAi && <FreeTrialModelTag model={model} showLabel={false} />}
</ModelName>
),
tags: (
@@ -542,6 +545,7 @@ const ModelItemLeft = styled.div`
const ModelName = styled.div`
display: flex;
flex-direction: row;
justify-content: space-between;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;


@@ -16,7 +16,7 @@ describe('Qwen Model Detection', () => {
initialState: {}
}))
vi.mock('@renderer/services/AssistantService', () => ({
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryin' })
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryai' })
}))
})
test('isQwenReasoningModel', () => {
@@ -52,7 +52,7 @@ describe('Vision Model Detection', () => {
initialState: {}
}))
vi.mock('@renderer/services/AssistantService', () => ({
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryin' })
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryai' })
}))
})
test('isVisionModel', () => {
@@ -81,7 +81,7 @@ describe('Web Search Model Detection', () => {
initialState: {}
}))
vi.mock('@renderer/services/AssistantService', () => ({
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryin' })
getProviderByModel: vi.fn().mockReturnValue({ id: 'cherryai' })
}))
})
test('isWebSearchModel', () => {


@@ -3,14 +3,14 @@ import { Model, SystemProviderId } from '@renderer/types'
export const glm45FlashModel: Model = {
id: 'glm-4.5-flash',
name: 'GLM-4.5-Flash',
provider: 'cherryin',
provider: 'cherryai',
group: 'GLM-4.5'
}
export const qwen38bModel: Model = {
id: 'Qwen/Qwen3-8B',
name: 'Qwen3-8B',
provider: 'cherryin',
provider: 'cherryai',
group: 'Qwen'
}
@@ -25,20 +25,7 @@ export const SYSTEM_MODELS: Record<SystemProviderId | 'defaultModel', Model[]> =
// Default quick assistant model
glm45FlashModel
],
cherryin: [
{
id: 'glm-4.5-flash',
name: 'GLM-4.5-Flash',
provider: 'cherryin',
group: 'GLM-4.5'
},
{
id: 'Qwen/Qwen3-8B',
name: 'Qwen3-8B',
provider: 'cherryin',
group: 'Qwen'
}
],
cherryin: [],
vertexai: [],
'302ai': [
{


@@ -3,7 +3,7 @@ import HunyuanProviderLogo from '@renderer/assets/images/models/hunyuan.png'
import AzureProviderLogo from '@renderer/assets/images/models/microsoft.png'
import Ai302ProviderLogo from '@renderer/assets/images/providers/302ai.webp'
import AiHubMixProviderLogo from '@renderer/assets/images/providers/aihubmix.webp'
import AiOnlyProviderLogo from '@renderer/assets/images/providers/aiOnly.png'
import AiOnlyProviderLogo from '@renderer/assets/images/providers/aiOnly.webp'
import AlayaNewProviderLogo from '@renderer/assets/images/providers/alayanew.webp'
import AnthropicProviderLogo from '@renderer/assets/images/providers/anthropic.png'
import AwsProviderLogo from '@renderer/assets/images/providers/aws-bedrock.webp'
@@ -64,7 +64,18 @@ import {
} from '@renderer/types'
import { TOKENFLUX_HOST } from './constant'
import { SYSTEM_MODELS } from './models'
import { glm45FlashModel, qwen38bModel, SYSTEM_MODELS } from './models'
export const CHERRYAI_PROVIDER: SystemProvider = {
id: 'cherryai' as SystemProviderId,
name: 'CherryAI',
type: 'openai',
apiKey: '',
apiHost: 'https://api.cherry-ai.com/',
models: [glm45FlashModel, qwen38bModel],
isSystem: true,
enabled: true
}
export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> = {
cherryin: {
@@ -72,8 +83,8 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
name: 'CherryIN',
type: 'openai',
apiKey: '',
apiHost: 'https://api.cherry-ai.com/',
models: SYSTEM_MODELS.cherryin,
apiHost: 'https://open.cherryin.ai',
models: [],
isSystem: true,
enabled: true
},
@@ -699,12 +710,13 @@ type ProviderUrls = {
export const PROVIDER_URLS: Record<SystemProviderId, ProviderUrls> = {
cherryin: {
api: {
url: 'https://api.cherry-ai.com'
url: 'https://open.cherryin.ai'
},
websites: {
official: 'https://cherry-ai.com',
docs: 'https://docs.cherry-ai.com',
models: 'https://docs.cherry-ai.com/pre-basic/providers/cherryin'
official: 'https://open.cherryin.ai',
apiKey: 'https://open.cherryin.ai/console/token',
docs: 'https://open.cherryin.ai',
models: 'https://open.cherryin.ai/pricing'
}
},
ph8: {


@@ -1,6 +1,5 @@
import store from '@renderer/store'
import { useProviders } from './useProvider'
import { getStoreProviders } from './useStore'
export function useModel(id?: string, providerId?: string) {
const { providers } = useProviders()
@@ -15,7 +14,7 @@ export function useModel(id?: string, providerId?: string) {
}
export function getModel(id?: string, providerId?: string) {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const allModels = providers.map((p) => p.models).flat()
return allModels.find((m) => {
if (providerId) {


@@ -1,4 +1,5 @@
import { createSelector } from '@reduxjs/toolkit'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import { getDefaultProvider } from '@renderer/services/AssistantService'
import { useAppDispatch, useAppSelector } from '@renderer/store'
import {
@@ -16,7 +17,7 @@ import { useDefaultModel } from './useAssistant'
const selectEnabledProviders = createSelector(
(state) => state.llm.providers,
(providers) => providers.filter((p) => p.enabled)
(providers) => providers.filter((p) => p.enabled).concat(CHERRYAI_PROVIDER)
)
export function useProviders() {
@@ -24,7 +25,7 @@ export function useProviders() {
const dispatch = useAppDispatch()
return {
providers: providers || {},
providers: providers || [],
addProvider: (provider: Provider) => dispatch(addProvider(provider)),
removeProvider: (provider: Provider) => dispatch(removeProvider(provider)),
updateProvider: (updates: Partial<Provider> & { id: string }) => dispatch(updateProvider(updates)),
@@ -45,7 +46,9 @@ export function useAllProviders() {
}
export function useProvider(id: string) {
const provider = useAppSelector((state) => state.llm.providers.find((p) => p.id === id)) || getDefaultProvider()
const provider =
useAppSelector((state) => state.llm.providers.concat([CHERRYAI_PROVIDER]).find((p) => p.id === id)) ||
getDefaultProvider()
const dispatch = useAppDispatch()
return {


@@ -1,4 +1,5 @@
import { useAppDispatch, useAppSelector } from '@renderer/store'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import store, { useAppDispatch, useAppSelector } from '@renderer/store'
import {
setAssistantsTabSortType,
setShowAssistants,
@@ -39,3 +40,7 @@ export function useAssistantsTabSortType() {
setAssistantsTabSortType: (sortType: AssistantsSortType) => dispatch(setAssistantsTabSortType(sortType))
}
}
export function getStoreProviders() {
return store.getState().llm.providers.concat([CHERRYAI_PROVIDER])
}
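
The helper gives non-React code one place to resolve providers; a minimal usage sketch, mirroring the call sites changed later in this commit:

// getStoreProviders() returns the persisted providers plus the built-in CHERRYAI_PROVIDER,
// so lookups succeed even though 'cherryai' is never written to the Redux store.
const providers = getStoreProviders()
const zhipu = providers.find((p) => p.id === 'zhipu')
const cherryai = providers.find((p) => p.id === 'cherryai') // always defined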


@@ -2018,7 +2018,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "唯一AI(AiOnly)",
"aionly": "唯一AI (AiOnly)",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",


@@ -2018,7 +2018,7 @@
"provider": {
"302ai": "302.AI",
"aihubmix": "AiHubMix",
"aionly": "唯一AI(AiOnly)",
"aionly": "唯一AI (AiOnly)",
"alayanew": "Alaya NeW",
"anthropic": "Anthropic",
"aws-bedrock": "AWS Bedrock",


@@ -70,7 +70,7 @@ const CodeToolsPage: FC = () => {
if (isEmbeddingModel(m) || isRerankModel(m) || isTextToImageModel(m)) {
return false
}
if (m.provider === 'cherryin') {
if (m.provider === 'cherryai') {
return false
}
if (selectedCliTool === codeTools.claudeCode) {


@@ -3,8 +3,8 @@ import SelectModelPopup from '@renderer/components/Popups/SelectModelPopup'
import { isLocalAi } from '@renderer/config/env'
import { isEmbeddingModel, isRerankModel, isWebSearchModel } from '@renderer/config/models'
import { useAssistant } from '@renderer/hooks/useAssistant'
import { useProvider } from '@renderer/hooks/useProvider'
import { getProviderName } from '@renderer/services/ProviderService'
import { useAppSelector } from '@renderer/store'
import { Assistant, Model } from '@renderer/types'
import { Button, Tag } from 'antd'
import { ChevronsUpDown } from 'lucide-react'
@@ -20,7 +20,7 @@ const SelectModelButton: FC<Props> = ({ assistant }) => {
const { model, updateAssistant } = useAssistant(assistant.id)
const { t } = useTranslation()
const timerRef = useRef<NodeJS.Timeout>(undefined)
const provider = useAppSelector((state) => state.llm.providers.find((p) => p.id === model?.provider))
const provider = useProvider(model?.provider)
const modelFilter = (model: Model) => !isEmbeddingModel(model) && !isRerankModel(model)


@@ -50,7 +50,6 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
const providerConfig = PROVIDER_URLS[provider.id]
const docsWebsite = providerConfig?.websites?.docs
const modelsWebsite = providerConfig?.websites?.models
const editable = provider.id !== 'cherryin'
const [searchText, _setSearchText] = useState('')
const [displayedModelGroups, setDisplayedModelGroups] = useState<ModelGroups | null>(() => {
@@ -113,17 +112,15 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
tooltip={t('models.search.tooltip')}
/>
</HStack>
{editable && (
<HStack>
<Tooltip title={t('settings.models.check.button_caption')} mouseLeaveDelay={0}>
<Button
type="text"
onClick={runHealthCheck}
icon={<StreamlineGoodHealthAndWellBeing size={16} isActive={isHealthChecking} />}
/>
</Tooltip>
</HStack>
)}
<HStack>
<Tooltip title={t('settings.models.check.button_caption')} mouseLeaveDelay={0}>
<Button
type="text"
onClick={runHealthCheck}
icon={<StreamlineGoodHealthAndWellBeing size={16} isActive={isHealthChecking} />}
/>
</Tooltip>
</HStack>
</HStack>
</SettingSubtitle>
<Spin spinning={isLoading} indicator={<LoadingIcon color="var(--color-text-2)" />}>
@@ -139,7 +136,6 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
onEditModel={(model) => EditModelPopup.show({ provider, model })}
onRemoveModel={removeModel}
onRemoveGroup={() => displayedModelGroups[group].forEach((model) => removeModel(model))}
disabled={!editable}
/>
))}
</Flex>
@@ -167,16 +163,14 @@ const ModelList: React.FC<ModelListProps> = ({ providerId }) => {
<div style={{ height: 5 }} />
)}
</Flex>
{editable && (
<Flex gap={10} style={{ marginTop: 12 }}>
<Button type="primary" onClick={onManageModel} icon={<ListCheck size={16} />} disabled={isHealthChecking}>
{t('button.manage')}
</Button>
<Button type="default" onClick={onAddModel} icon={<Plus size={16} />} disabled={isHealthChecking}>
{t('button.add')}
</Button>
</Flex>
)}
<Flex gap={10} style={{ marginTop: 12 }}>
<Button type="primary" onClick={onManageModel} icon={<ListCheck size={16} />} disabled={isHealthChecking}>
{t('button.manage')}
</Button>
<Button type="default" onClick={onAddModel} icon={<Plus size={16} />} disabled={isHealthChecking}>
{t('button.add')}
</Button>
</Flex>
</>
)
}


@@ -1,6 +1,6 @@
import AI302ProviderLogo from '@renderer/assets/images/providers/302ai.webp'
import AiHubMixProviderLogo from '@renderer/assets/images/providers/aihubmix.webp'
import AiOnlyProviderLogo from '@renderer/assets/images/providers/aiOnly.png'
import AiOnlyProviderLogo from '@renderer/assets/images/providers/aiOnly.webp'
import PPIOProviderLogo from '@renderer/assets/images/providers/ppio.png'
import SiliconFlowProviderLogo from '@renderer/assets/images/providers/silicon.png'
import TokenFluxProviderLogo from '@renderer/assets/images/providers/tokenflux.png'


@@ -68,7 +68,7 @@ const ProviderSetting: FC<Props> = ({ providerId }) => {
const isAzureOpenAI = provider.id === 'azure-openai' || provider.type === 'azure-openai'
const isDmxapi = provider.id === 'dmxapi'
const hideApiInput = ['vertexai', 'aws-bedrock', 'cherryin'].includes(provider.id)
const hideApiInput = ['vertexai', 'aws-bedrock'].includes(provider.id)
const providerConfig = PROVIDER_URLS[provider.id]
const officialWebsite = providerConfig?.websites?.official


@@ -337,7 +337,7 @@ export async function fetchGenerate({
export function hasApiKey(provider: Provider) {
if (!provider) return false
if (['ollama', 'lmstudio', 'vertexai', 'cherryin'].includes(provider.id)) return true
if (['ollama', 'lmstudio', 'vertexai', 'cherryai'].includes(provider.id)) return true
return !isEmpty(provider.apiKey)
}


@@ -7,7 +7,9 @@ import {
UNLIMITED_CONTEXT_COUNT
} from '@renderer/config/constant'
import { isQwenMTModel } from '@renderer/config/models'
import { CHERRYAI_PROVIDER } from '@renderer/config/providers'
import { UNKNOWN } from '@renderer/config/translate'
import { getStoreProviders } from '@renderer/hooks/useStore'
import i18n from '@renderer/i18n'
import store from '@renderer/store'
import { addAssistant } from '@renderer/store/assistants'
@@ -126,26 +128,25 @@ export function getTranslateModel() {
}
export function getAssistantProvider(assistant: Assistant): Provider {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const provider = providers.find((p) => p.id === assistant.model?.provider)
return provider || getDefaultProvider()
}
export function getProviderByModel(model?: Model): Provider {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const provider = providers.find((p) => p.id === model?.provider)
if (!provider) {
const defaultProvider = providers.find((p) => p.id === getDefaultModel()?.provider)
const cherryinProvider = providers.find((p) => p.id === 'cherryin')
return defaultProvider || cherryinProvider || providers[0]
return defaultProvider || CHERRYAI_PROVIDER || providers[0]
}
return provider
}
export function getProviderByModelId(modelId?: string) {
const providers = store.getState().llm.providers
const providers = getStoreProviders()
const _modelId = modelId || getDefaultModel().id
return providers.find((p) => p.models.find((m) => m.id === _modelId)) as Provider
}


@@ -1,4 +1,4 @@
import store from '@renderer/store'
import { getStoreProviders } from '@renderer/hooks/useStore'
import { Model } from '@renderer/types'
import { pick } from 'lodash'
@@ -9,9 +9,8 @@ export const getModelUniqId = (m?: Model) => {
}
export const hasModel = (m?: Model) => {
const allModels = store
.getState()
.llm.providers.filter((p) => p.enabled)
const allModels = getStoreProviders()
.filter((p) => p.enabled)
.map((p) => p.models)
.flat()
@@ -19,7 +18,7 @@ export const hasModel = (m?: Model) => {
}
export function getModelName(model?: Model) {
const provider = store.getState().llm.providers.find((p) => p.id === model?.provider)
const provider = getStoreProviders().find((p) => p.id === model?.provider)
const modelName = model?.name || model?.id || ''
if (provider) {


@@ -1,4 +1,4 @@
import store from '@renderer/store'
import { getStoreProviders } from '@renderer/hooks/useStore'
import { Model, Provider } from '@renderer/types'
import { getFancyProviderName } from '@renderer/utils'
@@ -14,9 +14,9 @@ export function getProviderName(model?: Model) {
export function getProviderByModel(model?: Model) {
const id = model?.provider
const provider = store.getState().llm.providers.find((p) => p.id === id)
const provider = getStoreProviders().find((p) => p.id === id)
if (provider?.id === 'cherryin') {
if (provider?.id === 'cherryai') {
const map = {
'glm-4.5-flash': 'zhipu',
'Qwen/Qwen3-8B': 'silicon'
@@ -43,5 +43,5 @@ export function isProviderSupportCharge(provider: Provider) {
}
export function getProviderById(id: string) {
return store.getState().llm.providers.find((p) => p.id === id)
return getStoreProviders().find((p) => p.id === id)
}


@@ -67,7 +67,7 @@ const persistedReducer = persistReducer(
{
key: 'cherry-studio',
storage,
version: 156,
version: 157,
blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs'],
migrate
},


@@ -2479,7 +2479,6 @@ const migrateConfig = {
},
'156': (state: RootState) => {
try {
addProvider(state, 'aionly')
state.llm.providers.forEach((provider) => {
if (provider.id === SystemProviderIds.anthropic) {
if (provider.apiHost.endsWith('/')) {
@@ -2492,6 +2491,53 @@ const migrateConfig = {
logger.error('migrate 156 error', error as Error)
return state
}
},
'157': (state: RootState) => {
try {
addProvider(state, 'aionly')
const cherryinProvider = state.llm.providers.find((provider) => provider.id === 'cherryin')
if (cherryinProvider) {
updateProvider(state, 'cherryin', { apiHost: 'https://open.cherryin.ai', models: [] })
}
if (state.llm.defaultModel?.provider === 'cherryin') {
state.llm.defaultModel.provider = 'cherryai'
}
if (state.llm.quickModel?.provider === 'cherryin') {
state.llm.quickModel.provider = 'cherryai'
}
if (state.llm.translateModel?.provider === 'cherryin') {
state.llm.translateModel.provider = 'cherryai'
}
state.assistants.assistants.forEach((assistant) => {
if (assistant.model?.provider === 'cherryin') {
assistant.model.provider = 'cherryai'
}
if (assistant.defaultModel?.provider === 'cherryin') {
assistant.defaultModel.provider = 'cherryai'
}
})
state.agents.agents.forEach((agent) => {
// @ts-ignore model is not defined in Agent
if (agent.model?.provider === 'cherryin') {
// @ts-ignore model is not defined in Agent
agent.model.provider = 'cherryai'
}
if (agent.defaultModel?.provider === 'cherryin') {
agent.defaultModel.provider = 'cherryai'
}
})
return state
} catch (error) {
logger.error('migrate 157 error', error as Error)
return state
}
}
}


@@ -64,7 +64,7 @@ export const getModelTags = (models: Model[]): Record<ModelTag, boolean> => {
}
export function isFreeModel(model: Model) {
if (model.provider === 'cherryin') {
if (model.provider === 'cherryai') {
return true
}


@@ -8,7 +8,7 @@
"tests/__mocks__/**/*",
"packages/mcp-trace/**/*",
"packages/aiCore/src/**/*",
"src/main/integration/cherryin/index.js",
"src/main/integration/cherryai/index.js",
"packages/extension-table-plus/**/*"
],
"compilerOptions": {