Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions packages/types/src/provider-settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import {
geminiModels,
groqModels,
ioIntelligenceModels,
miniMaxModels,
mistralModels,
moonshotModels,
openAiNativeModels,
Expand Down Expand Up @@ -125,6 +126,7 @@ export const providerNames = [
"doubao",
"deepseek",
"featherless",
"minimax",
"fireworks",
"gemini",
"gemini-cli",
Expand Down Expand Up @@ -327,6 +329,11 @@ const moonshotSchema = apiModelIdProviderModelSchema.extend({
moonshotApiKey: z.string().optional(),
})

const miniMaxSchema = apiModelIdProviderModelSchema.extend({
miniMaxBaseUrl: z.string().optional(),
miniMaxApiKey: z.string().optional(),
})

const unboundSchema = baseProviderSettingsSchema.extend({
unboundApiKey: z.string().optional(),
unboundModelId: z.string().optional(),
Expand Down Expand Up @@ -433,6 +440,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
mistralSchema.merge(z.object({ apiProvider: z.literal("mistral") })),
deepSeekSchema.merge(z.object({ apiProvider: z.literal("deepseek") })),
deepInfraSchema.merge(z.object({ apiProvider: z.literal("deepinfra") })),
miniMaxSchema.merge(z.object({ apiProvider: z.literal("minimax") })),
doubaoSchema.merge(z.object({ apiProvider: z.literal("doubao") })),
moonshotSchema.merge(z.object({ apiProvider: z.literal("moonshot") })),
unboundSchema.merge(z.object({ apiProvider: z.literal("unbound") })),
Expand Down Expand Up @@ -474,6 +482,7 @@ export const providerSettingsSchema = z.object({
...mistralSchema.shape,
...deepSeekSchema.shape,
...deepInfraSchema.shape,
...miniMaxSchema.shape,
...doubaoSchema.shape,
...moonshotSchema.shape,
...unboundSchema.shape,
Expand Down Expand Up @@ -562,6 +571,7 @@ export const modelIdKeysByProvider: Record<TypicalProvider, ModelIdKey> = {
moonshot: "apiModelId",
deepseek: "apiModelId",
deepinfra: "deepInfraModelId",
minimax: "apiModelId",
doubao: "apiModelId",
"qwen-code": "apiModelId",
unbound: "unboundModelId",
Expand Down Expand Up @@ -639,6 +649,11 @@ export const MODELS_BY_PROVIDER: Record<
label: "DeepSeek",
models: Object.keys(deepSeekModels),
},
minimax: {
id: "minimax",
label: "MiniMax",
models: Object.keys(miniMaxModels),
},
doubao: { id: "doubao", label: "Doubao", models: Object.keys(doubaoModels) },
featherless: {
id: "featherless",
Expand Down
1 change: 1 addition & 0 deletions packages/types/src/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ export * from "./io-intelligence.js"
export * from "./lite-llm.js"
export * from "./lm-studio.js"
export * from "./mistral.js"
export * from "./minimax.js"
export * from "./moonshot.js"
export * from "./ollama.js"
export * from "./openai.js"
Expand Down
36 changes: 36 additions & 0 deletions packages/types/src/providers/minimax.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import type { ModelInfo } from "../model.js"

// MiniMax OpenAI-compatible chat models.
// API reference: https://docs.minimaxi.com/docs/api
export type MiniMaxModelId = keyof typeof miniMaxModels

// Model used when the user has not explicitly selected one.
export const miniMaxDefaultModelId: MiniMaxModelId = "abab5.5s-chat"

// NOTE(review): the original descriptions referred to "MiniMax-M2" /
// "M2 Pro" / "M2 Vision", which does not match the abab-series model IDs
// declared here — reworded to describe the listed models. Pricing and
// context-window figures should be verified against the MiniMax API docs.
export const miniMaxModels = {
	"abab5.5s-chat": {
		maxTokens: 8192, // 8K max output
		contextWindow: 128_000,
		supportsImages: false,
		supportsPromptCache: false,
		inputPrice: 5, // $5 per million tokens
		outputPrice: 15, // $15 per million tokens
		description: `MiniMax abab5.5s is a fast, cost-effective chat model with a 128K context window, suited to code generation, debugging, and general AI-assisted development tasks.`,
	},
	"abab6.5s-chat": {
		maxTokens: 8192, // 8K max output
		contextWindow: 245_000,
		supportsImages: false,
		supportsPromptCache: false,
		inputPrice: 10, // $10 per million tokens
		outputPrice: 30, // $30 per million tokens
		description: `MiniMax abab6.5s offers an extended 245K context window and enhanced reasoning capabilities, ideal for complex coding projects and comprehensive code analysis.`,
	},
	"abab6.5g-chat": {
		maxTokens: 8192, // 8K max output
		contextWindow: 245_000,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 10, // $10 per million tokens
		outputPrice: 30, // $30 per million tokens
		description: `MiniMax abab6.5g adds multimodal capabilities to the 245K-context model, supporting image understanding alongside code generation and reasoning tasks.`,
	},
} as const satisfies Record<string, ModelInfo>
3 changes: 3 additions & 0 deletions src/api/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import {
GeminiHandler,
OpenAiNativeHandler,
DeepSeekHandler,
MiniMaxHandler,
MoonshotHandler,
MistralHandler,
VsCodeLmHandler,
Expand Down Expand Up @@ -117,6 +118,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
return new OpenAiNativeHandler(options)
case "deepseek":
return new DeepSeekHandler(options)
case "minimax":
return new MiniMaxHandler(options)
case "doubao":
return new DoubaoHandler(options)
case "qwen-code":
Expand Down
179 changes: 179 additions & 0 deletions src/api/providers/__tests__/minimax.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,179 @@
import { describe, it, expect, vi, beforeEach } from "vitest"
import OpenAI from "openai"
import { miniMaxDefaultModelId, miniMaxModels } from "@roo-code/types"

import type { ApiHandlerOptions } from "../../../shared/api"

import { MiniMaxHandler } from "../minimax"

// Mock the OpenAI SDK so constructing handlers performs no network I/O and
// constructor arguments can be inspected.
vi.mock("openai")

describe("MiniMaxHandler", () => {
	let handler: MiniMaxHandler
	let mockOptions: ApiHandlerOptions

	beforeEach(() => {
		// Clear mock state BEFORE constructing the handler. The original
		// cleared afterwards, which erased the record of the OpenAI
		// constructor call made during setup.
		vi.clearAllMocks()
		mockOptions = {
			apiModelId: "abab5.5s-chat",
			miniMaxApiKey: "test-api-key",
		}
		handler = new MiniMaxHandler(mockOptions)
	})

	describe("constructor", () => {
		it("should initialize with provided options", () => {
			expect(handler).toBeInstanceOf(MiniMaxHandler)
			expect(handler.getModel().id).toBe(mockOptions.apiModelId)
		})

		it("should handle missing API key", () => {
			// A missing key must not throw; the handler substitutes a placeholder.
			expect(() => {
				new MiniMaxHandler({
					...mockOptions,
					miniMaxApiKey: undefined,
				})
			}).not.toThrow()
		})

		it("should use default model ID if not provided", () => {
			const handlerWithoutModel = new MiniMaxHandler({
				...mockOptions,
				apiModelId: undefined,
			})
			expect(handlerWithoutModel.getModel().id).toBe(miniMaxDefaultModelId)
		})

		it("should use default base URL if not provided", () => {
			const handlerWithoutBaseUrl = new MiniMaxHandler({
				...mockOptions,
				miniMaxBaseUrl: undefined,
			})
			expect(handlerWithoutBaseUrl).toBeInstanceOf(MiniMaxHandler)
			// The base URL is passed to the OpenAI client internally.
		})

		it("should use custom base URL if provided", () => {
			const customBaseUrl = "https://custom.minimax.com/v1"
			const handlerWithCustomUrl = new MiniMaxHandler({
				...mockOptions,
				miniMaxBaseUrl: customBaseUrl,
			})
			expect(handlerWithCustomUrl).toBeInstanceOf(MiniMaxHandler)
			// The custom base URL is passed to the OpenAI client.
		})

		it("should set includeMaxTokens to true", () => {
			// Create a new handler and verify the OpenAI client was constructed
			// with the MiniMax API key. includeMaxTokens itself is an internal
			// property forwarded to the superclass constructor.
			const _handler = new MiniMaxHandler(mockOptions)
			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: mockOptions.miniMaxApiKey }))
		})
	})

	describe("getModel", () => {
		it("should return correct model info for abab5.5s-chat", () => {
			const model = handler.getModel()
			expect(model.id).toBe("abab5.5s-chat")
			expect(model.info).toEqual(miniMaxModels["abab5.5s-chat"])
		})

		it("should return correct model info for abab6.5s-chat", () => {
			const handlerWithPro = new MiniMaxHandler({
				...mockOptions,
				apiModelId: "abab6.5s-chat",
			})
			const model = handlerWithPro.getModel()
			expect(model.id).toBe("abab6.5s-chat")
			expect(model.info).toEqual(miniMaxModels["abab6.5s-chat"])
		})

		it("should return correct model info for abab6.5g-chat", () => {
			const handlerWithVision = new MiniMaxHandler({
				...mockOptions,
				apiModelId: "abab6.5g-chat",
			})
			const model = handlerWithVision.getModel()
			expect(model.id).toBe("abab6.5g-chat")
			expect(model.info).toEqual(miniMaxModels["abab6.5g-chat"])
			expect(model.info.supportsImages).toBe(true)
		})

		it("should return provided model ID with default model info if model does not exist", () => {
			// Unknown IDs keep their ID but fall back to the default model's info.
			const handlerWithInvalidModel = new MiniMaxHandler({
				...mockOptions,
				apiModelId: "invalid-model",
			})
			const model = handlerWithInvalidModel.getModel()
			expect(model.id).toBe("invalid-model")
			expect(model.info).toEqual(miniMaxModels[miniMaxDefaultModelId])
		})

		it("should return default model if no model ID is provided", () => {
			const handlerWithoutModel = new MiniMaxHandler({
				...mockOptions,
				apiModelId: undefined,
			})
			const model = handlerWithoutModel.getModel()
			expect(model.id).toBe(miniMaxDefaultModelId)
			expect(model.info).toEqual(miniMaxModels[miniMaxDefaultModelId])
		})
	})

	describe("model capabilities", () => {
		it("should correctly report image support for models", () => {
			const textOnlyModel = new MiniMaxHandler({
				...mockOptions,
				apiModelId: "abab5.5s-chat",
			})
			expect(textOnlyModel.getModel().info.supportsImages).toBe(false)

			const visionModel = new MiniMaxHandler({
				...mockOptions,
				apiModelId: "abab6.5g-chat",
			})
			expect(visionModel.getModel().info.supportsImages).toBe(true)
		})

		it("should report no prompt cache support for all models", () => {
			const models = ["abab5.5s-chat", "abab6.5s-chat", "abab6.5g-chat"]

			models.forEach((modelId) => {
				// Named to avoid shadowing the suite-level `handler`.
				const modelHandler = new MiniMaxHandler({
					...mockOptions,
					apiModelId: modelId,
				})
				expect(modelHandler.getModel().info.supportsPromptCache).toBe(false)
			})
		})

		it("should have correct context windows for each model", () => {
			const contextWindows = {
				"abab5.5s-chat": 128_000,
				"abab6.5s-chat": 245_000,
				"abab6.5g-chat": 245_000,
			}

			Object.entries(contextWindows).forEach(([modelId, expectedWindow]) => {
				const modelHandler = new MiniMaxHandler({
					...mockOptions,
					apiModelId: modelId,
				})
				expect(modelHandler.getModel().info.contextWindow).toBe(expectedWindow)
			})
		})

		it("should have correct max tokens for all models", () => {
			const models = ["abab5.5s-chat", "abab6.5s-chat", "abab6.5g-chat"]

			models.forEach((modelId) => {
				const modelHandler = new MiniMaxHandler({
					...mockOptions,
					apiModelId: modelId,
				})
				expect(modelHandler.getModel().info.maxTokens).toBe(8192)
			})
		})
	})
})
1 change: 1 addition & 0 deletions src/api/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ export { ChutesHandler } from "./chutes"
export { ClaudeCodeHandler } from "./claude-code"
export { DeepSeekHandler } from "./deepseek"
export { DoubaoHandler } from "./doubao"
export { MiniMaxHandler } from "./minimax"
export { MoonshotHandler } from "./moonshot"
export { FakeAIHandler } from "./fake-ai"
export { GeminiHandler } from "./gemini"
Expand Down
27 changes: 27 additions & 0 deletions src/api/providers/minimax.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import { miniMaxModels, miniMaxDefaultModelId } from "@roo-code/types"

import type { ApiHandlerOptions } from "../../shared/api"

import { getModelParams } from "../transform/model-params"

import { OpenAiHandler } from "./openai"

export class MiniMaxHandler extends OpenAiHandler {
constructor(options: ApiHandlerOptions) {
super({
...options,
openAiApiKey: options.miniMaxApiKey ?? "not-provided",
openAiModelId: options.apiModelId ?? miniMaxDefaultModelId,
openAiBaseUrl: options.miniMaxBaseUrl ?? "https://api.minimaxi.com/v1",
openAiStreamingEnabled: true,
includeMaxTokens: true,
})
}

override getModel() {
const id = this.options.apiModelId ?? miniMaxDefaultModelId
const info = miniMaxModels[id as keyof typeof miniMaxModels] || miniMaxModels[miniMaxDefaultModelId]
const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
return { id, info, ...params }
}
}
1 change: 1 addition & 0 deletions src/shared/ProfileValidator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ export class ProfileValidator {
case "gemini":
case "mistral":
case "deepseek":
case "minimax":
case "xai":
case "groq":
case "sambanova":
Expand Down
1 change: 1 addition & 0 deletions src/shared/__tests__/ProfileValidator.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,7 @@ describe("ProfileValidator", () => {
"gemini",
"mistral",
"deepseek",
"minimax",
"xai",
"groq",
"chutes",
Expand Down
Loading