From 627e98a3fd548ed504db55ac865f0f156ee1bf8a Mon Sep 17 00:00:00 2001
From: Roo Code
Date: Sat, 25 Oct 2025 08:25:24 +0000
Subject: [PATCH 1/2] feat: add MiniMax AI provider support

- Add MiniMax type definitions with models abab5.5s-chat, abab6.5s-chat, and abab6.5g-chat
- Implement MiniMaxHandler extending OpenAiHandler for API compatibility
- Add MiniMax to provider settings schema and validations
- Include comprehensive test coverage for MiniMax provider
- Support MiniMax models for coding, reasoning, and AI-assisted development tasks
---
 packages/types/src/provider-settings.ts       |  15 ++
 packages/types/src/providers/index.ts         |   1 +
 packages/types/src/providers/minimax.ts       |  38 ++++
 src/api/index.ts                              |   3 +
 src/api/providers/__tests__/minimax.spec.ts   | 179 ++++++++++++++++++
 src/api/providers/index.ts                    |   1 +
 src/api/providers/minimax.ts                  |  27 +++
 src/shared/ProfileValidator.ts                |   1 +
 src/shared/__tests__/ProfileValidator.spec.ts |   1 +
 9 files changed, 266 insertions(+)
 create mode 100644 packages/types/src/providers/minimax.ts
 create mode 100644 src/api/providers/__tests__/minimax.spec.ts
 create mode 100644 src/api/providers/minimax.ts

diff --git a/packages/types/src/provider-settings.ts b/packages/types/src/provider-settings.ts
index 4153db0da4e0..ed2267b660a0 100644
--- a/packages/types/src/provider-settings.ts
+++ b/packages/types/src/provider-settings.ts
@@ -15,6 +15,7 @@ import {
 	geminiModels,
 	groqModels,
 	ioIntelligenceModels,
+	miniMaxModels,
 	mistralModels,
 	moonshotModels,
 	openAiNativeModels,
@@ -125,6 +126,7 @@ export const providerNames = [
 	"doubao",
 	"deepseek",
 	"featherless",
+	"minimax",
 	"fireworks",
 	"gemini",
 	"gemini-cli",
@@ -327,6 +329,11 @@ const moonshotSchema = apiModelIdProviderModelSchema.extend({
 	moonshotApiKey: z.string().optional(),
 })
 
+const miniMaxSchema = apiModelIdProviderModelSchema.extend({
+	miniMaxBaseUrl: z.string().optional(),
+	miniMaxApiKey: z.string().optional(),
+})
+
 const unboundSchema = baseProviderSettingsSchema.extend({
 	unboundApiKey: z.string().optional(),
 	unboundModelId: z.string().optional(),
@@ -433,6 +440,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
 	mistralSchema.merge(z.object({ apiProvider: z.literal("mistral") })),
 	deepSeekSchema.merge(z.object({ apiProvider: z.literal("deepseek") })),
 	deepInfraSchema.merge(z.object({ apiProvider: z.literal("deepinfra") })),
+	miniMaxSchema.merge(z.object({ apiProvider: z.literal("minimax") })),
 	doubaoSchema.merge(z.object({ apiProvider: z.literal("doubao") })),
 	moonshotSchema.merge(z.object({ apiProvider: z.literal("moonshot") })),
 	unboundSchema.merge(z.object({ apiProvider: z.literal("unbound") })),
@@ -474,6 +482,7 @@ export const providerSettingsSchema = z.object({
 	...mistralSchema.shape,
 	...deepSeekSchema.shape,
 	...deepInfraSchema.shape,
+	...miniMaxSchema.shape,
 	...doubaoSchema.shape,
 	...moonshotSchema.shape,
 	...unboundSchema.shape,
@@ -562,6 +571,7 @@ export const modelIdKeysByProvider: Record = {
 	moonshot: "apiModelId",
 	deepseek: "apiModelId",
 	deepinfra: "deepInfraModelId",
+	minimax: "apiModelId",
 	doubao: "apiModelId",
 	"qwen-code": "apiModelId",
 	unbound: "unboundModelId",
@@ -639,6 +649,11 @@ export const MODELS_BY_PROVIDER: Record<
 		label: "DeepSeek",
 		models: Object.keys(deepSeekModels),
 	},
+	minimax: {
+		id: "minimax",
+		label: "MiniMax",
+		models: Object.keys(miniMaxModels),
+	},
 	doubao: { id: "doubao", label: "Doubao", models: Object.keys(doubaoModels) },
 	featherless: {
 		id: "featherless",
diff --git a/packages/types/src/providers/index.ts b/packages/types/src/providers/index.ts
index 21e43aaa99a6..9bc3332f3866 100644
--- a/packages/types/src/providers/index.ts
+++ b/packages/types/src/providers/index.ts
@@ -15,6 +15,7 @@ export * from "./io-intelligence.js"
 export * from "./lite-llm.js"
 export * from "./lm-studio.js"
 export * from "./mistral.js"
+export * from "./minimax.js"
 export * from "./moonshot.js"
 export * from "./ollama.js"
 export * from "./openai.js"
diff --git a/packages/types/src/providers/minimax.ts b/packages/types/src/providers/minimax.ts
new file mode 100644
index 000000000000..c43b81eea73a
--- /dev/null
+++ b/packages/types/src/providers/minimax.ts
@@ -0,0 +1,38 @@
+import type { ModelInfo } from "../model.js"
+
+// https://docs.minimaxi.com/docs/api
+export type MiniMaxModelId = keyof typeof miniMaxModels
+
+export const miniMaxDefaultModelId: MiniMaxModelId = "abab5.5s-chat"
+
+export const miniMaxModels = {
+	"abab5.5s-chat": {
+		maxTokens: 8192, // 8K max output
+		contextWindow: 128_000,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 5, // $5 per million tokens
+		outputPrice: 15, // $15 per million tokens
+		description: `MiniMax abab5.5s-chat is a high-performance model optimized for coding, reasoning, and general AI-assisted development tasks. It offers strong capabilities in code generation, debugging, and technical problem-solving.`,
+	},
+	"abab6.5s-chat": {
+		maxTokens: 8192, // 8K max output
+		contextWindow: 245_000,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 10, // $10 per million tokens
+		outputPrice: 30, // $30 per million tokens
+		description: `MiniMax abab6.5s-chat is an advanced model with an extended context window and enhanced reasoning capabilities, ideal for complex coding projects and comprehensive code analysis.`,
+	},
+	"abab6.5g-chat": {
+		maxTokens: 8192, // 8K max output
+		contextWindow: 245_000,
+		supportsImages: true,
+		supportsPromptCache: false,
+		inputPrice: 10, // $10 per million tokens
+		outputPrice: 30, // $30 per million tokens
+		description: `MiniMax abab6.5g-chat adds multimodal capabilities to the abab6.5 series, supporting image understanding alongside code generation and reasoning tasks.`,
+	},
+} as const satisfies Record<string, ModelInfo>
+
+export const MINIMAX_DEFAULT_TEMPERATURE = 0.7
diff --git a/src/api/index.ts b/src/api/index.ts
index ac0096767624..78bdb4456844 100644
--- a/src/api/index.ts
+++ b/src/api/index.ts
@@ -17,6 +17,7 @@ import {
 	GeminiHandler,
 	OpenAiNativeHandler,
 	DeepSeekHandler,
+	MiniMaxHandler,
 	MoonshotHandler,
 	MistralHandler,
 	VsCodeLmHandler,
@@ -117,6 +118,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
 			return new OpenAiNativeHandler(options)
 		case "deepseek":
 			return new DeepSeekHandler(options)
+		case "minimax":
+			return new MiniMaxHandler(options)
 		case "doubao":
 			return new DoubaoHandler(options)
 		case "qwen-code":
diff --git a/src/api/providers/__tests__/minimax.spec.ts b/src/api/providers/__tests__/minimax.spec.ts
new file mode 100644
index 000000000000..dfffed753a5a
--- /dev/null
+++ b/src/api/providers/__tests__/minimax.spec.ts
@@ -0,0 +1,179 @@
+import { describe, it, expect, vi, beforeEach } from "vitest"
+import OpenAI from "openai"
+import { miniMaxDefaultModelId, miniMaxModels } from "@roo-code/types"
+
+import type { ApiHandlerOptions } from "../../../shared/api"
+
+import { MiniMaxHandler } from "../minimax"
+
+vi.mock("openai")
+
+describe("MiniMaxHandler", () => {
+	let handler: MiniMaxHandler
+	let mockOptions: ApiHandlerOptions
+
+	beforeEach(() => {
+		mockOptions = {
+			apiModelId: "abab5.5s-chat",
"abab5.5s-chat", + miniMaxApiKey: "test-api-key", + } + handler = new MiniMaxHandler(mockOptions) + vi.clearAllMocks() + }) + + describe("constructor", () => { + it("should initialize with provided options", () => { + expect(handler).toBeInstanceOf(MiniMaxHandler) + expect(handler.getModel().id).toBe(mockOptions.apiModelId) + }) + + it("should handle missing API key", () => { + expect(() => { + new MiniMaxHandler({ + ...mockOptions, + miniMaxApiKey: undefined, + }) + }).not.toThrow() + }) + + it("should use default model ID if not provided", () => { + const handlerWithoutModel = new MiniMaxHandler({ + ...mockOptions, + apiModelId: undefined, + }) + expect(handlerWithoutModel.getModel().id).toBe(miniMaxDefaultModelId) + }) + + it("should use default base URL if not provided", () => { + const handlerWithoutBaseUrl = new MiniMaxHandler({ + ...mockOptions, + miniMaxBaseUrl: undefined, + }) + expect(handlerWithoutBaseUrl).toBeInstanceOf(MiniMaxHandler) + // The base URL is passed to OpenAI client internally + }) + + it("should use custom base URL if provided", () => { + const customBaseUrl = "https://custom.minimax.com/v1" + const handlerWithCustomUrl = new MiniMaxHandler({ + ...mockOptions, + miniMaxBaseUrl: customBaseUrl, + }) + expect(handlerWithCustomUrl).toBeInstanceOf(MiniMaxHandler) + // The custom base URL is passed to OpenAI client + }) + + it("should set includeMaxTokens to true", () => { + // Create a new handler and verify OpenAI client was called with includeMaxTokens + const _handler = new MiniMaxHandler(mockOptions) + expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: mockOptions.miniMaxApiKey })) + // includeMaxTokens is an internal property passed to super constructor + }) + }) + + describe("getModel", () => { + it("should return correct model info for abab5.5s-chat", () => { + const model = handler.getModel() + expect(model.id).toBe("abab5.5s-chat") + expect(model.info).toEqual(miniMaxModels["abab5.5s-chat"]) + }) + + it("should return correct model info for abab6.5s-chat", () => { + const handlerWithPro = new MiniMaxHandler({ + ...mockOptions, + apiModelId: "abab6.5s-chat", + }) + const model = handlerWithPro.getModel() + expect(model.id).toBe("abab6.5s-chat") + expect(model.info).toEqual(miniMaxModels["abab6.5s-chat"]) + }) + + it("should return correct model info for abab6.5g-chat", () => { + const handlerWithVision = new MiniMaxHandler({ + ...mockOptions, + apiModelId: "abab6.5g-chat", + }) + const model = handlerWithVision.getModel() + expect(model.id).toBe("abab6.5g-chat") + expect(model.info).toEqual(miniMaxModels["abab6.5g-chat"]) + expect(model.info.supportsImages).toBe(true) + }) + + it("should return provided model ID with default model info if model does not exist", () => { + const handlerWithInvalidModel = new MiniMaxHandler({ + ...mockOptions, + apiModelId: "invalid-model", + }) + const model = handlerWithInvalidModel.getModel() + expect(model.id).toBe("invalid-model") + // Should fallback to default model info + expect(model.info).toEqual(miniMaxModels[miniMaxDefaultModelId]) + }) + + it("should return default model if no model ID is provided", () => { + const handlerWithoutModel = new MiniMaxHandler({ + ...mockOptions, + apiModelId: undefined, + }) + const model = handlerWithoutModel.getModel() + expect(model.id).toBe(miniMaxDefaultModelId) + expect(model.info).toEqual(miniMaxModels[miniMaxDefaultModelId]) + }) + }) + + describe("model capabilities", () => { + it("should correctly report image support for models", () => { + const 
+				...mockOptions,
+				apiModelId: "abab5.5s-chat",
+			})
+			expect(textOnlyModel.getModel().info.supportsImages).toBe(false)
+
+			const visionModel = new MiniMaxHandler({
+				...mockOptions,
+				apiModelId: "abab6.5g-chat",
+			})
+			expect(visionModel.getModel().info.supportsImages).toBe(true)
+		})
+
+		it("should report no prompt cache support for all models", () => {
+			const models = ["abab5.5s-chat", "abab6.5s-chat", "abab6.5g-chat"]
+
+			models.forEach((modelId) => {
+				const handler = new MiniMaxHandler({
+					...mockOptions,
+					apiModelId: modelId,
+				})
+				expect(handler.getModel().info.supportsPromptCache).toBe(false)
+			})
+		})
+
+		it("should have correct context windows for each model", () => {
+			const contextWindows = {
+				"abab5.5s-chat": 128_000,
+				"abab6.5s-chat": 245_000,
+				"abab6.5g-chat": 245_000,
+			}
+
+			Object.entries(contextWindows).forEach(([modelId, expectedWindow]) => {
+				const handler = new MiniMaxHandler({
+					...mockOptions,
+					apiModelId: modelId,
+				})
+				expect(handler.getModel().info.contextWindow).toBe(expectedWindow)
+			})
+		})
+
+		it("should have correct max tokens for all models", () => {
+			const models = ["abab5.5s-chat", "abab6.5s-chat", "abab6.5g-chat"]
+
+			models.forEach((modelId) => {
+				const handler = new MiniMaxHandler({
+					...mockOptions,
+					apiModelId: modelId,
+				})
+				expect(handler.getModel().info.maxTokens).toBe(8192)
+			})
+		})
+	})
+})
diff --git a/src/api/providers/index.ts b/src/api/providers/index.ts
index 85d877b6bc78..66cdcde9e2d6 100644
--- a/src/api/providers/index.ts
+++ b/src/api/providers/index.ts
@@ -6,6 +6,7 @@ export { ChutesHandler } from "./chutes"
 export { ClaudeCodeHandler } from "./claude-code"
 export { DeepSeekHandler } from "./deepseek"
 export { DoubaoHandler } from "./doubao"
+export { MiniMaxHandler } from "./minimax"
 export { MoonshotHandler } from "./moonshot"
 export { FakeAIHandler } from "./fake-ai"
 export { GeminiHandler } from "./gemini"
diff --git a/src/api/providers/minimax.ts b/src/api/providers/minimax.ts
new file mode 100644
index 000000000000..2e80acfb9912
--- /dev/null
+++ b/src/api/providers/minimax.ts
@@ -0,0 +1,27 @@
+import { miniMaxModels, miniMaxDefaultModelId, MINIMAX_DEFAULT_TEMPERATURE } from "@roo-code/types"
+
+import type { ApiHandlerOptions } from "../../shared/api"
+
+import { getModelParams } from "../transform/model-params"
+
+import { OpenAiHandler } from "./openai"
+
+export class MiniMaxHandler extends OpenAiHandler {
+	constructor(options: ApiHandlerOptions) {
+		super({
+			...options,
+			openAiApiKey: options.miniMaxApiKey ?? "not-provided",
+			openAiModelId: options.apiModelId ?? miniMaxDefaultModelId,
+			openAiBaseUrl: options.miniMaxBaseUrl ?? "https://api.minimaxi.com/v1",
+			openAiStreamingEnabled: true,
+			includeMaxTokens: true,
+		})
+	}
+
+	override getModel() {
+		const id = this.options.apiModelId ?? miniMaxDefaultModelId
+		const info = miniMaxModels[id as keyof typeof miniMaxModels] || miniMaxModels[miniMaxDefaultModelId]
+		const params = getModelParams({ format: "openai", modelId: id, model: info, settings: this.options })
+		return { id, info, ...params }
+	}
+}
diff --git a/src/shared/ProfileValidator.ts b/src/shared/ProfileValidator.ts
index 78ff6ed9fe1f..3cb4f96cab52 100644
--- a/src/shared/ProfileValidator.ts
+++ b/src/shared/ProfileValidator.ts
@@ -64,6 +64,7 @@ export class ProfileValidator {
 			case "gemini":
 			case "mistral":
 			case "deepseek":
+			case "minimax":
 			case "xai":
 			case "groq":
 			case "sambanova":
diff --git a/src/shared/__tests__/ProfileValidator.spec.ts b/src/shared/__tests__/ProfileValidator.spec.ts
index 5cfe7a720bf1..82a971a02a15 100644
--- a/src/shared/__tests__/ProfileValidator.spec.ts
+++ b/src/shared/__tests__/ProfileValidator.spec.ts
@@ -189,6 +189,7 @@ describe("ProfileValidator", () => {
 			"gemini",
 			"mistral",
 			"deepseek",
+			"minimax",
 			"xai",
 			"groq",
 			"chutes",

From 8653a05734b6f22cdbc58f3054bf068b2d47ac0b Mon Sep 17 00:00:00 2001
From: Roo Code
Date: Sat, 25 Oct 2025 08:34:53 +0000
Subject: [PATCH 2/2] fix: remove unused MINIMAX_DEFAULT_TEMPERATURE constant

---
 packages/types/src/providers/minimax.ts | 2 --
 src/api/providers/minimax.ts            | 2 +-
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/packages/types/src/providers/minimax.ts b/packages/types/src/providers/minimax.ts
index c43b81eea73a..2c2d4ec07c00 100644
--- a/packages/types/src/providers/minimax.ts
+++ b/packages/types/src/providers/minimax.ts
@@ -34,5 +34,3 @@ export const miniMaxModels = {
 		description: `MiniMax abab6.5g-chat adds multimodal capabilities to the abab6.5 series, supporting image understanding alongside code generation and reasoning tasks.`,
 	},
 } as const satisfies Record<string, ModelInfo>
-
-export const MINIMAX_DEFAULT_TEMPERATURE = 0.7
diff --git a/src/api/providers/minimax.ts b/src/api/providers/minimax.ts
index 2e80acfb9912..c4a2c188bb26 100644
--- a/src/api/providers/minimax.ts
+++ b/src/api/providers/minimax.ts
@@ -1,4 +1,4 @@
-import { miniMaxModels, miniMaxDefaultModelId, MINIMAX_DEFAULT_TEMPERATURE } from "@roo-code/types"
+import { miniMaxModels, miniMaxDefaultModelId } from "@roo-code/types"
 
 import type { ApiHandlerOptions } from "../../shared/api"