diff --git a/docs/src/content/docs/configuration/minimax.mdx b/docs/src/content/docs/configuration/minimax.mdx
new file mode 100644
index 0000000000..bcd840b053
--- /dev/null
+++ b/docs/src/content/docs/configuration/minimax.mdx
@@ -0,0 +1,54 @@
+---
+title: MiniMax
+sidebar:
+ order: 50
+---
+
+import { FileTree } from "@astrojs/starlight/components"
+import { Steps } from "@astrojs/starlight/components"
+import { Tabs, TabItem } from "@astrojs/starlight/components"
+import { Image } from "astro:assets"
+import LLMProviderFeatures from "../../../components/LLMProviderFeatures.astro"
+
+`minimax` is the [MiniMax AI](https://platform.minimaxi.com/) chat model provider.
+It uses the `MINIMAX_API_...` environment variables.
+
+
+
+
+
+-
+ Create a new API key from the [MiniMax API Keys
+ portal](https://platform.minimaxi.com/user-center/basic-information/interface-key).
+
+
+-
+
+Update the `.env` file with the API key.
+
+```txt title=".env"
+MINIMAX_API_KEY=eyJh...
+```
+
+
+
+-
+
+Set the `model` field in `script` to use a MiniMax model.
+
+```js 'model: "minimax:MiniMax-M2.7"'
+script({
+ model: "minimax:MiniMax-M2.7",
+ ...
+})
+```
+
+Available models: `MiniMax-M2.7`, `MiniMax-M2.7-highspeed`, `MiniMax-M2.5`, `MiniMax-M2.5-highspeed`.
+
+
+
+
+
+
+
+
diff --git a/packages/core/src/constants.ts b/packages/core/src/constants.ts
index 80fc1a9058..7083987fe0 100644
--- a/packages/core/src/constants.ts
+++ b/packages/core/src/constants.ts
@@ -146,6 +146,7 @@ export const ALIBABA_BASE =
"https://dashscope-intl.aliyuncs.com/compatible-mode/v1"
export const MISTRAL_API_BASE = "https://api.mistral.ai/v1"
export const DEEPSEEK_API_BASE = "https://api.deepseek.com/v1"
+export const MINIMAX_API_BASE = "https://api.minimax.io/v1"
export const WHISPERASR_API_BASE = "http://localhost:9000"
export const WINDOWS_AI_API_BASE = "http://127.0.0.1:5272/v1"
export const DOCKER_MODEL_RUNNER_API_BASE =
@@ -193,6 +194,7 @@ export const MODEL_PROVIDER_JAN = "jan"
export const MODEL_PROVIDER_SGLANG = "sglang"
export const MODEL_PROVIDER_VLLM = "vllm"
export const MODEL_PROVIDER_DEEPSEEK = "deepseek"
+export const MODEL_PROVIDER_MINIMAX = "minimax"
export const MODEL_PROVIDER_WHISPERASR = "whisperasr"
export const MODEL_PROVIDER_WINDOWS_AI = "windows"
export const MODEL_PROVIDER_DOCKER_MODEL_RUNNER = "docker"
@@ -462,3 +464,4 @@ export const BOX_LEFT_AND_DOWN = "╮"
export const BOX_LEFT_AND_UP = "╯"
export const GITHUB_ASSET_URL_RX = /^https:\/\/github\.com\/.*\/assets\/.*$/i
+
diff --git a/packages/core/src/env.ts b/packages/core/src/env.ts
index a20f9eba4e..13c8624e3e 100644
--- a/packages/core/src/env.ts
+++ b/packages/core/src/env.ts
@@ -36,6 +36,8 @@ import {
MODEL_PROVIDER_ANTHROPIC_BEDROCK,
MODEL_PROVIDER_DEEPSEEK,
DEEPSEEK_API_BASE,
+ MODEL_PROVIDER_MINIMAX,
+ MINIMAX_API_BASE,
MODEL_PROVIDER_WHISPERASR,
WHISPERASR_API_BASE,
MODEL_PROVIDER_ECHO,
@@ -656,6 +658,27 @@ export async function parseTokenFromEnv(
}
}
+ if (provider === MODEL_PROVIDER_MINIMAX) {
+ dbg(`processing ${MODEL_PROVIDER_MINIMAX}`)
+ const base =
+ findEnvVar(env, "MINIMAX", BASE_SUFFIX)?.value || MINIMAX_API_BASE
+ if (!URL.canParse(base)) {
+ throw new Error(`${base} must be a valid URL`)
+ }
+ const token = env.MINIMAX_API_KEY
+ if (!token) {
+ throw new Error("MINIMAX_API_KEY not configured")
+ }
+ return {
+ provider,
+ model,
+ base,
+ token,
+ type: "openai",
+ source: "env: MINIMAX_API_...",
+ }
+ }
+
if (provider === MODEL_PROVIDER_WHISPERASR) {
dbg(`processing ${MODEL_PROVIDER_WHISPERASR}`)
const base =
@@ -879,3 +902,4 @@ export async function parseTokenFromEnv(
return b
}
}
+
diff --git a/packages/core/src/llms.json b/packages/core/src/llms.json
index 5498deed69..65c46a1634 100644
--- a/packages/core/src/llms.json
+++ b/packages/core/src/llms.json
@@ -526,6 +526,29 @@
}
}
},
+ {
+ "id": "minimax",
+ "detail": "MiniMax AI",
+ "url": "https://platform.minimaxi.com/",
+ "bearerToken": true,
+ "listModels": false,
+ "prediction": false,
+ "aliases": {
+ "large": "MiniMax-M2.7",
+ "small": "MiniMax-M2.7-highspeed"
+ },
+ "env": {
+ "MINIMAX_API_KEY": {
+ "description": "MiniMax API key",
+ "required": true,
+ "secret": true
+ },
+ "MINIMAX_API_BASE": {
+ "description": "MiniMax API base URL",
+ "format": "url"
+ }
+ }
+ },
{
"id": "lmstudio",
"detail": "LM Studio local server",
@@ -1340,6 +1363,23 @@
"price_per_million_input_tokens": 0.14,
"price_per_million_output_tokens": 0.28,
"input_cache_token_rebate": 0.1
+ },
+ "minimax:MiniMax-M2.7": {
+ "price_per_million_input_tokens": 1,
+ "price_per_million_output_tokens": 10
+ },
+ "minimax:MiniMax-M2.7-highspeed": {
+ "price_per_million_input_tokens": 1,
+ "price_per_million_output_tokens": 10
+ },
+ "minimax:MiniMax-M2.5": {
+ "price_per_million_input_tokens": 0.5,
+ "price_per_million_output_tokens": 5
+ },
+ "minimax:MiniMax-M2.5-highspeed": {
+ "price_per_million_input_tokens": 0.5,
+ "price_per_million_output_tokens": 5
}
}
}
+
diff --git a/packages/core/src/minimax.test.ts b/packages/core/src/minimax.test.ts
new file mode 100644
index 0000000000..61a1447888
--- /dev/null
+++ b/packages/core/src/minimax.test.ts
@@ -0,0 +1,85 @@
+import { describe, test, beforeEach } from "node:test"
+import assert from "node:assert/strict"
+import { parseModelIdentifier } from "./models"
+import { MODEL_PROVIDER_MINIMAX, MINIMAX_API_BASE } from "./constants"
+import { defaultModelConfigurations } from "./llms"
+import { TestHost } from "./testhost"
+import { resolveLanguageModel } from "./lm"
+
+describe("MiniMax provider", () => {
+ beforeEach(async () => {
+ TestHost.install()
+ })
+
+ describe("parseModelIdentifier", () => {
+ test("minimax:MiniMax-M2.7", () => {
+ const { provider, model, family } =
+ parseModelIdentifier("minimax:MiniMax-M2.7")
+ assert.equal(provider, MODEL_PROVIDER_MINIMAX)
+ assert.equal(model, "MiniMax-M2.7")
+ assert.equal(family, "MiniMax-M2.7")
+ })
+
+ test("minimax:MiniMax-M2.7-highspeed", () => {
+ const { provider, model, family } = parseModelIdentifier(
+ "minimax:MiniMax-M2.7-highspeed"
+ )
+ assert.equal(provider, MODEL_PROVIDER_MINIMAX)
+ assert.equal(model, "MiniMax-M2.7-highspeed")
+ assert.equal(family, "MiniMax-M2.7-highspeed")
+ })
+
+ test("minimax:MiniMax-M2.5", () => {
+ const { provider, model, family } =
+ parseModelIdentifier("minimax:MiniMax-M2.5")
+ assert.equal(provider, MODEL_PROVIDER_MINIMAX)
+ assert.equal(model, "MiniMax-M2.5")
+ assert.equal(family, "MiniMax-M2.5")
+ })
+ })
+
+ describe("constants", () => {
+ test("MODEL_PROVIDER_MINIMAX is defined", () => {
+ assert.equal(MODEL_PROVIDER_MINIMAX, "minimax")
+ })
+
+ test("MINIMAX_API_BASE is correct", () => {
+ assert.equal(MINIMAX_API_BASE, "https://api.minimax.io/v1")
+ })
+ })
+
+ describe("llms.json configuration", () => {
+ test("minimax provider is registered", () => {
+ const configs = defaultModelConfigurations()
+ assert(configs)
+ // minimax should contribute aliases for large and small
+ const largeConfig = configs.large
+ assert(largeConfig)
+ assert(largeConfig.candidates)
+ const hasMinimax = largeConfig.candidates.some((c: string) =>
+ c.startsWith("minimax:")
+ )
+ assert(hasMinimax, "minimax should be in large candidates")
+ })
+
+ test("minimax small alias is registered", () => {
+ const configs = defaultModelConfigurations()
+ const smallConfig = configs.small
+ assert(smallConfig)
+ assert(smallConfig.candidates)
+ const hasMinimax = smallConfig.candidates.some((c: string) =>
+ c.startsWith("minimax:")
+ )
+ assert(hasMinimax, "minimax should be in small candidates")
+ })
+ })
+
+ describe("resolveLanguageModel", () => {
+ test("minimax resolves to OpenAI-compatible model", () => {
+ const lm = resolveLanguageModel(MODEL_PROVIDER_MINIMAX)
+ assert(lm)
+ assert(lm.completer)
+ assert.equal(lm.id, MODEL_PROVIDER_MINIMAX)
+ })
+ })
+})
diff --git a/packages/core/src/minimax_integration.test.ts b/packages/core/src/minimax_integration.test.ts
new file mode 100644
index 0000000000..c1ff2379a7
--- /dev/null
+++ b/packages/core/src/minimax_integration.test.ts
@@ -0,0 +1,53 @@
+import { describe, test, beforeEach } from "node:test"
+import assert from "node:assert/strict"
+import { parseTokenFromEnv } from "./env"
+import { MODEL_PROVIDER_MINIMAX, MINIMAX_API_BASE } from "./constants"
+import { TestHost } from "./testhost"
+
+describe("MiniMax integration", () => {
+ beforeEach(async () => {
+ TestHost.install()
+ })
+
+ describe("parseTokenFromEnv", () => {
+ test("resolves MINIMAX_API_KEY and base URL", async () => {
+ const env: Record<string, string> = {
+ MINIMAX_API_KEY: "test-minimax-key",
+ }
+ const result = await parseTokenFromEnv(
+ env,
+ "minimax:MiniMax-M2.7",
+ {}
+ )
+ assert(result)
+ assert.equal(result.provider, MODEL_PROVIDER_MINIMAX)
+ assert.equal(result.model, "MiniMax-M2.7")
+ assert.equal(result.token, "test-minimax-key")
+ assert.equal(result.base, MINIMAX_API_BASE)
+ assert.equal(result.type, "openai")
+ assert.equal(result.source, "env: MINIMAX_API_...")
+ })
+
+ test("uses custom MINIMAX_API_BASE when set", async () => {
+ const env: Record<string, string> = {
+ MINIMAX_API_KEY: "test-key",
+ MINIMAX_API_BASE: "https://custom.minimax.io/v1",
+ }
+ const result = await parseTokenFromEnv(
+ env,
+ "minimax:MiniMax-M2.5",
+ {}
+ )
+ assert(result)
+ assert.equal(result.base, "https://custom.minimax.io/v1")
+ })
+
+ test("throws when MINIMAX_API_KEY is missing", async () => {
+ const env: Record<string, string> = {}
+ await assert.rejects(
+ () => parseTokenFromEnv(env, "minimax:MiniMax-M2.7", {}),
+ { message: "MINIMAX_API_KEY not configured" }
+ )
+ })
+ })
+})