feat: upgrade MiniMax default model to M2.7

- Add MiniMax-M2.7 and MiniMax-M2.7-highspeed to chat model list
- Set MiniMax-M2.7 as default model (replacing M2.5)
- Keep all previous models as available alternatives
This commit is contained in:
Octopus 2026-03-18 07:31:52 -05:00
parent c01e019286
commit 1ac73e81bc
2 changed files with 5 additions and 3 deletions

View File

@@ -121,6 +121,8 @@ class MinimaxVideoGenerationResponse(BaseModel):
class MiniMaxChatModel(str, Enum):
    """String-valued identifiers for MiniMax chat (text-generation) models.

    Each member's value is the exact model name sent to the MiniMax API.
    Subclassing ``str`` lets members be used directly where a plain model-name
    string is expected (e.g. as Combo input options and ``.value`` defaults).
    """

    # Newer M2.7 generation; M2_7 is used as the node's default model.
    M2_7 = 'MiniMax-M2.7'
    M2_7_highspeed = 'MiniMax-M2.7-highspeed'
    # Older M2.5 generation, kept as selectable alternatives.
    M2_5 = 'MiniMax-M2.5'
    M2_5_highspeed = 'MiniMax-M2.5-highspeed'

View File

@@ -452,7 +452,7 @@ class MinimaxChatNode(IO.ComfyNode):
node_id="MinimaxChatNode",
display_name="MiniMax Chat",
category="api node/text/MiniMax",
description="Generate text responses using MiniMax language models (MiniMax-M2.5).",
description="Generate text responses using MiniMax language models (MiniMax-M2.7).",
inputs=[
IO.String.Input(
"prompt",
@@ -463,7 +463,7 @@ class MinimaxChatNode(IO.ComfyNode):
IO.Combo.Input(
"model",
options=MiniMaxChatModel,
default=MiniMaxChatModel.M2_5.value,
default=MiniMaxChatModel.M2_7.value,
tooltip="The MiniMax model to use for text generation.",
),
IO.String.Input(
@@ -524,7 +524,7 @@ class MinimaxChatNode(IO.ComfyNode):
async def execute(
cls,
prompt: str,
model: str = MiniMaxChatModel.M2_5.value,
model: str = MiniMaxChatModel.M2_7.value,
system_prompt: Optional[str] = None,
max_tokens: int = 4096,
temperature: float = 0.7,