diff --git a/comfy_api_nodes/apis/minimax.py b/comfy_api_nodes/apis/minimax.py
index 873de3d4c..117306af0 100644
--- a/comfy_api_nodes/apis/minimax.py
+++ b/comfy_api_nodes/apis/minimax.py
@@ -121,6 +121,8 @@ class MinimaxVideoGenerationResponse(BaseModel):
 
 
 class MiniMaxChatModel(str, Enum):
+    M2_7 = 'MiniMax-M2.7'
+    M2_7_highspeed = 'MiniMax-M2.7-highspeed'
     M2_5 = 'MiniMax-M2.5'
     M2_5_highspeed = 'MiniMax-M2.5-highspeed'
 
diff --git a/comfy_api_nodes/nodes_minimax.py b/comfy_api_nodes/nodes_minimax.py
index d31a0084f..a75477165 100644
--- a/comfy_api_nodes/nodes_minimax.py
+++ b/comfy_api_nodes/nodes_minimax.py
@@ -452,7 +452,7 @@ class MinimaxChatNode(IO.ComfyNode):
             node_id="MinimaxChatNode",
             display_name="MiniMax Chat",
             category="api node/text/MiniMax",
-            description="Generate text responses using MiniMax language models (MiniMax-M2.5).",
+            description="Generate text responses using MiniMax language models (MiniMax-M2.7).",
             inputs=[
                 IO.String.Input(
                     "prompt",
@@ -463,7 +463,7 @@ class MinimaxChatNode(IO.ComfyNode):
                 IO.Combo.Input(
                     "model",
                     options=MiniMaxChatModel,
-                    default=MiniMaxChatModel.M2_5.value,
+                    default=MiniMaxChatModel.M2_7.value,
                     tooltip="The MiniMax model to use for text generation.",
                 ),
                 IO.String.Input(
@@ -524,7 +524,7 @@
     async def execute(
         cls,
         prompt: str,
-        model: str = MiniMaxChatModel.M2_5.value,
+        model: str = MiniMaxChatModel.M2_7.value,
         system_prompt: Optional[str] = None,
         max_tokens: int = 4096,
         temperature: float = 0.7,