feat: upgrade MiniMax default model to M2.7 (#3865)
Add MiniMax-M2.7 and M2.7-highspeed to the model selection lists. Set M2.7 as the new default model for the MiniMax and Novita providers, and update the Ollama cloud model list to M2.7. Retain the previous models (M2.5, M2.1, M2) as selectable alternatives.
This commit is contained in:
parent
7d2ae75fdd
commit
c9f6e97334
@@ -783,10 +783,15 @@ fn allows_unauthenticated_model_fetch(provider_name: &str) -> bool {
|
||||
}
|
||||
|
||||
/// Pick a sensible default model for the given provider.
|
||||
const MINIMAX_ONBOARD_MODELS: [(&str, &str); 5] = [
|
||||
("MiniMax-M2.5", "MiniMax M2.5 (latest, recommended)"),
|
||||
const MINIMAX_ONBOARD_MODELS: [(&str, &str); 7] = [
|
||||
(
|
||||
"MiniMax-M2.7",
|
||||
"MiniMax M2.7 (latest flagship, recommended)",
|
||||
),
|
||||
("MiniMax-M2.7-highspeed", "MiniMax M2.7 High-Speed (faster)"),
|
||||
("MiniMax-M2.5", "MiniMax M2.5 (stable)"),
|
||||
("MiniMax-M2.5-highspeed", "MiniMax M2.5 High-Speed (faster)"),
|
||||
("MiniMax-M2.1", "MiniMax M2.1 (stable)"),
|
||||
("MiniMax-M2.1", "MiniMax M2.1 (previous gen)"),
|
||||
("MiniMax-M2.1-highspeed", "MiniMax M2.1 High-Speed (faster)"),
|
||||
("MiniMax-M2", "MiniMax M2 (legacy)"),
|
||||
];
|
||||
@@ -803,12 +808,12 @@ fn default_model_for_provider(provider: &str) -> String {
|
||||
"xai" => "grok-4-1-fast-reasoning".into(),
|
||||
"perplexity" => "sonar-pro".into(),
|
||||
"fireworks" => "accounts/fireworks/models/llama-v3p3-70b-instruct".into(),
|
||||
"novita" => "minimax/minimax-m2.5".into(),
|
||||
"novita" => "minimax/minimax-m2.7".into(),
|
||||
"together-ai" => "meta-llama/Llama-3.3-70B-Instruct-Turbo".into(),
|
||||
"cohere" => "command-a-03-2025".into(),
|
||||
"moonshot" => "kimi-k2.5".into(),
|
||||
"glm" | "zai" => "glm-5".into(),
|
||||
"minimax" => "MiniMax-M2.5".into(),
|
||||
"minimax" => "MiniMax-M2.7".into(),
|
||||
"qwen" => "qwen-plus".into(),
|
||||
"qwen-code" => "qwen3-coder-plus".into(),
|
||||
"ollama" => "llama3.2".into(),
|
||||
@@ -997,10 +1002,16 @@ fn curated_models_for_provider(provider_name: &str) -> Vec<(String, String)> {
|
||||
"Mixtral 8x22B".to_string(),
|
||||
),
|
||||
],
|
||||
"novita" => vec![(
|
||||
"minimax/minimax-m2.5".to_string(),
|
||||
"MiniMax M2.5".to_string(),
|
||||
)],
|
||||
"novita" => vec![
|
||||
(
|
||||
"minimax/minimax-m2.7".to_string(),
|
||||
"MiniMax M2.7 (latest flagship)".to_string(),
|
||||
),
|
||||
(
|
||||
"minimax/minimax-m2.5".to_string(),
|
||||
"MiniMax M2.5".to_string(),
|
||||
),
|
||||
],
|
||||
"together-ai" => vec![
|
||||
(
|
||||
"meta-llama/Llama-3.3-70B-Instruct-Turbo".to_string(),
|
||||
@@ -1065,9 +1076,17 @@ fn curated_models_for_provider(provider_name: &str) -> Vec<(String, String)> {
|
||||
),
|
||||
],
|
||||
"minimax" => vec![
|
||||
(
|
||||
"MiniMax-M2.7".to_string(),
|
||||
"MiniMax M2.7 (latest flagship)".to_string(),
|
||||
),
|
||||
(
|
||||
"MiniMax-M2.7-highspeed".to_string(),
|
||||
"MiniMax M2.7 High-Speed (fast)".to_string(),
|
||||
),
|
||||
(
|
||||
"MiniMax-M2.5".to_string(),
|
||||
"MiniMax M2.5 (latest flagship)".to_string(),
|
||||
"MiniMax M2.5 (stable)".to_string(),
|
||||
),
|
||||
(
|
||||
"MiniMax-M2.5-highspeed".to_string(),
|
||||
@@ -1075,7 +1094,7 @@ fn curated_models_for_provider(provider_name: &str) -> Vec<(String, String)> {
|
||||
),
|
||||
(
|
||||
"MiniMax-M2.1".to_string(),
|
||||
"MiniMax M2.1 (strong coding/reasoning)".to_string(),
|
||||
"MiniMax M2.1 (previous gen)".to_string(),
|
||||
),
|
||||
],
|
||||
"qwen" => vec![
|
||||
@@ -1631,7 +1650,7 @@ async fn fetch_live_models_for_provider(
|
||||
"qwen3-coder-next:cloud".to_string(),
|
||||
"qwen3-coder:480b:cloud".to_string(),
|
||||
"kimi-k2.5:cloud".to_string(),
|
||||
"minimax-m2.5:cloud".to_string(),
|
||||
"minimax-m2.7:cloud".to_string(),
|
||||
"deepseek-v3.1:671b:cloud".to_string(),
|
||||
]
|
||||
} else {
|
||||
@@ -6662,7 +6681,7 @@ mod tests {
|
||||
assert_eq!(default_model_for_provider("qwen-intl"), "qwen-plus");
|
||||
assert_eq!(default_model_for_provider("qwen-code"), "qwen3-coder-plus");
|
||||
assert_eq!(default_model_for_provider("glm-cn"), "glm-5");
|
||||
assert_eq!(default_model_for_provider("minimax-cn"), "MiniMax-M2.5");
|
||||
assert_eq!(default_model_for_provider("minimax-cn"), "MiniMax-M2.7");
|
||||
assert_eq!(default_model_for_provider("zai-cn"), "glm-5");
|
||||
assert_eq!(default_model_for_provider("gemini"), "gemini-2.5-pro");
|
||||
assert_eq!(default_model_for_provider("google"), "gemini-2.5-pro");
|
||||
|
||||
Loading…
Reference in New Issue
Block a user