15 changes: 15 additions & 0 deletions code-rs/common/src/model_presets.rs
@@ -117,6 +117,21 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
}),
show_in_picker: true,
},
ModelPreset {
id: "glm-4.7".to_string(),
model: "glm-4.7".to_string(),
display_name: "glm-4.7".to_string(),
description: "Z.AI flagship coding model with thinking always enabled.".to_string(),
default_reasoning_effort: ReasoningEffort::Medium,
supported_reasoning_efforts: vec![ReasoningEffortPreset {
effort: ReasoningEffort::Medium,
description: "Thinking is always enabled for GLM-4.7".to_string(),
}],
supported_text_verbosity: &[TextVerbosityConfig::Medium],
is_default: false,
upgrade: None,
show_in_picker: true,
},
ModelPreset {
id: "bengalfox".to_string(),
model: "bengalfox".to_string(),
18 changes: 16 additions & 2 deletions code-rs/core/src/chat_completions.rs
@@ -169,6 +169,13 @@ pub(crate) async fn stream_chat_completions(
for (idx, item) in input.iter().enumerate() {
match item {
ResponseItem::Message { role, content, .. } => {
let effective_role = if model_slug.eq_ignore_ascii_case("glm-4.7")
&& role.eq_ignore_ascii_case("developer")
{
"system"
} else {
role.as_str()
};
// If the message contains any images, we must use the
// multi-modal array form supported by Chat Completions:
// [{ type: "text", text: "..." }, { type: "image_url", image_url: { url: "data:..." } }]
@@ -191,7 +198,7 @@
}
}
}
messages.push(json!({"role": role, "content": parts}));
messages.push(json!({"role": effective_role, "content": parts}));
} else {
// Text-only messages can be sent as a single string for
// maximal compatibility with providers that only accept
@@ -206,7 +213,7 @@
_ => {}
}
}
messages.push(json!({"role": role, "content": text}));
messages.push(json!({"role": effective_role, "content": text}));
}
}
ResponseItem::CompactionSummary { .. } => {
@@ -296,6 +303,13 @@ pub(crate) async fn stream_chat_completions(
"tools": tools_json,
});

if model_slug.eq_ignore_ascii_case("glm-4.7") {
if let Some(obj) = payload.as_object_mut() {
obj.insert("temperature".to_string(), json!(1.0));
obj.insert("thinking".to_string(), json!({ "type": "enabled" }));
}
}

if let Some(openrouter_cfg) = provider.openrouter_config() {
if let Some(obj) = payload.as_object_mut() {
if let Some(provider_cfg) = &openrouter_cfg.provider {
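
Taken together, the chat-completions changes remap `developer` messages to `system` and pin `temperature`/`thinking` for GLM-4.7. A minimal standalone sketch of both behaviors; the helper names here are illustrative, not part of this crate:

```rust
use serde_json::{json, Value};

// Mirrors the role remap in the diff: GLM-4.7 gets `system` in place of `developer`.
fn effective_role<'a>(model_slug: &str, role: &'a str) -> &'a str {
    if model_slug.eq_ignore_ascii_case("glm-4.7") && role.eq_ignore_ascii_case("developer") {
        "system"
    } else {
        role
    }
}

// Mirrors the payload tweak in the diff: fixed temperature, thinking always on.
fn apply_glm_overrides(model_slug: &str, payload: &mut Value) {
    if model_slug.eq_ignore_ascii_case("glm-4.7") {
        if let Some(obj) = payload.as_object_mut() {
            obj.insert("temperature".to_string(), json!(1.0));
            obj.insert("thinking".to_string(), json!({ "type": "enabled" }));
        }
    }
}

fn main() {
    assert_eq!(effective_role("glm-4.7", "developer"), "system");
    assert_eq!(effective_role("gpt-5.1-codex", "developer"), "developer");

    let mut payload = json!({ "model": "glm-4.7", "stream": true });
    apply_glm_overrides("glm-4.7", &mut payload);
    assert_eq!(payload["temperature"], json!(1.0));
    assert_eq!(payload["thinking"]["type"], "enabled");
}
```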
1 change: 1 addition & 0 deletions code-rs/core/src/codex/streaming.rs
@@ -377,6 +377,7 @@ pub(super) async fn submission_loop(
&new_config.model,
Some(new_config.model_reasoning_effort),
new_config.preferred_model_reasoning_effort,
Some(&new_config.model_provider_id),
)
.await
{
34 changes: 32 additions & 2 deletions code-rs/core/src/config.rs
@@ -917,11 +917,11 @@ impl Config {
model_providers.entry(key).or_insert(provider);
}

-let model_provider_id = model_provider
+let mut model_provider_id = model_provider
.or(config_profile.model_provider)
.or(cfg.model_provider)
.unwrap_or_else(|| "openai".to_string());
-let model_provider = model_providers
+let mut model_provider = model_providers
.get(&model_provider_id)
.ok_or_else(|| {
std::io::Error::new(
@@ -1082,6 +1082,32 @@ impl Config {
let model_family =
find_family_for_model(&model).unwrap_or_else(|| derive_default_model_family(&model));

if model.eq_ignore_ascii_case("glm-4.7") {
if !model_provider_id.eq_ignore_ascii_case("zai") {
model_provider = model_providers
.get("zai")
.ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
"Model provider `zai` not found",
)
})?
.clone();
model_provider_id = "zai".to_string();
}
} else if model_provider_id.eq_ignore_ascii_case("zai") {
model_provider = model_providers
.get("openai")
.ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
"Model provider `openai` not found",
)
})?
.clone();
model_provider_id = "openai".to_string();
}

Comment on lines +1098 to +1102

P2: Preserve non-OpenAI provider after leaving glm-4.7

This branch forces model_provider_id to openai whenever the current provider is zai and the model is not glm-4.7. That means a user who normally runs with another provider (e.g., oss/openrouter) and briefly switches to glm-4.7 will be silently switched to OpenAI when they select any other model, and that provider choice gets persisted. For models that are only available on the original provider, requests will now be routed to the wrong backend and can fail. Consider restoring the previous provider (or the config default) instead of hardcoding OpenAI here, or only reverting when the provider was auto-set for glm-4.7.
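A minimal sketch of the reviewer's suggestion: remember the provider that was active before the glm-4.7 auto-switch and restore it, rather than hardcoding `openai`. The `ProviderSelection` type and its `previous_model_provider_id` field are hypothetical, not part of this PR:

```rust
// Hypothetical variant of the provider-switching logic above.
struct ProviderSelection {
    model_provider_id: String,
    // Set only when the provider was auto-switched for glm-4.7.
    previous_model_provider_id: Option<String>,
}

impl ProviderSelection {
    fn select_model(&mut self, model: &str) {
        if model.eq_ignore_ascii_case("glm-4.7") {
            if !self.model_provider_id.eq_ignore_ascii_case("zai") {
                self.previous_model_provider_id = Some(self.model_provider_id.clone());
                self.model_provider_id = "zai".to_string();
            }
        } else if self.model_provider_id.eq_ignore_ascii_case("zai") {
            // Restore the pre-glm provider (or the config default) instead of
            // unconditionally routing to OpenAI.
            self.model_provider_id = self
                .previous_model_provider_id
                .take()
                .unwrap_or_else(|| "openai".to_string());
        }
    }
}

fn main() {
    let mut sel = ProviderSelection {
        model_provider_id: "openrouter".to_string(),
        previous_model_provider_id: None,
    };
    sel.select_model("glm-4.7");
    assert_eq!(sel.model_provider_id, "zai");
    sel.select_model("some-openrouter-model");
    assert_eq!(sel.model_provider_id, "openrouter"); // original provider preserved
}
```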

// Chat model reasoning effort (used when other flows follow the chat model).
let preferred_model_reasoning_effort = config_profile
.preferred_model_reasoning_effort
@@ -1870,6 +1896,7 @@ args = ["-y", "@upstash/context7-mcp"]
"gpt-5.1-codex",
Some(ReasoningEffort::High),
None,
None,
)
.await?;

@@ -1906,6 +1933,7 @@ model = "gpt-4.1"
"o4-mini",
Some(ReasoningEffort::High),
None,
None,
)
.await?;

@@ -1935,6 +1963,7 @@ model = "gpt-4.1"
"gpt-5.1-codex",
Some(ReasoningEffort::Medium),
None,
None,
)
.await?;

@@ -1979,6 +2008,7 @@ model = "gpt-5.1-codex"
"o4-high",
Some(ReasoningEffort::Medium),
None,
None,
)
.await?;

12 changes: 12 additions & 0 deletions code-rs/core/src/config/sources.rs
@@ -147,6 +147,7 @@ pub async fn persist_model_selection(
model: &str,
effort: Option<ReasoningEffort>,
preferred_effort: Option<ReasoningEffort>,
model_provider_id: Option<&str>,
) -> anyhow::Result<()> {
use tokio::fs;

@@ -208,6 +209,12 @@
} else {
profile_table.remove("preferred_model_reasoning_effort");
}

if let Some(provider_id) = model_provider_id {
profile_table["model_provider"] = toml_edit::value(provider_id.to_string());
} else {
profile_table.remove("model_provider");
}
} else {
root["model"] = toml_edit::value(model.to_string());
match effort {
@@ -229,6 +236,11 @@
root.remove("preferred_model_reasoning_effort");
}
}
if let Some(provider_id) = model_provider_id {
root["model_provider"] = toml_edit::value(provider_id.to_string());
} else {
root.remove("model_provider");
}
}
}
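
The `model_provider` key follows the same `toml_edit` set-or-remove pattern as the effort keys above. A standalone sketch of that pattern, assuming a recent `toml_edit` that exposes `DocumentMut`:

```rust
use toml_edit::{value, DocumentMut};

fn main() {
    let mut doc: DocumentMut = "model = \"glm-4.7\"\n".parse().expect("valid TOML");
    let root = doc.as_table_mut();

    // Mirrors the new branch: write the key when a provider is given,
    // drop it when the selection reverts to the default.
    let provider_id: Option<&str> = Some("zai");
    match provider_id {
        Some(id) => root["model_provider"] = value(id),
        None => {
            root.remove("model_provider");
        }
    }

    assert!(doc.to_string().contains("model_provider = \"zai\""));
}
```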

6 changes: 6 additions & 0 deletions code-rs/core/src/model_family.rs
@@ -129,6 +129,12 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
context_window: Some(CONTEXT_WINDOW_1M),
max_output_tokens: Some(32_768),
)
} else if slug.starts_with("glm-4.7") {
model_family!(
slug, "glm-4.7",
context_window: Some(CONTEXT_WINDOW_200K),
max_output_tokens: Some(128_000),
)
} else if slug.starts_with("gpt-oss") || slug.starts_with("openai/gpt-oss") {
model_family!(slug, "gpt-oss", apply_patch_tool_type: Some(ApplyPatchToolType::Function),
uses_local_shell_tool: true,
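
If this entry behaves like its neighbors, any `glm-4.7*` slug should resolve to the new family with the pinned limits. A hedged test-style sketch; the import path and the value of `CONTEXT_WINDOW_200K` are assumptions, not confirmed by the diff:

```rust
// Hedged sketch, not a test from this PR: the crate path is assumed.
use code_core::model_family::find_family_for_model;

fn main() {
    let family = find_family_for_model("glm-4.7").expect("glm-4.7 family registered");
    // The macro above pins the context window (assumed to be 200_000 here)
    // and 128K max output tokens.
    assert_eq!(family.context_window, Some(200_000));
    assert_eq!(family.max_output_tokens, Some(128_000));
}
```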
20 changes: 20 additions & 0 deletions code-rs/core/src/model_provider_info.rs
@@ -519,6 +519,26 @@ pub fn built_in_model_providers() -> HashMap<String, ModelProviderInfo> {
openrouter: None,
},
),
(
"zai",
P {
name: "Z.AI".into(),
base_url: Some("https://api.z.ai/api/coding/paas/v4".to_string()),
env_key: Some("Z_AI_API_KEY".to_string()),
env_key_instructions: Some(
"Set Z_AI_API_KEY to your Z.AI API key.".to_string(),
),
wire_api: WireApi::Chat,
query_params: None,
http_headers: None,
env_http_headers: None,
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
requires_openai_auth: false,
openrouter: None,
},
),
(BUILT_IN_OSS_MODEL_PROVIDER_ID, create_oss_provider()),
]
.into_iter()
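
The new entry can be sanity-checked by looking it up in the built-in map. The field names below come from the diff itself; the import path is an assumption:

```rust
// Hedged sketch: crate path assumed, field names taken from the diff above.
use code_core::model_provider_info::{built_in_model_providers, WireApi};

fn main() {
    let providers = built_in_model_providers();
    let zai = providers.get("zai").expect("zai should be a built-in provider");
    assert_eq!(zai.name, "Z.AI");
    // GLM-4.7 is served over the Chat Completions wire protocol.
    assert!(matches!(zai.wire_api, WireApi::Chat));
    assert_eq!(zai.env_key.as_deref(), Some("Z_AI_API_KEY"));
}
```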
5 changes: 5 additions & 0 deletions code-rs/core/src/reasoning.rs
@@ -61,6 +61,7 @@ const DEFAULT_EFFORTS: &[ReasoningEffort] = &[
ReasoningEffort::Medium,
ReasoningEffort::High,
];
const GLM_4_7_EFFORTS: &[ReasoningEffort] = &[ReasoningEffort::Medium];

fn reasoning_effort_rank(effort: ReasoningEffort) -> u8 {
match effort {
@@ -108,6 +109,10 @@ pub fn supported_reasoning_efforts_for_model(model: &str) -> &'static [ReasoningEffort] {
return GPT5_EFFORTS;
}

if lower.starts_with("glm-4.7") {
return GLM_4_7_EFFORTS;
}

if lower.starts_with("codex-") {
return CODEX_FALLBACK_EFFORTS;
}
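
Since `GLM_4_7_EFFORTS` holds only `Medium`, the lookup should return a single-element slice for any `glm-4.7` slug (matching is prefix-based on the lowercased model name). A hedged sketch; both import paths are assumptions:

```rust
// Hedged sketch: crate and type paths assumed.
use code_core::reasoning::supported_reasoning_efforts_for_model;
use code_protocol::config_types::ReasoningEffort;

fn main() {
    let efforts = supported_reasoning_efforts_for_model("glm-4.7");
    assert_eq!(efforts, &[ReasoningEffort::Medium]);

    // Prefix matching covers dated or suffixed releases too.
    let dated = supported_reasoning_efforts_for_model("glm-4.7-2025-01-01");
    assert_eq!(dated, &[ReasoningEffort::Medium]);
}
```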
30 changes: 30 additions & 0 deletions code-rs/mcp-server/src/message_processor.rs
@@ -1604,6 +1604,10 @@ fn apply_model_selection(config: &mut Config, model: &str, effort: ReasoningEffort) -> bool {
updated = true;
}

if update_provider_for_model(config, model) {
updated = true;
}

if config.model_reasoning_effort != clamped_effort {
config.model_reasoning_effort = clamped_effort;
updated = true;
@@ -1612,6 +1616,32 @@
updated
}

fn update_provider_for_model(config: &mut Config, model: &str) -> bool {
const ZAI_PROVIDER_ID: &str = "zai";
const GLM_4_7_MODEL: &str = "glm-4.7";

if model.eq_ignore_ascii_case(GLM_4_7_MODEL) {
if !config.model_provider_id.eq_ignore_ascii_case(ZAI_PROVIDER_ID) {
if let Some(provider) = config.model_providers.get(ZAI_PROVIDER_ID) {
config.model_provider_id = ZAI_PROVIDER_ID.to_string();
config.model_provider = provider.clone();
return true;
}
}
return false;
}

if config.model_provider_id.eq_ignore_ascii_case(ZAI_PROVIDER_ID) {
if let Some(provider) = config.model_providers.get("openai") {
config.model_provider_id = "openai".to_string();
config.model_provider = provider.clone();
return true;
}
}

false
}

fn configure_session_op_from_config(config: &Config) -> Op {
Op::ConfigureSession {
provider: config.model_provider.clone(),
30 changes: 30 additions & 0 deletions code-rs/tui/src/chatwidget.rs
@@ -21913,6 +21913,32 @@ Have we met every part of this goal and is there no further work to do?"#
.unwrap_or(requested)
}

fn update_provider_for_model(&mut self, model: &str) -> bool {
const ZAI_PROVIDER_ID: &str = "zai";
const GLM_4_7_MODEL: &str = "glm-4.7";

if model.eq_ignore_ascii_case(GLM_4_7_MODEL) {
if !self.config.model_provider_id.eq_ignore_ascii_case(ZAI_PROVIDER_ID) {
if let Some(provider) = self.config.model_providers.get(ZAI_PROVIDER_ID) {
self.config.model_provider_id = ZAI_PROVIDER_ID.to_string();
self.config.model_provider = provider.clone();
return true;
}
}
return false;
}

if self.config.model_provider_id.eq_ignore_ascii_case(ZAI_PROVIDER_ID) {
if let Some(provider) = self.config.model_providers.get("openai") {
self.config.model_provider_id = "openai".to_string();
self.config.model_provider = provider.clone();
return true;
}
}

false
}

fn apply_model_selection_inner(
&mut self,
model: String,
@@ -21939,6 +21965,10 @@
updated = true;
}

if self.update_provider_for_model(trimmed) {
updated = true;
}

if let Some(explicit) = effort {
if self.config.preferred_model_reasoning_effort != Some(explicit) {
self.config.preferred_model_reasoning_effort = Some(explicit);
1 change: 1 addition & 0 deletions code-rs/tui/src/history_cell/formatting.rs
@@ -397,6 +397,7 @@ pub(crate) fn pretty_provider_name(id: &str) -> String {
"read-website-fast" => "readweb",
"sequential-thinking" => "think",
"discord-bot" => "discord",
"zai" => "z.ai",
_ => id,
}
.to_string()