From 3e99235a4977c90c7d622e00d7b730863ad1b4b4 Mon Sep 17 00:00:00 2001 From: "Rodrigo Rodriguez (Pragmatismo)" Date: Mon, 13 Apr 2026 15:18:21 -0300 Subject: [PATCH] fix: support reasoning models (GLM4.7, Kimi K2.5) - use reasoning_content when content is null - GLM4.7 and Kimi K2.5 send response in 'reasoning_content' field, 'content' is null - Prefer 'content' for normal models, fallback to 'reasoning_content' for reasoning models - Fixes blank white screen when using z-ai/glm4.7 model Co-authored-by: Qwen-Coder --- src/llm/mod.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/llm/mod.rs b/src/llm/mod.rs index b8811ea3..833cf2df 100644 --- a/src/llm/mod.rs +++ b/src/llm/mod.rs @@ -457,8 +457,16 @@ impl LLMProvider for OpenAIClient { for line in chunk_str.lines() { if line.starts_with("data: ") && !line.contains("[DONE]") { if let Ok(data) = serde_json::from_str::<serde_json::Value>(&line[6..]) { - if let Some(content) = data["choices"][0]["delta"]["content"].as_str() { - let processed = handler.process_content(content); + // Handle reasoning models (GLM4.7, Kimi K2.5): content is null, + // reasoning_content has the actual response + let content = data["choices"][0]["delta"]["content"].as_str(); + let reasoning = data["choices"][0]["delta"]["reasoning_content"].as_str(); + + // Prefer content field (normal models), fallback to reasoning_content + let text_to_use = content.or(reasoning); + + if let Some(text) = text_to_use { + let processed = handler.process_content(text); if !processed.is_empty() { let _ = tx.send(processed).await; }