debug: add LLM output traces to diagnose blank HTML rendering issue
All checks were successful
BotServer CI/CD / build (push) Successful in 4m0s
All checks were successful
BotServer CI/CD / build (push) Successful in 4m0s
- Log full LLM response preview (500 chars) with has_html detection
- Log WebSocket send with message type, completeness, and content preview
- Use clone() for chunk in BotResponse to ensure accurate logging

Co-authored-by: Qwen-Coder <qwen-coder@alibabacloud.com>
This commit is contained in:
parent
d1652fc413
commit
326305d55e
2 changed files with 21 additions and 1 deletions
|
|
@ -70,6 +70,16 @@ impl WebChannelAdapter {
|
||||||
session_id: &str,
|
session_id: &str,
|
||||||
message: BotResponse,
|
message: BotResponse,
|
||||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||||
|
// DEBUG: Log what's being sent to WebSocket
|
||||||
|
let content_preview = if message.content.len() > 200 {
|
||||||
|
format!("{}... ({} chars)", &message.content[..200], message.content.len())
|
||||||
|
} else {
|
||||||
|
message.content.clone()
|
||||||
|
};
|
||||||
|
debug!("[WS_SEND] session={} msg_type={:?} is_complete={} content_preview=\"{}\"",
|
||||||
|
session_id, message.message_type, message.is_complete,
|
||||||
|
content_preview.replace('\n', "\\n"));
|
||||||
|
|
||||||
let connections = self.connections.lock().await;
|
let connections = self.connections.lock().await;
|
||||||
if let Some(tx) = connections.get(session_id) {
|
if let Some(tx) = connections.get(session_id) {
|
||||||
if let Err(e) = tx.send(message).await {
|
if let Err(e) = tx.send(message).await {
|
||||||
|
|
|
||||||
|
|
@ -1128,7 +1128,7 @@ impl BotOrchestrator {
|
||||||
user_id: message.user_id.clone(),
|
user_id: message.user_id.clone(),
|
||||||
session_id: message.session_id.clone(),
|
session_id: message.session_id.clone(),
|
||||||
channel: message.channel.clone(),
|
channel: message.channel.clone(),
|
||||||
content: chunk,
|
content: chunk.clone(),
|
||||||
message_type: MessageType::BOT_RESPONSE,
|
message_type: MessageType::BOT_RESPONSE,
|
||||||
stream_token: None,
|
stream_token: None,
|
||||||
is_complete: false,
|
is_complete: false,
|
||||||
|
|
@ -1145,6 +1145,16 @@ impl BotOrchestrator {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// DEBUG: Log LLM output for troubleshooting HTML rendering issues
|
||||||
|
let has_html = full_response.contains("</") || full_response.contains("<!--");
|
||||||
|
let preview = if full_response.len() > 500 {
|
||||||
|
format!("{}... ({} chars total)", &full_response[..500], full_response.len())
|
||||||
|
} else {
|
||||||
|
full_response.clone()
|
||||||
|
};
|
||||||
|
info!("[LLM_OUTPUT] session={} has_html={} preview=\"{}\"",
|
||||||
|
session_id, has_html, preview.replace('\n', "\\n"));
|
||||||
|
|
||||||
trace!("LLM stream complete. Full response: {}", full_response);
|
trace!("LLM stream complete. Full response: {}", full_response);
|
||||||
|
|
||||||
let state_for_save = self.state.clone();
|
let state_for_save = self.state.clone();
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue