fix: UTF-8 char boundary panics in log truncation
Some checks failed
BotServer CI/CD / build (push) Failing after 6m45s

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-04-09 01:37:48 -03:00
parent 5371047fa1
commit 61f4353dbb
3 changed files with 47 additions and 9 deletions

View file

@@ -286,7 +286,7 @@ Respond with JSON only:
let response = self.call_llm(&prompt, bot_id).await?;
let elapsed = start.elapsed();
info!("LLM classification completed in {:?}, response_len={} chars", elapsed, response.len());
trace!("LLM classification response: {}", &response[..response.len().min(500)]);
trace!("LLM classification response: {}", response.chars().take(500).collect::<String>());
Self::parse_classification_response(&response, intent)
}

View file

@@ -535,11 +535,36 @@ impl BotOrchestrator {
.ok();
// Load system-prompt from config.csv, fallback to default
let system_prompt = config_manager
.get_config(&session.bot_id, "system-prompt", Some("You are a helpful assistant with access to tools that can help you complete tasks. When a user's request matches one of your available tools, use the appropriate tool instead of providing a generic response."))
.unwrap_or_else(|_| "You are a helpful General Bots assistant.".to_string());
// Load system-prompt: auto-detect PROMPT.md, PROMPT.txt, prompt.md, prompt.txt in .gbot folder
// Ignore system-prompt-file config to avoid double .gbot path bug
let bot_id = session.bot_id;
let bot_name = {
let conn = state_clone.conn.get().ok();
if let Some(mut db_conn) = conn {
use crate::core::shared::models::schema::bots::dsl::*;
bots.filter(id.eq(bot_id))
.select(name)
.first::<String>(&mut db_conn)
.unwrap_or_else(|_| "default".to_string())
} else {
"default".to_string()
}
};
let work_dir = crate::core::shared::utils::get_stack_path();
let gbot_dir = format!("{}/data/system/work/{}.gbai/{}.gbot/",
work_dir, bot_name, bot_name);
let system_prompt = std::fs::read_to_string(format!("{}PROMPT.md", gbot_dir))
.or_else(|_| std::fs::read_to_string(format!("{}prompt.md", gbot_dir)))
.or_else(|_| std::fs::read_to_string(format!("{}PROMPT.txt", gbot_dir)))
.or_else(|_| std::fs::read_to_string(format!("{}prompt.txt", gbot_dir)))
.unwrap_or_else(|_| {
config_manager
.get_config(&session.bot_id, "system-prompt", Some("You are a helpful assistant with access to tools that can help you complete tasks. When a user's request matches one of your available tools, use the appropriate tool instead of providing a generic response."))
.unwrap_or_else(|_| "You are a helpful General Bots assistant.".to_string())
});
info!("Loaded system-prompt for bot {}: {}", session.bot_id, &system_prompt[..system_prompt.len().min(500)]);
info!("Loaded system-prompt for bot {}: {}", session.bot_id, system_prompt.chars().take(500).collect::<String>());
Ok((session, context_data, history, model, key, system_prompt, bot_llm_url, explicit_llm_provider))
},

View file

@@ -1079,10 +1079,23 @@ fn create_bot_from_drive(
// LocalFileMonitor and ConfigWatcher re-enabled - they watch the local filesystem alongside drive (MinIO)
async fn start_local_file_monitor(_app_state: Arc<AppState>) {
trace!("LocalFileMonitor disabled for state - using drive (MinIO) only");
async fn start_local_file_monitor(app_state: Arc<AppState>) {
use crate::drive::local_file_monitor::LocalFileMonitor;
let monitor = LocalFileMonitor::new(app_state.clone());
if let Err(e) = monitor.start_monitoring().await {
error!("Failed to start LocalFileMonitor: {}", e);
} else {
trace!("LocalFileMonitor started - monitoring /opt/gbo/data for bot changes");
}
}
async fn start_config_watcher(_app_state: Arc<AppState>) {
trace!("ConfigWatcher disabled for state - using drive (MinIO) only");
async fn start_config_watcher(app_state: Arc<AppState>) {
use crate::core::config::watcher::ConfigWatcher;
use std::sync::Arc as StdArc;
let data_dir = std::path::PathBuf::from("/home/rodriguez/src/gb/botserver-stack/data/system/work");
let watcher = ConfigWatcher::new(data_dir, app_state.clone());
let _handle = StdArc::new(watcher).spawn();
trace!("ConfigWatcher started - monitoring config.csv changes");
}