generalbots/botserver/src/paper/llm.rs
Rodrigo Rodriguez (Pragmatismo) c70fbba099 refactor: Remove ooxmlsdk from default build, split document_processor, fix DriveMonitor sync
- Replace docs/sheet/slides with kb-extraction in default features (~4-6min compile time savings, ~300MB less disk)
- Add kb-extraction feature using zip+quick-xml+calamine for lightweight KB extraction
- Split document_processor.rs (829 lines) into mod.rs+types.rs+ooxml_extract.rs+rtf.rs
- Move DOCX/PPTX ZIP-based extraction to document_processor::ooxml_extract (no ooxmlsdk needed)
- Remove dead code: save_docx_preserving(), save_pptx_preserving() (zero callers)
- Fix dep: prefix for optional dependencies in feature definitions
- DriveMonitor: full S3 sync, ETag change detection, KB incremental indexing, config.csv sync
- ConfigManager: real DB reads from bot_configuration table
- 0 warnings, 0 errors on both default and full feature builds
2026-04-21 14:54:41 +00:00

44 lines
1.2 KiB
Rust

use std::sync::Arc;
use crate::core::shared::state::AppState;
#[cfg(feature = "llm")]
use crate::llm::OpenAIClient;
/// Sends a one-shot chat completion request to the configured LLM provider.
///
/// Builds an OpenAI-style message list from `system_prompt` plus a single
/// `"user"` message containing `user_content`, reads the model name and API
/// key from bot configuration (falling back to `"gpt-3.5-turbo"` and an
/// empty key when the lookup fails), and returns the generated text.
///
/// When the crate is built without the `llm` feature, returns a stub string
/// echoing up to the first 50 characters of `user_content` instead.
///
/// # Errors
/// Returns `Err(String)` with a `"LLM error: …"` message when the provider
/// call fails (only reachable with the `llm` feature enabled).
pub async fn call_llm(
    state: &Arc<AppState>,
    system_prompt: &str,
    user_content: &str,
) -> Result<String, String> {
    #[cfg(feature = "llm")]
    {
        let llm = &state.llm_provider;
        let messages = OpenAIClient::build_messages(
            system_prompt,
            "",
            &[("user".to_string(), user_content.to_string())],
        );
        // NOTE(review): config is looked up with the nil UUID, i.e. global
        // defaults rather than per-bot settings — confirm this is intended.
        let config_manager = crate::core::config::ConfigManager::new(state.conn.clone());
        let model = config_manager
            .get_config(&uuid::Uuid::nil(), "llm-model", None)
            .unwrap_or_else(|_| "gpt-3.5-turbo".to_string());
        let key = config_manager
            .get_config(&uuid::Uuid::nil(), "llm-key", None)
            .unwrap_or_else(|_| String::new());
        llm.generate(user_content, &messages, &model, &key)
            .await
            .map_err(|e| format!("LLM error: {}", e))
    }
    #[cfg(not(feature = "llm"))]
    {
        let _ = (state, system_prompt);
        // Truncate on a char boundary: the previous byte slice
        // `&user_content[..50.min(len)]` panics when byte index 50 falls
        // inside a multi-byte UTF-8 character.
        let preview: String = user_content.chars().take(50).collect();
        Ok(format!("[LLM not available] Processing: {}...", preview))
    }
}