Use max_completion_tokens for GPT-5 models
All checks were successful
BotServer CI/CD / build (push) Successful in 3m13s

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-04-16 16:21:16 -03:00
parent 05f2a5b2ab
commit f9178e947e

View file

@@ -506,11 +506,17 @@ impl LLMProvider for OpenAIClient {
         }
         // Build the request body - include tools if provided
+        // GPT-5 models use max_completion_tokens instead of max_tokens
+        let token_key = if model.contains("gpt-5") {
+            "max_completion_tokens"
+        } else {
+            "max_tokens"
+        };
         let mut request_body = serde_json::json!({
             "model": model,
             "messages": messages,
             "stream": true,
-            "max_tokens": 16384,
+            token_key: 16384,
             "temperature": 1.0,
             "top_p": 1.0
         });