Use max_completion_tokens for GPT-5 models
All checks were successful
BotServer CI/CD / build (push) Successful in 3m13s
All checks were successful
BotServer CI/CD / build (push) Successful in 3m13s
This commit is contained in:
parent
05f2a5b2ab
commit
f9178e947e
1 changed file with 7 additions and 1 deletion
|
|
@ -506,11 +506,17 @@ impl LLMProvider for OpenAIClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build the request body - include tools if provided
|
// Build the request body - include tools if provided
|
||||||
|
// GPT-5 models use max_completion_tokens instead of max_tokens
|
||||||
|
let token_key = if model.contains("gpt-5") {
|
||||||
|
"max_completion_tokens"
|
||||||
|
} else {
|
||||||
|
"max_tokens"
|
||||||
|
};
|
||||||
let mut request_body = serde_json::json!({
|
let mut request_body = serde_json::json!({
|
||||||
"model": model,
|
"model": model,
|
||||||
"messages": messages,
|
"messages": messages,
|
||||||
"stream": true,
|
"stream": true,
|
||||||
"max_tokens": 16384,
|
token_key: 16384,
|
||||||
"temperature": 1.0,
|
"temperature": 1.0,
|
||||||
"top_p": 1.0
|
"top_p": 1.0
|
||||||
});
|
});
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue