Merge pull request #16397 from 17jmumford/add_gpt_5_max_token_handling
fix: added gpt-5 to reasoning model payload handler
@@ -95,12 +95,12 @@ async def cleanup_response(
         await session.close()


-def openai_o_series_handler(payload):
+def openai_reasoning_model_handler(payload):
     """
-    Handle "o" series specific parameters
+    Handle reasoning model specific parameters
     """
     if "max_tokens" in payload:
-        # Convert "max_tokens" to "max_completion_tokens" for all o-series models
+        # Convert "max_tokens" to "max_completion_tokens" for all reasoning models
         payload["max_completion_tokens"] = payload["max_tokens"]
         del payload["max_tokens"]

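For readers skimming the diff, here is a self-contained sketch of the renamed handler as it reads after this change. The trailing return falls outside the visible hunk but is implied by the call site in the second hunk; the sample payload is made up for illustration.

def openai_reasoning_model_handler(payload: dict) -> dict:
    """
    Handle reasoning model specific parameters.

    OpenAI's reasoning models (o1/o3/o4 and gpt-5) expect
    "max_completion_tokens" rather than "max_tokens", so the value is
    moved across and the old key is removed.
    """
    if "max_tokens" in payload:
        # Convert "max_tokens" to "max_completion_tokens" for all reasoning models
        payload["max_completion_tokens"] = payload["max_tokens"]
        del payload["max_tokens"]
    # Not shown in the hunk, but implied by
    # `payload = openai_reasoning_model_handler(payload)` in the second hunk.
    return payload


# Hypothetical payload, for illustration only:
print(openai_reasoning_model_handler({"model": "gpt-5", "max_tokens": 1024}))
# -> {'model': 'gpt-5', 'max_completion_tokens': 1024}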
@@ -789,10 +789,10 @@ async def generate_chat_completion(
     url = request.app.state.config.OPENAI_API_BASE_URLS[idx]
     key = request.app.state.config.OPENAI_API_KEYS[idx]

-    # Check if model is from "o" series
-    is_o_series = payload["model"].lower().startswith(("o1", "o3", "o4"))
-    if is_o_series:
-        payload = openai_o_series_handler(payload)
+    # Check if model is a reasoning model that needs special handling
+    is_reasoning_model = payload["model"].lower().startswith(("o1", "o3", "o4", "gpt-5"))
+    if is_reasoning_model:
+        payload = openai_reasoning_model_handler(payload)
     elif "api.openai.com" not in url:
         # Remove "max_completion_tokens" from the payload for backward compatibility
         if "max_completion_tokens" in payload:
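And a minimal, runnable sketch of how the dispatch behaves after this change. `prepare_payload` is a hypothetical wrapper used only for illustration (in the actual router this logic sits inline in `generate_chat_completion`), and the body of the non-OpenAI backward-compatibility branch is an assumption, since it falls outside the visible hunk.

def openai_reasoning_model_handler(payload: dict) -> dict:
    # Move "max_tokens" to "max_completion_tokens" (see the first hunk above).
    if "max_tokens" in payload:
        payload["max_completion_tokens"] = payload.pop("max_tokens")
    return payload


def prepare_payload(payload: dict, url: str) -> dict:
    # Check if the model is a reasoning model that needs special handling.
    is_reasoning_model = payload["model"].lower().startswith(
        ("o1", "o3", "o4", "gpt-5")
    )
    if is_reasoning_model:
        payload = openai_reasoning_model_handler(payload)
    elif "api.openai.com" not in url:
        # Backward compatibility for OpenAI-compatible backends: the branch body
        # is outside the visible hunk, so dropping the key here is an assumption
        # based on the comment in the diff.
        payload.pop("max_completion_tokens", None)
    return payload


# Hypothetical values: "GPT-5-mini" matches the "gpt-5" prefix after lower().
print(prepare_payload({"model": "GPT-5-mini", "max_tokens": 512},
                      "https://api.openai.com/v1"))
# -> {'model': 'GPT-5-mini', 'max_completion_tokens': 512}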