mirror of
https://github.com/open-webui/open-webui.git
synced 2025-04-11 21:39:07 +02:00
Remove mapping of max_completion_tokens
1) `max_completion_tokens` is looked up in `openai_payload`, but it would actually be located in `openai_payload['options']`, so it is never found. (This applies to the prior two commits as well.) 2) `max_completion_tokens` is not sent from the frontend — only `max_tokens` is; it does not appear in AdvancedParams.svelte. 3) `openai.py` does use `max_completion_tokens`, but only for o1/o3 models, where it converts it from `max_tokens`.
This commit is contained in:
parent
e6919c3242
commit
aea8977d05
@@ -185,9 +185,7 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
             ollama_options[param] = openai_payload[param]

     # Mapping OpenAI's `max_tokens` -> Ollama's `num_predict`
-    if "max_completion_tokens" in openai_payload:
-        ollama_options["num_predict"] = openai_payload["max_completion_tokens"]
-    elif "max_tokens" in openai_payload:
+    if "max_tokens" in openai_payload:
         ollama_options["num_predict"] = openai_payload["max_tokens"]

     # Add options to payload if any have been set
|
Loading…
x
Reference in New Issue
Block a user