Merge pull request #11697 from foraxe/fix-keep-alive-top-level

fix: Promote keep_alive to top-level in payload for Ollama API (dev branch)
Timothy Jaeryang Baek 2025-03-15 14:43:27 +00:00 committed by GitHub
commit c8d02aafd9
2 changed files with 11 additions and 1 deletion
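
For context: Ollama's /api/chat endpoint reads keep_alive as a top-level request field, so a value nested inside options is not applied. The sketch below illustrates the payload shape this change targets; the model name and keep_alive value are hypothetical and not taken from the PR.

# Illustrative request bodies only; not code from this PR.

# Before: keep_alive nested inside "options" is not honored by Ollama.
before = {
    "model": "llama3.2",
    "messages": [{"role": "user", "content": "Hello"}],
    "options": {"temperature": 0.7, "keep_alive": "10m"},
}

# After: keep_alive promoted to the top level, where /api/chat expects it.
after = {
    "model": "llama3.2",
    "messages": [{"role": "user", "content": "Hello"}],
    "options": {"temperature": 0.7},
    "keep_alive": "10m",
}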


@@ -1169,7 +1169,7 @@ async def generate_chat_completion(
    prefix_id = api_config.get("prefix_id", None)
    if prefix_id:
        payload["model"] = payload["model"].replace(f"{prefix_id}.", "")
    # payload["keep_alive"] = -1 # keep alive forever
    return await send_post_request(
        url=f"{url}/api/chat",
        payload=json.dumps(payload),


@@ -110,6 +110,11 @@ def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict:
        "num_thread": int,
    }

    # Extract keep_alive from options if it exists
    if "options" in form_data and "keep_alive" in form_data["options"]:
        form_data["keep_alive"] = form_data["options"]["keep_alive"]
        del form_data["options"]["keep_alive"]

    return apply_model_params_to_body(params, form_data, mappings)
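
A standalone sketch of the extraction step added above, assuming form_data is shaped like the Ollama chat payload. This is a re-implementation for illustration, not the repository function:

# Minimal sketch of the keep_alive promotion; illustrative only.
def promote_keep_alive(form_data: dict) -> dict:
    # Move keep_alive from the nested options dict to the top level.
    if "options" in form_data and "keep_alive" in form_data["options"]:
        form_data["keep_alive"] = form_data["options"]["keep_alive"]
        del form_data["options"]["keep_alive"]
    return form_data

# Example: keep_alive moves out of options to the top level.
payload = {"model": "llama3.2", "options": {"keep_alive": 300, "num_ctx": 4096}}
assert promote_keep_alive(payload) == {
    "model": "llama3.2",
    "options": {"num_ctx": 4096},
    "keep_alive": 300,
}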
@@ -231,6 +236,11 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
                "system"
            ]  # To prevent Ollama warning of invalid option provided

        # Extract keep_alive from options if it exists
        if "keep_alive" in ollama_options:
            ollama_payload["keep_alive"] = ollama_options["keep_alive"]
            del ollama_options["keep_alive"]

    # If there is the "stop" parameter in the openai_payload, remap it to the ollama_payload.options
    if "stop" in openai_payload:
        ollama_options = ollama_payload.get("options", {})
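
The same promotion is applied on the OpenAI-to-Ollama conversion path. A hedged before/after sketch of the intended result; the dicts are hypothetical, and the real function also remaps other fields, such as the stop parameter shown above:

# Hypothetical conversion outcome; illustrative only.
openai_style = {
    "model": "llama3.2",
    "messages": [{"role": "user", "content": "Hi"}],
    "options": {"keep_alive": "5m", "temperature": 0.2},
}

# Expected Ollama payload once keep_alive is lifted out of options:
expected_ollama = {
    "model": "llama3.2",
    "messages": [{"role": "user", "content": "Hi"}],
    "options": {"temperature": 0.2},
    "keep_alive": "5m",
}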