openai: allow for content _and_ tool calls in the same message

Previously our OpenAI chat completions compat layer assumed that tool
calls and content would never be provided together, but this is not a
correct assumption. Content is only optional when tool calls are
present, but tool calls and content can be provided together.

Fixes: https://github.com/ollama/ollama/issues/11704
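
To illustrate the case described above: the sketch below (hypothetical Go, not code from this commit; the type and field names are illustrative) builds an OpenAI-style assistant message that carries both content and tool_calls. Running it prints the wire shape that the compat layer must now map onto a single api.Message with both Content and ToolCalls set.

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative shapes only; the compat layer defines its own request types.
type toolCallFunction struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"` // arguments arrive as a JSON-encoded string
}

type toolCall struct {
	ID       string           `json:"id"`
	Type     string           `json:"type"`
	Function toolCallFunction `json:"function"`
}

type chatMessage struct {
	Role      string     `json:"role"`
	Content   string     `json:"content,omitempty"`
	ToolCalls []toolCall `json:"tool_calls,omitempty"`
}

func main() {
	msg := chatMessage{
		Role:    "assistant",
		Content: "Let's see what the weather is like in Paris",
		ToolCalls: []toolCall{{
			ID:   "id",
			Type: "function",
			Function: toolCallFunction{
				Name:      "get_current_weather",
				Arguments: `{"location": "Paris, France", "format": "celsius"}`,
			},
		}},
	}

	// Before this change, a message like this had its tool calls dropped by
	// the compat layer whenever string content was also present.
	out, _ := json.MarshalIndent(msg, "", "  ")
	fmt.Println(string(out))
}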
Author: Devon Rifkin
Date: 2025-08-06 15:50:30 -07:00
Commit: 203c137810 (parent: 30f8a68c4c)

2 changed files with 67 additions and 1 deletion

openai/openai.go

@@ -403,7 +403,11 @@ func fromChatRequest(r ChatCompletionRequest) (*api.ChatRequest, error) {
 	for _, msg := range r.Messages {
 		switch content := msg.Content.(type) {
 		case string:
-			messages = append(messages, api.Message{Role: msg.Role, Content: content, Thinking: msg.Reasoning})
+			toolCalls, err := fromCompletionToolCall(msg.ToolCalls)
+			if err != nil {
+				return nil, err
+			}
+			messages = append(messages, api.Message{Role: msg.Role, Content: content, Thinking: msg.Reasoning, ToolCalls: toolCalls})
 		case []any:
 			for _, c := range content {
 				data, ok := c.(map[string]any)
@@ -454,7 +458,17 @@ func fromChatRequest(r ChatCompletionRequest) (*api.ChatRequest, error) {
 					return nil, errors.New("invalid message format")
 				}
 			}
+			// since we might have added multiple messages above, if we have tool
+			// calls we'll add them to the last message
+			if len(messages) > 0 && len(msg.ToolCalls) > 0 {
+				toolCalls, err := fromCompletionToolCall(msg.ToolCalls)
+				if err != nil {
+					return nil, err
+				}
+				messages[len(messages)-1].ToolCalls = toolCalls
+			}
 		default:
+			// content is only optional if tool calls are present
 			if msg.ToolCalls == nil {
 				return nil, fmt.Errorf("invalid message content type: %T", content)
 			}
@@ -549,6 +563,19 @@ func fromChatRequest(r ChatCompletionRequest) (*api.ChatRequest, error) {
 	}, nil
 }
 
+func fromCompletionToolCall(toolCalls []ToolCall) ([]api.ToolCall, error) {
+	apiToolCalls := make([]api.ToolCall, len(toolCalls))
+	for i, tc := range toolCalls {
+		apiToolCalls[i].Function.Name = tc.Function.Name
+		err := json.Unmarshal([]byte(tc.Function.Arguments), &apiToolCalls[i].Function.Arguments)
+		if err != nil {
+			return nil, errors.New("invalid tool call arguments")
+		}
+	}
+
+	return apiToolCalls, nil
+}
+
 func fromCompleteRequest(r CompletionRequest) (api.GenerateRequest, error) {
 	options := make(map[string]any)
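
A note on the new fromCompletionToolCall helper above: OpenAI-style tool call arguments arrive as a JSON-encoded string, and the helper unmarshals them into a structured map. The standalone sketch below (hypothetical stand-in types, not ollama's actual api package) shows the same conversion in isolation.

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// Stand-in for an incoming OpenAI-style tool call: arguments are a JSON string.
type wireToolCall struct {
	Name      string
	Arguments string
}

// Stand-in for the converted form: arguments become a structured map.
type convertedToolCall struct {
	Name      string
	Arguments map[string]any
}

// convertToolCalls mirrors the idea in the diff: copy the name and unmarshal
// the argument string for each tool call, rejecting invalid JSON.
func convertToolCalls(in []wireToolCall) ([]convertedToolCall, error) {
	out := make([]convertedToolCall, len(in))
	for i, tc := range in {
		out[i].Name = tc.Name
		if err := json.Unmarshal([]byte(tc.Arguments), &out[i].Arguments); err != nil {
			return nil, errors.New("invalid tool call arguments")
		}
	}
	return out, nil
}

func main() {
	calls, err := convertToolCalls([]wireToolCall{{
		Name:      "get_current_weather",
		Arguments: `{"location": "Paris, France", "format": "celsius"}`,
	}})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", calls)
	// Prints roughly: [{Name:get_current_weather Arguments:map[format:celsius location:Paris, France]}]
}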

openai/openai_test.go

@@ -235,6 +235,45 @@ func TestChatMiddleware(t *testing.T) {
 				Stream: &False,
 			},
 		},
+		{
+			name: "chat handler with tools and content",
+			body: `{
+				"model": "test-model",
+				"messages": [
+					{"role": "user", "content": "What's the weather like in Paris Today?"},
+					{"role": "assistant", "content": "Let's see what the weather is like in Paris", "tool_calls": [{"id": "id", "type": "function", "function": {"name": "get_current_weather", "arguments": "{\"location\": \"Paris, France\", \"format\": \"celsius\"}"}}]}
+				]
+			}`,
+			req: api.ChatRequest{
+				Model: "test-model",
+				Messages: []api.Message{
+					{
+						Role:    "user",
+						Content: "What's the weather like in Paris Today?",
+					},
+					{
+						Role:    "assistant",
+						Content: "Let's see what the weather is like in Paris",
+						ToolCalls: []api.ToolCall{
+							{
+								Function: api.ToolCallFunction{
+									Name: "get_current_weather",
+									Arguments: map[string]any{
+										"location": "Paris, France",
+										"format":   "celsius",
+									},
+								},
+							},
+						},
+					},
+				},
+				Options: map[string]any{
+					"temperature": 1.0,
+					"top_p":       1.0,
+				},
+				Stream: &False,
+			},
+		},
 		{
 			name: "chat handler with streaming tools",
 			body: `{