Mirror of https://github.com/ollama/ollama.git (synced 2025-11-11 18:27:06 +01:00)
thinking: allow "think": false for non-thinking models (#12555)
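As the diff below shows, GenerateHandler and ChatHandler previously rejected any request that set "think" for a model without the thinking capability. After this change the handlers add the thinking capability whenever the model supports it (defaulting an omitted "think" to true for such models) and reject only an explicit "think": true on models that cannot think, so "think": false is now accepted everywhere. A ChatHandler test covering that case is added.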
@@ -332,14 +332,16 @@ func (s *Server) GenerateHandler(c *gin.Context) {
 	}
 
 	modelCaps := m.Capabilities()
-	if req.Think != nil {
-		if slices.Contains(modelCaps, model.CapabilityThinking) {
-			caps = append(caps, model.CapabilityThinking)
-		} else {
+	// add thinking if the model supports it
+	if slices.Contains(modelCaps, model.CapabilityThinking) {
+		caps = append(caps, model.CapabilityThinking)
+		if req.Think == nil {
+			req.Think = &api.ThinkValue{Value: true}
+		}
+	} else {
+		if req.Think != nil && req.Think.Bool() {
 			c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("%q does not support thinking", req.Model)})
 			return
 		}
 	}
 
 	r, m, opts, err := s.scheduleRunner(c.Request.Context(), name.String(), caps, req.Options, req.KeepAlive)
@@ -1877,14 +1879,16 @@ func (s *Server) ChatHandler(c *gin.Context) {
 	}
 
 	modelCaps := m.Capabilities()
-	if req.Think != nil {
-		if slices.Contains(modelCaps, model.CapabilityThinking) {
-			caps = append(caps, model.CapabilityThinking)
-		} else {
+	// add thinking if the model supports it
+	if slices.Contains(modelCaps, model.CapabilityThinking) {
+		caps = append(caps, model.CapabilityThinking)
+		if req.Think == nil {
+			req.Think = &api.ThinkValue{Value: true}
+		}
+	} else {
+		if req.Think != nil && req.Think.Bool() {
 			c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("%q does not support thinking", req.Model)})
 			return
 		}
 	}
 
 	r, m, opts, err := s.scheduleRunner(c.Request.Context(), name.String(), caps, req.Options, req.KeepAlive)
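Both hunks above apply the same gating logic in GenerateHandler and ChatHandler. The following is a minimal sketch of that decision in isolation, with invented names (thinkingGate, supportsThinking) and plain bools standing in for the real model.Capability slice and *api.ThinkValue; it illustrates the behavior and is not the server code itself.

package main

import (
	"errors"
	"fmt"
)

// thinkingGate is an illustrative stand-in for the check both handlers now share.
// supportsThinking mirrors slices.Contains(modelCaps, model.CapabilityThinking);
// think mirrors req.Think (nil when the request omitted "think").
func thinkingGate(supportsThinking bool, think *bool) (wantThinkingCap bool, effectiveThink *bool, err error) {
	if supportsThinking {
		// thinking-capable model: request the capability and default an
		// omitted "think" to true
		if think == nil {
			t := true
			think = &t
		}
		return true, think, nil
	}
	// non-thinking model: only an explicit "think": true is an error;
	// "think": false (or omitting it) is accepted
	if think != nil && *think {
		return false, think, errors.New("model does not support thinking")
	}
	return false, think, nil
}

func main() {
	think := false
	_, _, err := thinkingGate(false, &think) // "think": false on a non-thinking model
	fmt.Println(err)                         // <nil> after this change; rejected before it
}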
@@ -158,11 +158,26 @@ func TestGenerateChat(t *testing.T) {
 			t.Errorf("expected status 400, got %d", w.Code)
 		}
 
-		if diff := cmp.Diff(w.Body.String(), `{"error":"registry.ollama.ai/library/test:latest does not support thinking"}`); diff != "" {
+		if diff := cmp.Diff(w.Body.String(), `{"error":"\"test\" does not support thinking"}`); diff != "" {
 			t.Errorf("mismatch (-got +want):\n%s", diff)
 		}
 	})
 
+	t.Run("model can't think but think set false", func(t *testing.T) {
+		think := false
+		w := createRequest(t, s.ChatHandler, api.ChatRequest{
+			Model: "test",
+			Messages: []api.Message{
+				{Role: "user", Content: "Hello!"},
+			},
+			Think: &api.ThinkValue{Value: think},
+		})
+
+		if w.Code != http.StatusOK {
+			t.Errorf("expected status 200, got %d", w.Code)
+		}
+	})
+
 	t.Run("missing model", func(t *testing.T) {
 		w := createRequest(t, s.ChatHandler, api.ChatRequest{})
 		if w.Code != http.StatusBadRequest {
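For completeness, a client-side sketch of the request the new test exercises, written against the ollama Go client; the model name is a placeholder and the surrounding program is assumed, not taken from this commit. With this change the call succeeds even though "think" is present, because its value is false.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	think := false // explicitly disable thinking; now accepted for non-thinking models too
	req := &api.ChatRequest{
		Model: "llama3.2", // placeholder; any model without the thinking capability
		Messages: []api.Message{
			{Role: "user", Content: "Hello!"},
		},
		Think: &api.ThinkValue{Value: think},
	}

	if err := client.Chat(context.Background(), req, func(resp api.ChatResponse) error {
		fmt.Print(resp.Message.Content)
		return nil
	}); err != nil {
		log.Fatal(err)
	}
}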