Mirror of https://github.com/ollama/ollama.git (synced 2025-09-29 22:15:59 +02:00)
Add Cache flag to api (#1642)
```diff
@@ -103,6 +103,7 @@ type Options struct {
 	MirostatEta      float32  `json:"mirostat_eta,omitempty"`
 	PenalizeNewline  bool     `json:"penalize_newline,omitempty"`
 	Stop             []string `json:"stop,omitempty"`
+	Cache            bool     `json:"cache,omitempty"`
 }
 
 // Runner options which must be set when the model is loaded into memory
```
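The commit's public surface is this one field: a per-request `cache` option serialized with `omitempty`, like its neighbors. A minimal sketch of what that tag means on the wire, using a local copy of just the fields shown above rather than the full `api.Options`:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Options mirrors the slice of api.Options visible in the hunk above;
// the real struct has many more fields.
type Options struct {
	MirostatEta     float32  `json:"mirostat_eta,omitempty"`
	PenalizeNewline bool     `json:"penalize_newline,omitempty"`
	Stop            []string `json:"stop,omitempty"`
	Cache           bool     `json:"cache,omitempty"`
}

func main() {
	body, _ := json.Marshal(Options{Cache: true, Stop: []string{"\n\n"}})
	fmt.Println(string(body)) // {"stop":["\n\n"],"cache":true}

	// One consequence of omitempty on a bool: a false value is dropped,
	// so this struct can never serialize an explicit "cache": false.
	body, _ = json.Marshal(Options{Cache: false})
	fmt.Println(string(body)) // {}
}
```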
```diff
@@ -355,6 +356,7 @@ func DefaultOptions() Options {
 		MirostatEta:      0.1,
 		PenalizeNewline:  true,
 		Seed:             -1,
+		Cache:            true,
 
 		Runner: Runner{
 			// options set when the model is loaded
```
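The default is on, so existing clients keep prompt caching without any change; a request only alters behavior when it carries the key explicitly. A hedged sketch of that defaults-then-overrides pattern (the real option decoding lives elsewhere in the repo; the helper name here is illustrative):

```go
// applyOptions overlays request-supplied keys onto DefaultOptions'
// values. A key absent from the request leaves the default intact,
// so omitting "cache" keeps caching enabled.
func applyOptions(defaults, req map[string]any) map[string]any {
	merged := make(map[string]any, len(defaults))
	for k, v := range defaults {
		merged[k] = v
	}
	for k, v := range req {
		merged[k] = v // an explicit "cache": false wins here
	}
	return merged
}
```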
````diff
@@ -306,7 +306,8 @@ curl http://localhost:11434/api/generate -d '{
     "embedding_only": false,
     "rope_frequency_base": 1.1,
     "rope_frequency_scale": 0.8,
-    "num_thread": 8
+    "num_thread": 8,
+    "cache": true
   }
 }'
 ```
````
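The same documented request from Go instead of curl, for completeness; a minimal sketch assuming a local server on the default port (the model name and prompt are illustrative, and `"stream": false` requests a single JSON response rather than a stream):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Options trimmed to the flag this commit adds; see the docs hunk
	// above for the surrounding parameters.
	payload := []byte(`{
		"model": "llama2",
		"prompt": "Why is the sky blue?",
		"stream": false,
		"options": {"num_thread": 8, "cache": true}
	}`)

	resp, err := http.Post("http://localhost:11434/api/generate",
		"application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}
```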
```diff
@@ -235,6 +235,7 @@ func predict(llm extServer, opts api.Options, ctx context.Context, predict Predi
 		"seed":       opts.Seed,
 		"stop":       opts.Stop,
 		"image_data": imageData,
+		"cache_prompt": opts.Cache,
 	}
 
 	if predict.Format == "json" {
```
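Internally, the public `Cache` flag is renamed to `cache_prompt`, the llama.cpp server parameter that reuses the KV cache for a matching prompt prefix across requests. A hedged sketch of how that request map comes together (the helper name and the `[]any` image type are illustrative; the real `predict` builds a much larger map):

```go
// buildPredictRequest gathers the per-request fields visible in the
// hunk above into the JSON body sent to the llama.cpp server.
func buildPredictRequest(opts api.Options, imageData []any) map[string]any {
	return map[string]any{
		"seed":       opts.Seed,
		"stop":       opts.Stop,
		"image_data": imageData,
		// api.Options.Cache becomes llama.cpp's cache_prompt flag.
		"cache_prompt": opts.Cache,
	}
}
```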