Mirror of https://github.com/ollama/ollama.git (synced 2025-04-07 03:18:24 +02:00)
conditionally enable parallel pipelines
commit 4561fff36e
parent 50b5962042
@@ -371,7 +371,7 @@ func New(r *os.File, params ml.BackendParams) (ml.Backend, error) {
 			(*C.ggml_backend_buffer_type_t)(unsafe.Pointer(&schedBufts[0])),
 			C.int(len(schedBackends)),
 			C.size_t(maxGraphNodes),
-			true,
+			C._Bool(len(gpus) > 1 && slices.Contains(gpus, output.d)),
 		),
 		input:  deviceBufferTypes[input.d],
 		output: deviceBufferTypes[output.d],
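For context, the changed argument is the flag that enables parallel pipelines in the scheduler created by C.ggml_backend_sched_new: after this commit it is true only when more than one GPU is available and the output layer's device is one of them. Below is a minimal, self-contained Go sketch of just that condition; the deviceID type and the example values are hypothetical stand-ins for the backend's real device handles (gpus and output.d), not code from the repository.

package main

import (
	"fmt"
	"slices"
)

// deviceID is a hypothetical stand-in for the backend device handle.
type deviceID int

func main() {
	gpus := []deviceID{0, 1}    // example: two GPU devices
	outputDevice := deviceID(1) // example: device holding the output layer

	// Parallel pipelines are enabled only with multiple GPUs and when
	// the output layer itself is placed on one of those GPUs.
	parallel := len(gpus) > 1 && slices.Contains(gpus, outputDevice)
	fmt.Println("parallel pipelines:", parallel) // prints "parallel pipelines: true"
}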