mirror of
https://github.com/ollama/ollama.git
synced 2025-05-11 10:26:53 +02:00
conditionally enable parallel pipelines
This commit is contained in:
parent
50b5962042
commit
4561fff36e
1 changed file with 1 addition and 1 deletion
|
@ -371,7 +371,7 @@ func New(r *os.File, params ml.BackendParams) (ml.Backend, error) {
|
|||
(*C.ggml_backend_buffer_type_t)(unsafe.Pointer(&schedBufts[0])),
|
||||
C.int(len(schedBackends)),
|
||||
C.size_t(maxGraphNodes),
|
||||
true,
|
||||
C._Bool(len(gpus) > 1 && slices.Contains(gpus, output.d)),
|
||||
),
|
||||
input: deviceBufferTypes[input.d],
|
||||
output: deviceBufferTypes[output.d],
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue