From 01d9a46854c2ac9411fed105ec3f8802608e2275 Mon Sep 17 00:00:00 2001
From: Jesse Gross
Date: Thu, 30 Jan 2025 17:52:19 -0800
Subject: [PATCH] ggml-backend: Let GGML allocate context memory

Passing in a Go buffer is not safe because the garbage collector could
free or move the memory while the context is still open. However, if we
pass in the size and a nil pointer then GGML will allocate it from the
C side.
---
 ml/backend/ggml/ggml.go | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/ml/backend/ggml/ggml.go b/ml/backend/ggml/ggml.go
index d1b2d646e..8b33d38fd 100644
--- a/ml/backend/ggml/ggml.go
+++ b/ml/backend/ggml/ggml.go
@@ -198,10 +198,9 @@ func (b *Backend) Get(name string) ml.Tensor {
 
 func (b *Backend) NewContext() ml.Context {
 	nodes := max(8192, len(b.meta.Tensors().Items())*5)
-	bts := make([]byte, C.size_t(nodes)*C.ggml_tensor_overhead()+C.ggml_graph_overhead_custom(C.size_t(nodes), false))
 	c := C.ggml_init(C.struct_ggml_init_params{
-		mem_buffer: unsafe.Pointer(&bts[0]),
-		mem_size:   C.size_t(len(bts)),
+		mem_buffer: nil,
+		mem_size:   C.size_t(nodes)*C.ggml_tensor_overhead() + C.ggml_graph_overhead_custom(C.size_t(nodes), false),
 		no_alloc:   true,
 	})
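
A minimal standalone sketch of the same pattern, for illustration only. It assumes ggml.h is reachable on the cgo include path; newScratchContext is a hypothetical helper, not part of the patch. The point is that ggml_init receives only a size and a nil mem_buffer, so the arena is allocated on the C side and is never subject to Go's garbage collector moving or freeing it while the context is open.

```go
package main

/*
#include <ggml.h>
*/
import "C"

// newScratchContext is a hypothetical helper mirroring the patched
// NewContext: GGML allocates the arena itself because mem_buffer is nil.
// The caller must release the context with C.ggml_free.
func newScratchContext(nodes int) *C.struct_ggml_context {
	// Size the arena for tensor metadata plus a custom-sized graph.
	size := C.size_t(nodes)*C.ggml_tensor_overhead() +
		C.ggml_graph_overhead_custom(C.size_t(nodes), false)

	return C.ggml_init(C.struct_ggml_init_params{
		mem_buffer: nil,  // let GGML malloc; a Go []byte here would violate cgo pointer rules
		mem_size:   size,
		no_alloc:   true, // only metadata lives in this arena; tensor data stays in backend buffers
	})
}

func main() {
	ctx := newScratchContext(8192)
	defer C.ggml_free(ctx)
	_ = ctx
}
```

The earlier code handed ggml_init a pointer into a Go-managed slice that had to stay alive and immobile for the lifetime of the context, which the Go runtime does not guarantee once the cgo call returns; letting GGML own the allocation sidesteps that entirely.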