diff --git a/model/process_text.go b/model/process_text.go
index bfb0a5f20..0d75a0ed0 100644
--- a/model/process_text.go
+++ b/model/process_text.go
@@ -177,7 +177,6 @@ func (bpe BytePairEncoding) Encode(s string, addSpecial bool) ([]int32, error) {
 	for _, frag := range fragments {
 		if len(frag.ids) > 0 {
 			ids = append(ids, frag.ids...)
-			slog.Debug("encoded", "text", frag.value, "ids", frag.ids, "special", true)
 			continue
 		}
 
@@ -201,7 +200,6 @@ func (bpe BytePairEncoding) Encode(s string, addSpecial bool) ([]int32, error) {
 		// short circuit if the fragment is in the vocabulary
 		if id := bpe.vocab.Encode(sb.String()); id >= 0 {
 			ids = append(ids, id)
-			slog.Debug("encoded", "text", sb.String(), "ids", []int32{id})
 			continue
 		}
 
@@ -275,7 +273,6 @@ func (bpe BytePairEncoding) Encode(s string, addSpecial bool) ([]int32, error) {
 			// TODO: handle the edge case where the rune isn't in the vocabulary
 			if id := bpe.vocab.Encode(string(merge.runes)); id >= 0 {
 				ids = append(ids, id)
-				slog.Debug("encoded", "text", string(merge.runes), "ids", []int32{id})
 			}
 		}
 	}
@@ -329,6 +326,5 @@ func (bpe BytePairEncoding) Decode(ids []int32) (string, error) {
 		}
 	}
 
-	slog.Debug("decoded", "ids", ids, "text", sb.String())
 	return sb.String(), nil
 }
diff --git a/runner/ollamarunner/runner.go b/runner/ollamarunner/runner.go
index 81e065624..d9f479708 100644
--- a/runner/ollamarunner/runner.go
+++ b/runner/ollamarunner/runner.go
@@ -436,8 +436,10 @@ func (s *Server) processBatch() error {
 		// if done processing the prompt, generate an embedding and return
 		if seq.embeddingOnly {
 			// TODO(jessegross): Embedding support
-			s.removeSequence(i, "")
-			continue
+			// s.removeSequence(i, "")
+			// continue
+
+			panic("generation of embedding outputs not yet supported")
 		}
 
 		// sample a token
@@ -597,6 +599,10 @@ func (s *Server) completion(w http.ResponseWriter, r *http.Request) {
 		req.Seed,
 	)
 
+	if req.Grammar != "" {
+		panic("grammars are not yet supported")
+	}
+
 	seq, err := s.NewSequence(req.Prompt, req.Images, NewSequenceParams{
 		numPredict: req.NumPredict,
 		stop: req.Stop,