Mirror of https://github.com/ollama/ollama.git (synced 2025-12-09 06:42:12 +01:00)
Adds logprobs support to Ollama's API, including Ollama's OpenAI-compatible API. When the new 'logprobs' boolean parameter is set, Ollama returns the log probability of each generated token. An integer 'top_logprobs' parameter (up to 20) can also be specified; when set, the API additionally returns that many of the most likely tokens at each token position, each with its log probability.

Co-authored-by: Baptiste Jamin <baptiste@crisp.chat>
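For illustration, a minimal sketch of a request that enables these parameters against a local Ollama server (default port 11434). The model name and the response field read at the end are assumptions for this sketch, not taken from the change itself:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// "logprobs" and "top_logprobs" are the parameters described in the commit message.
	body, err := json.Marshal(map[string]any{
		"model":        "llama3.2", // hypothetical model name
		"prompt":       "Why is the sky blue?",
		"stream":       false,
		"logprobs":     true, // return the log probability of each generated token
		"top_logprobs": 5,    // also return the 5 most likely tokens per position (max 20)
	})
	if err != nil {
		panic(err)
	}

	resp, err := http.Post("http://localhost:11434/api/generate", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out map[string]any
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out["logprobs"]) // assumed response field; its shape corresponds to api.Logprob below
}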
30 lines · 683 B · Go
package server

import (
	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/llm"
)

// toAPILogprobs converts llm.Logprobs to api.Logprobs
func toAPILogprobs(logprobs []llm.Logprob) []api.Logprob {
	result := make([]api.Logprob, len(logprobs))
	for i, lp := range logprobs {
		result[i] = api.Logprob{
			TokenLogprob: api.TokenLogprob{
				Token:   lp.Token,
				Logprob: lp.Logprob,
			},
		}
		if len(lp.TopLogprobs) > 0 {
			result[i].TopLogprobs = make([]api.TokenLogprob, len(lp.TopLogprobs))
			for j, tlp := range lp.TopLogprobs {
				result[i].TopLogprobs[j] = api.TokenLogprob{
					Token:   tlp.Token,
					Logprob: tlp.Logprob,
				}
			}
		}
	}
	return result
}
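A usage sketch (hypothetical, not part of the change) of how this helper could be exercised in a server package test; TopLogprobs is left empty because only the Token and Logprob field names of llm.Logprob are visible above:

package server

import (
	"testing"

	"github.com/ollama/ollama/llm"
)

func TestToAPILogprobsSketch(t *testing.T) {
	// Two tokens with their log probabilities, mirroring the fields used by toAPILogprobs.
	in := []llm.Logprob{
		{Token: "Hello", Logprob: -0.01},
		{Token: " world", Logprob: -1.25},
	}

	out := toAPILogprobs(in)
	if len(out) != len(in) {
		t.Fatalf("expected %d entries, got %d", len(in), len(out))
	}
	for i := range in {
		// Token and Logprob are promoted from the embedded api.TokenLogprob.
		if out[i].Token != in[i].Token || out[i].Logprob != in[i].Logprob {
			t.Errorf("entry %d: got %+v, want {%q %v}", i, out[i], in[i].Token, in[i].Logprob)
		}
	}
}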