llm: do not error on "null" format (#8139)

This fixes another regression introduced by the previous commit, which
fixed other known bugs.
Blake Mizerany 2024-12-17 09:49:37 -08:00 committed by GitHub
parent 2cde4b8817
commit 2ddc32d5c5
2 changed files with 24 additions and 11 deletions

@@ -700,20 +700,24 @@ func (s *llmServer) Completion(ctx context.Context, req CompletionRequest, fn fu
 	}
 	if len(req.Format) > 0 {
-		switch {
-		case bytes.Equal(req.Format, []byte(`""`)):
-			// fallthrough
-		case bytes.Equal(req.Format, []byte(`"json"`)):
+		switch string(req.Format) {
+		case `null`, `""`:
+			// Field was set, but "missing" a value. We accept
+			// these as "not set".
+			break
+		case `"json"`:
 			request["grammar"] = grammarJSON
-		case bytes.HasPrefix(req.Format, []byte("{")):
+		default:
+			if req.Format[0] != '{' {
+				return fmt.Errorf("invalid format: %q; expected \"json\" or a valid JSON Schema object", req.Format)
+			}
 			// User provided a JSON schema
 			g := llama.SchemaToGrammar(req.Format)
 			if g == nil {
 				return fmt.Errorf("invalid JSON schema in format")
 			}
 			request["grammar"] = string(g)
-		default:
-			return fmt.Errorf("invalid format: %q; expected \"json\" or a valid JSON Schema", req.Format)
 		}
 	}
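
For context, here is a minimal, self-contained sketch of the behavior this hunk converges on. The helper name normalizeFormat and its string results are hypothetical (the real code writes a grammar into the completion request directly), but the accept/reject logic follows the new switch: null and "" are treated as "not set", "json" selects the built-in JSON grammar, and anything else must be a JSON Schema object starting with "{".

package main

import (
	"bytes"
	"fmt"
)

// normalizeFormat mirrors the switch in the hunk above: it decides whether a
// raw `format` value from the request body selects no grammar, the built-in
// JSON grammar, or a caller-supplied JSON Schema. Hypothetical helper for
// illustration only.
func normalizeFormat(format []byte) (string, error) {
	if len(format) == 0 {
		return "unset", nil
	}
	switch string(format) {
	case `null`, `""`:
		// Field was set, but "missing" a value: treat it as not set
		// instead of returning an error.
		return "unset", nil
	case `"json"`:
		return "json grammar", nil
	default:
		if !bytes.HasPrefix(format, []byte("{")) {
			return "", fmt.Errorf("invalid format: %q; expected \"json\" or a valid JSON Schema object", format)
		}
		// The real implementation converts the schema to a grammar via
		// llama.SchemaToGrammar; this sketch just accepts it.
		return "schema grammar", nil
	}
}

func main() {
	for _, f := range []string{``, `null`, `""`, `"json"`, `{"type":"object"}`, `"xml"`} {
		res, err := normalizeFormat([]byte(f))
		fmt.Printf("format=%-19q -> %s %v\n", f, res, err)
	}
}

Running it prints "unset" for the empty, null, and "" inputs, which is the point of the fix: a client that serializes format: null no longer gets an error.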