chore(all): replace instances of interface with any (#10067)

Both interface{} and any (which is just an alias for interface{} introduced in Go 1.18) represent the empty interface that all types satisfy.
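For illustration only (not part of the diff below), a minimal sketch of why this is a pure rename: since Go 1.18 the builtin declaration is type any = interface{}, an alias rather than a new type, so for example map[string]interface{} and map[string]any name the identical type and values assign between them with no conversion. The names used here are hypothetical and only demonstrate the alias.

    package main

    import "fmt"

    func main() {
        // Illustration only; not taken from the ollama codebase.
        // The two spellings name the same map type, so this assignment compiles as-is.
        before := map[string]interface{}{"temperature": 0.1}
        var after map[string]any = before
        fmt.Println(after["temperature"]) // 0.1
    }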
Bruce MacDonald 2025-04-02 09:44:27 -07:00 committed by GitHub
parent 4e415029b3
commit 9876c9faa4
19 changed files with 58 additions and 58 deletions


@@ -82,7 +82,7 @@ type GenerateRequest struct {
 // Options lists model-specific options. For example, temperature can be
 // set through this field, if the model supports it.
-Options map[string]interface{} `json:"options"`
+Options map[string]any `json:"options"`
 }
 // ChatRequest describes a request sent by [Client.Chat].
@@ -107,7 +107,7 @@ type ChatRequest struct {
 Tools `json:"tools,omitempty"`
 // Options lists model-specific options.
-Options map[string]interface{} `json:"options"`
+Options map[string]any `json:"options"`
 }
 type Tools []Tool
@@ -261,7 +261,7 @@ type EmbedRequest struct {
 Truncate *bool `json:"truncate,omitempty"`
 // Options lists model-specific options.
-Options map[string]interface{} `json:"options"`
+Options map[string]any `json:"options"`
 }
 // EmbedResponse is the response from [Client.Embed].
@@ -287,7 +287,7 @@ type EmbeddingRequest struct {
 KeepAlive *Duration `json:"keep_alive,omitempty"`
 // Options lists model-specific options.
-Options map[string]interface{} `json:"options"`
+Options map[string]any `json:"options"`
 }
 // EmbeddingResponse is the response from [Client.Embeddings].
@@ -333,7 +333,7 @@ type ShowRequest struct {
 Template string `json:"template"`
 Verbose bool `json:"verbose"`
-Options map[string]interface{} `json:"options"`
+Options map[string]any `json:"options"`
 // Deprecated: set the model name with Model instead
 Name string `json:"name"`
@@ -505,7 +505,7 @@ func (m *Metrics) Summary() {
 }
 }
-func (opts *Options) FromMap(m map[string]interface{}) error {
+func (opts *Options) FromMap(m map[string]any) error {
 valueOpts := reflect.ValueOf(opts).Elem() // names of the fields in the options struct
 typeOpts := reflect.TypeOf(opts).Elem() // types of the fields in the options struct
@@ -562,12 +562,12 @@ func (opts *Options) FromMap(m map[string]interface{}) error {
 }
 field.SetString(val)
 case reflect.Slice:
-// JSON unmarshals to []interface{}, not []string
-val, ok := val.([]interface{})
+// JSON unmarshals to []any, not []string
+val, ok := val.([]any)
 if !ok {
 return fmt.Errorf("option %q must be of type array", key)
 }
-// convert []interface{} to []string
+// convert []any to []string
 slice := make([]string, len(val))
 for i, item := range val {
 str, ok := item.(string)
@@ -674,7 +674,7 @@ func (d *Duration) UnmarshalJSON(b []byte) (err error) {
 }
 // FormatParams converts specified parameter options to their correct types
-func FormatParams(params map[string][]string) (map[string]interface{}, error) {
+func FormatParams(params map[string][]string) (map[string]any, error) {
 opts := Options{}
 valueOpts := reflect.ValueOf(&opts).Elem() // names of the fields in the options struct
 typeOpts := reflect.TypeOf(opts) // types of the fields in the options struct
@@ -688,7 +688,7 @@ func FormatParams(params map[string][]string) (map[string]interface{}, error) {
 }
 }
-out := make(map[string]interface{})
+out := make(map[string]any)
 // iterate params and set values based on json struct tags
 for key, vals := range params {
 if opt, ok := jsonOpts[key]; !ok {


@@ -134,7 +134,7 @@ func TestUseMmapParsingFromJSON(t *testing.T) {
 for _, test := range tests {
 t.Run(test.name, func(t *testing.T) {
-var oMap map[string]interface{}
+var oMap map[string]any
 err := json.Unmarshal([]byte(test.req), &oMap)
 require.NoError(t, err)
 opts := DefaultOptions()


@@ -92,7 +92,7 @@ func BenchmarkColdStart(b *testing.B) {
 req := &api.GenerateRequest{
 Model: m,
 Prompt: tt.prompt,
-Options: map[string]interface{}{"num_predict": tt.maxTokens, "temperature": 0.1},
+Options: map[string]any{"num_predict": tt.maxTokens, "temperature": 0.1},
 }
 runGenerateBenchmark(b, ctx, client, req)
@@ -155,7 +155,7 @@ func warmup(client *api.Client, model string, prompt string, b *testing.B) {
 &api.GenerateRequest{
 Model: model,
 Prompt: prompt,
-Options: map[string]interface{}{"num_predict": 50, "temperature": 0.1},
+Options: map[string]any{"num_predict": 50, "temperature": 0.1},
 },
 func(api.GenerateResponse) error { return nil },
 )


@@ -268,7 +268,7 @@ func RunHandler(cmd *cobra.Command, args []string) error {
 opts := runOptions{
 Model: args[0],
 WordWrap: os.Getenv("TERM") == "xterm-256color",
-Options: map[string]interface{}{},
+Options: map[string]any{},
 }
 format, err := cmd.Flags().GetString("format")
@@ -852,7 +852,7 @@ type runOptions struct {
 Format string
 System string
 Images []api.ImageData
-Options map[string]interface{}
+Options map[string]any
 MultiModal bool
 KeepAlive *api.Duration
 }


@@ -1360,7 +1360,7 @@ func file_sentencepiece_model_proto_rawDescGZIP() []byte {
 var file_sentencepiece_model_proto_enumTypes = make([]protoimpl.EnumInfo, 2)
 var file_sentencepiece_model_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
-var file_sentencepiece_model_proto_goTypes = []interface{}{
+var file_sentencepiece_model_proto_goTypes = []any{
 (TrainerSpec_ModelType)(0), // 0: sentencepiece.TrainerSpec.ModelType
 (ModelProto_SentencePiece_Type)(0), // 1: sentencepiece.ModelProto.SentencePiece.Type
 (*TrainerSpec)(nil), // 2: sentencepiece.TrainerSpec
@@ -1392,7 +1392,7 @@ func file_sentencepiece_model_proto_init() {
 return
 }
 if !protoimpl.UnsafeEnabled {
-file_sentencepiece_model_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+file_sentencepiece_model_proto_msgTypes[0].Exporter = func(v any, i int) any {
 switch v := v.(*TrainerSpec); i {
 case 0:
 return &v.state
@@ -1406,7 +1406,7 @@ func file_sentencepiece_model_proto_init() {
 return nil
 }
 }
-file_sentencepiece_model_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+file_sentencepiece_model_proto_msgTypes[1].Exporter = func(v any, i int) any {
 switch v := v.(*NormalizerSpec); i {
 case 0:
 return &v.state
@@ -1420,7 +1420,7 @@ func file_sentencepiece_model_proto_init() {
 return nil
 }
 }
-file_sentencepiece_model_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+file_sentencepiece_model_proto_msgTypes[2].Exporter = func(v any, i int) any {
 switch v := v.(*SelfTestData); i {
 case 0:
 return &v.state
@@ -1434,7 +1434,7 @@ func file_sentencepiece_model_proto_init() {
 return nil
 }
 }
-file_sentencepiece_model_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
+file_sentencepiece_model_proto_msgTypes[3].Exporter = func(v any, i int) any {
 switch v := v.(*ModelProto); i {
 case 0:
 return &v.state
@@ -1448,7 +1448,7 @@ func file_sentencepiece_model_proto_init() {
 return nil
 }
 }
-file_sentencepiece_model_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
+file_sentencepiece_model_proto_msgTypes[4].Exporter = func(v any, i int) any {
 switch v := v.(*SelfTestData_Sample); i {
 case 0:
 return &v.state
@@ -1460,7 +1460,7 @@ func file_sentencepiece_model_proto_init() {
 return nil
 }
 }
-file_sentencepiece_model_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
+file_sentencepiece_model_proto_msgTypes[5].Exporter = func(v any, i int) any {
 switch v := v.(*ModelProto_SentencePiece); i {
 case 0:
 return &v.state


@@ -12,7 +12,7 @@ func IsNUMA() bool {
 // numa support in llama.cpp is linux only
 return false
 }
-ids := map[string]interface{}{}
+ids := map[string]any{}
 packageIds, _ := filepath.Glob("/sys/devices/system/cpu/cpu*/topology/physical_package_id")
 for _, packageId := range packageIds {
 id, err := os.ReadFile(packageId)


@@ -5,7 +5,7 @@ import (
 "time"
 )
-func assertEqual(t *testing.T, a interface{}, b interface{}) {
+func assertEqual(t *testing.T, a any, b any) {
 if a != b {
 t.Errorf("Assert failed, expected %v, got %v", b, a)
 }


@@ -22,7 +22,7 @@ func TestOrcaMiniBlueSky(t *testing.T) {
 Model: "orca-mini",
 Prompt: "why is the sky blue?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "temperature": 0,
 "seed": 123,
 },
@@ -39,7 +39,7 @@ func TestUnicode(t *testing.T) {
 Model: "deepseek-coder-v2:16b-lite-instruct-q2_K",
 Prompt: "天空为什么是蓝色的?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "temperature": 0,
 "seed": 123,
 // Workaround deepseek context shifting bug
@@ -61,7 +61,7 @@ func TestExtendedUnicodeOutput(t *testing.T) {
 Model: "gemma2:2b",
 Prompt: "Output some smily face emoji",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "temperature": 0,
 "seed": 123,
 },
@@ -96,7 +96,7 @@ func TestUnicodeModelDir(t *testing.T) {
 Model: "orca-mini",
 Prompt: "why is the sky blue?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "temperature": 0,
 "seed": 123,
 },


@@ -25,7 +25,7 @@ func TestMultiModelConcurrency(t *testing.T) {
 Prompt: "why is the ocean blue?",
 Stream: &stream,
 KeepAlive: &api.Duration{Duration: 10 * time.Second},
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -34,7 +34,7 @@ func TestMultiModelConcurrency(t *testing.T) {
 Prompt: "what is the origin of the us thanksgiving holiday?",
 Stream: &stream,
 KeepAlive: &api.Duration{Duration: 10 * time.Second},
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },


@@ -23,7 +23,7 @@ func TestLongInputContext(t *testing.T) {
 Model: "llama2",
 Prompt: "Oh, dont speak to me of Austria. Perhaps I dont understand things, but Austria never has wished, and does not wish, for war. She is betraying us! Russia alone must save Europe. Our gracious sovereign recognizes his high vocation and will be true to it. That is the one thing I have faith in! Our good and wonderful sovereign has to perform the noblest role on earth, and he is so virtuous and noble that God will not forsake him. He will fulfill his vocation and crush the hydra of revolution, which has become more terrible than ever in the person of this murderer and villain! We alone must avenge the blood of the just one.... Whom, I ask you, can we rely on?... England with her commercial spirit will not and cannot understand the Emperor Alexanders loftiness of soul. She has refused to evacuate Malta. She wanted to find, and still seeks, some secret motive in our actions. What answer did Novosíltsev get? None. The English have not understood and cannot understand the self-abnegation of our Emperor who wants nothing for himself, but only desires the good of mankind. And what have they promised? Nothing! And what little they have promised they will not perform! Prussia has always declared that Buonaparte is invincible, and that all Europe is powerless before him.... And I dont believe a word that Hardenburg says, or Haugwitz either. This famous Prussian neutrality is just a trap. I have faith only in God and the lofty destiny of our adored monarch. He will save Europe! What country is this referring to?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "temperature": 0,
 "seed": 123,
 "num_ctx": 128,
@@ -50,7 +50,7 @@ func TestContextExhaustion(t *testing.T) {
 Model: "llama2",
 Prompt: "Write me a story with a ton of emojis?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "temperature": 0,
 "seed": 123,
 "num_ctx": 128,


@@ -19,7 +19,7 @@ func TestIntegrationLlava(t *testing.T) {
 Model: "llava:7b",
 Prompt: "what does the text in this image say?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -47,7 +47,7 @@ func TestIntegrationMllama(t *testing.T) {
 Model: "x/llama3.2-vision",
 Prompt: "what does the text in this image say?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -75,7 +75,7 @@ func TestIntegrationSplitBatch(t *testing.T) {
 System: "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed aliquet, justo in malesuada lobortis, odio ligula volutpat quam, quis faucibus ipsum magna quis sapien. Aliquam in venenatis diam, eu viverra magna. Phasellus imperdiet hendrerit volutpat. Vivamus sem ex, facilisis placerat felis non, dictum elementum est. Phasellus aliquam imperdiet lacus, eget placerat ligula sodales vel. Pellentesque nec auctor mi. Curabitur arcu nisi, faucibus eget nunc id, viverra interdum mi. Curabitur ornare ipsum ex, ac euismod ex aliquam in. Vestibulum id magna at purus accumsan fermentum. Proin scelerisque posuere nunc quis interdum. Maecenas sed mollis nisl. Etiam vitae ipsum interdum, placerat est quis, tincidunt velit. Nullam tempor nibh non lorem volutpat efficitur. Cras laoreet diam imperdiet ipsum auctor bibendum. Suspendisse ultrices urna sed metus sagittis suscipit. Quisque ullamcorper aliquam nibh ut mollis. Aenean dapibus mauris pharetra, venenatis elit ac, hendrerit odio. Cras vestibulum erat tempor, lobortis justo eu, lobortis ipsum. Nam laoreet dapibus sem. Proin vel diam ultrices, elementum ante et, ornare lectus. Proin eu accumsan nisl. Praesent ac ex vitae ipsum vulputate tristique facilisis sit amet lacus. Nullam faucibus magna a pellentesque pretium. Nunc lacinia ullamcorper sollicitudin. Donec vitae accumsan turpis, sed porttitor est. Donec porttitor mi vitae augue faucibus, vel mollis diam tincidunt.",
 Prompt: "what does the text in this image say?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },


@@ -20,7 +20,7 @@ var (
 Model: "orca-mini",
 Prompt: "why is the ocean blue?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -28,7 +28,7 @@ var (
 Model: "orca-mini",
 Prompt: "what is the origin of the us thanksgiving holiday?",
 Stream: &stream,
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },


@@ -32,7 +32,7 @@ func TestMaxQueue(t *testing.T) {
 req := api.GenerateRequest{
 Model: "orca-mini",
 Prompt: "write a long historical fiction story about christopher columbus. use at least 10 facts from his actual journey",
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },


@@ -291,7 +291,7 @@ func GenerateRequests() ([]api.GenerateRequest, [][]string) {
 Prompt: "why is the ocean blue?",
 Stream: &stream,
 KeepAlive: &api.Duration{Duration: 10 * time.Second},
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -300,7 +300,7 @@ func GenerateRequests() ([]api.GenerateRequest, [][]string) {
 Prompt: "why is the color of dirt brown?",
 Stream: &stream,
 KeepAlive: &api.Duration{Duration: 10 * time.Second},
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -309,7 +309,7 @@ func GenerateRequests() ([]api.GenerateRequest, [][]string) {
 Prompt: "what is the origin of the us thanksgiving holiday?",
 Stream: &stream,
 KeepAlive: &api.Duration{Duration: 10 * time.Second},
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -318,7 +318,7 @@ func GenerateRequests() ([]api.GenerateRequest, [][]string) {
 Prompt: "what is the origin of independence day?",
 Stream: &stream,
 KeepAlive: &api.Duration{Duration: 10 * time.Second},
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },
@@ -327,7 +327,7 @@ func GenerateRequests() ([]api.GenerateRequest, [][]string) {
 Prompt: "what is the composition of air?",
 Stream: &stream,
 KeepAlive: &api.Duration{Duration: 10 * time.Second},
-Options: map[string]interface{}{
+Options: map[string]any{
 "seed": 42,
 "temperature": 0.0,
 },


@@ -23,10 +23,10 @@ import (
 var finishReasonToolCalls = "tool_calls"
 type Error struct {
 Message string `json:"message"`
 Type string `json:"type"`
-Param interface{} `json:"param"`
+Param any `json:"param"`
 Code *string `json:"code"`
 }
 type ErrorResponse struct {
@@ -465,7 +465,7 @@ func fromChatRequest(r ChatCompletionRequest) (*api.ChatRequest, error) {
 }
 }
-options := make(map[string]interface{})
+options := make(map[string]any)
 switch stop := r.Stop.(type) {
 case string:


@@ -219,7 +219,7 @@ func TestChatMiddleware(t *testing.T) {
 {
 Function: api.ToolCallFunction{
 Name: "get_current_weather",
-Arguments: map[string]interface{}{
+Arguments: map[string]any{
 "location": "Paris, France",
 "format": "celsius",
 },


@@ -60,7 +60,7 @@ type Model struct {
 System string
 License []string
 Digest string
-Options map[string]interface{}
+Options map[string]any
 Messages []api.Message
 Template *template.Template


@@ -72,7 +72,7 @@ var (
 errBadTemplate = errors.New("template error")
 )
-func modelOptions(model *Model, requestOpts map[string]interface{}) (api.Options, error) {
+func modelOptions(model *Model, requestOpts map[string]any) (api.Options, error) {
 opts := api.DefaultOptions()
 if err := opts.FromMap(model.Options); err != nil {
 return api.Options{}, err
@@ -826,7 +826,7 @@ func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
 cs := 30
 for k, v := range m.Options {
 switch val := v.(type) {
-case []interface{}:
+case []any:
 for _, nv := range val {
 params = append(params, fmt.Sprintf("%-*s %#v", cs, k, nv))
 }
@@ -1336,7 +1336,7 @@ func Serve(ln net.Listener) error {
 return nil
 }
-func waitForStream(c *gin.Context, ch chan interface{}) {
+func waitForStream(c *gin.Context, ch chan any) {
 c.Header("Content-Type", "application/json")
 for resp := range ch {
 switch r := resp.(type) {


@@ -38,7 +38,7 @@ type Scheduler struct {
 pendingReqCh chan *LlmRequest
 finishedReqCh chan *LlmRequest
 expiredCh chan *runnerRef
-unloadedCh chan interface{}
+unloadedCh chan any
 loaded map[string]*runnerRef
 loadedMu sync.Mutex
@@ -68,7 +68,7 @@ func InitScheduler(ctx context.Context) *Scheduler {
 pendingReqCh: make(chan *LlmRequest, maxQueue),
 finishedReqCh: make(chan *LlmRequest, maxQueue),
 expiredCh: make(chan *runnerRef, maxQueue),
-unloadedCh: make(chan interface{}, maxQueue),
+unloadedCh: make(chan any, maxQueue),
 loaded: make(map[string]*runnerRef),
 newServerFn: llm.NewLlamaServer,
 getGpuFn: discover.GetGPUInfo,
@@ -618,8 +618,8 @@ func (runner *runnerRef) needsReload(ctx context.Context, req *LlmRequest) bool
 // a before and after GPU memory allocation. The returned channel
 // will be notified when we're done waiting, or have timed out and should
 // proceed anyway
-func (runner *runnerRef) waitForVRAMRecovery() chan interface{} {
-finished := make(chan interface{}, 1)
+func (runner *runnerRef) waitForVRAMRecovery() chan any {
+finished := make(chan any, 1)
 // CPU or Metal don't need checking, so no waiting required
 // windows can page VRAM, only cuda currently can report accurate used vram usage