Mirror of https://github.com/ollama/ollama.git, synced 2025-05-11 18:36:41 +02:00
Update the /api/create endpoint to use JSON (#7935)
Changes `POST /api/create` to accept a JSON request body instead of a Modelfile. This is a breaking change.
parent 459d822b51
commit 86a622cbdc
17 changed files with 1523 additions and 1094 deletions
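For context, a minimal sketch (Go, standard library only) of what a JSON-based create request might look like after this change. The field names `model` and `from`, and the default local server address, are assumptions for illustration; they are not taken from this diff.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical JSON body for POST /api/create; field names are assumed,
	// not confirmed by this diff. Before this change the endpoint expected
	// the model definition as a Modelfile rather than structured JSON fields.
	body, err := json.Marshal(map[string]any{
		"model": "my-model", // name of the model to create (assumed field)
		"from":  "llama3.2",  // base model to build from (assumed field)
	})
	if err != nil {
		panic(err)
	}

	// Assumes an Ollama server listening on the default local address.
	resp, err := http.Post("http://localhost:11434/api/create", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}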
@@ -2,10 +2,8 @@ package server

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"testing"

@@ -13,7 +11,6 @@ import (

	"github.com/google/go-cmp/cmp"

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/llm"
	"github.com/ollama/ollama/template"
)

@@ -139,87 +136,6 @@ The temperature in San Francisco, CA is 70°F and in Toronto, Canada is 20°C.`,
	}
}

func TestParseFromFileFromLayer(t *testing.T) {
	tempModels := t.TempDir()
	t.Setenv("OLLAMA_MODELS", tempModels)

	file, err := os.CreateTemp(tempModels, "")
	if err != nil {
		t.Fatalf("failed to open file: %v", err)
	}
	defer file.Close()

	if err := llm.WriteGGUF(file, llm.KV{"general.architecture": "gemma"}, []llm.Tensor{}); err != nil {
		t.Fatalf("failed to write gguf: %v", err)
	}

	if _, err := file.Seek(0, io.SeekStart); err != nil {
		t.Fatalf("failed to seek to start: %v", err)
	}

	layers, err := parseFromFile(context.Background(), "model", []*layerGGML{}, file, "", func(api.ProgressResponse) {})
	if err != nil {
		t.Fatalf("failed to parse from file: %v", err)
	}

	if len(layers) != 1 {
		t.Fatalf("got %d != want 1", len(layers))
	}

	if _, err := file.Seek(0, io.SeekStart); err != nil {
		t.Fatalf("failed to seek to start: %v", err)
	}

	layers2, err := parseFromFile(context.Background(), "model", []*layerGGML{}, file, layers[0].Digest, func(api.ProgressResponse) {})
	if err != nil {
		t.Fatalf("failed to parse from file: %v", err)
	}
	if len(layers2) != 1 {
		t.Fatalf("got %d != want 1", len(layers2))
	}

	if layers[0].Digest != layers2[0].Digest {
		t.Fatalf("got %s != want %s", layers[0].Digest, layers2[0].Digest)
	}

	if layers[0].Size != layers2[0].Size {
		t.Fatalf("got %d != want %d", layers[0].Size, layers2[0].Size)
	}

	if layers[0].MediaType != layers2[0].MediaType {
		t.Fatalf("got %v != want %v", layers[0].MediaType, layers2[0].MediaType)
	}
}

func TestParseLayerFromCopy(t *testing.T) {
	tempModels := t.TempDir()
	t.Setenv("OLLAMA_MODELS", tempModels)

	file2, err := os.CreateTemp(tempModels, "")
	if err != nil {
		t.Fatalf("failed to open file: %v", err)
	}
	defer file2.Close()

	for range 5 {
		if err := llm.WriteGGUF(file2, llm.KV{"general.architecture": "gemma"}, []llm.Tensor{}); err != nil {
			t.Fatalf("failed to write gguf: %v", err)
		}
	}

	if _, err := file2.Seek(0, io.SeekStart); err != nil {
		t.Fatalf("failed to seek to start: %v", err)
	}

	layers, err := parseFromFile(context.Background(), "model", []*layerGGML{}, file2, "", func(api.ProgressResponse) {})
	if err != nil {
		t.Fatalf("failed to parse from file: %v", err)
	}

	if len(layers) != 5 {
		t.Fatalf("got %d != want 5", len(layers))
	}
}

func TestParseObjects(t *testing.T) {
	tests := []struct {
		input string