Skip to content

Commit 949b6c0

Browse files
committed
Revamp go based integration tests
This uplevels the integration tests to run the server which can allow testing an existing server, or a remote server.
1 parent a5ba0fc commit 949b6c0

File tree

8 files changed

+313
-261
lines changed

8 files changed

+313
-261
lines changed

integration/README.md

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
# Integration Tests
2+
3+
This directory contains integration tests to exercise Ollama end-to-end to verify behavior
4+
5+
By default, these tests are disabled so `go test ./...` will exercise only unit tests. To run integration tests you must pass the integration tag. `go test -tags=integration ./...`
6+
7+
8+
The integration tests have two modes of operation.
9+
10+
1. By default, they will start the server on a random port, run the tests, and then shutdown the server.
11+
2. If `OLLAMA_TEST_EXISTING` is set to a non-empty string, the tests will run against an existing running server, which can be remote

integration/basic_test.go

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
//go:build integration
2+
3+
package integration
4+
5+
import (
6+
"context"
7+
"net/http"
8+
"testing"
9+
"time"
10+
11+
"github.com/jmorganca/ollama/api"
12+
)
13+
14+
func TestOrcaMiniBlueSky(t *testing.T) {
15+
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)
16+
defer cancel()
17+
// Set up the test data
18+
req := api.GenerateRequest{
19+
Model: "orca-mini",
20+
Prompt: "why is the sky blue?",
21+
Stream: &stream,
22+
Options: map[string]interface{}{
23+
"temperature": 0,
24+
"seed": 123,
25+
},
26+
}
27+
GenerateTestHelper(ctx, t, &http.Client{}, req, []string{"rayleigh"})
28+
}

server/llm_image_test.go renamed to integration/llm_image_test.go

Lines changed: 11 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,49 +1,38 @@
11
//go:build integration
22

3-
package server
3+
package integration
44

55
import (
66
"context"
77
"encoding/base64"
8-
"log"
9-
"os"
10-
"strings"
8+
"net/http"
119
"testing"
1210
"time"
1311

1412
"github.com/jmorganca/ollama/api"
15-
"github.com/jmorganca/ollama/llm"
16-
"github.com/stretchr/testify/assert"
1713
"github.com/stretchr/testify/require"
1814
)
1915

2016
func TestIntegrationMultimodal(t *testing.T) {
21-
SkipIFNoTestData(t)
2217
image, err := base64.StdEncoding.DecodeString(imageEncoding)
2318
require.NoError(t, err)
2419
req := api.GenerateRequest{
25-
Model: "llava:7b",
26-
Prompt: "what does the text in this image say?",
27-
Options: map[string]interface{}{},
20+
Model: "llava:7b",
21+
Prompt: "what does the text in this image say?",
22+
Stream: &stream,
23+
Options: map[string]interface{}{
24+
"seed": 42,
25+
"temperature": 0.0,
26+
},
2827
Images: []api.ImageData{
2928
image,
3029
},
3130
}
31+
3232
resp := "the ollamas"
33-
workDir, err := os.MkdirTemp("", "ollama")
34-
require.NoError(t, err)
35-
defer os.RemoveAll(workDir)
36-
require.NoError(t, llm.Init(workDir))
3733
ctx, cancel := context.WithTimeout(context.Background(), time.Second*60)
3834
defer cancel()
39-
opts := api.DefaultOptions()
40-
opts.Seed = 42
41-
opts.Temperature = 0.0
42-
model, llmRunner := PrepareModelForPrompts(t, req.Model, opts)
43-
defer llmRunner.Close()
44-
response := OneShotPromptResponse(t, ctx, req, model, llmRunner)
45-
log.Print(response)
46-
assert.Contains(t, strings.ToLower(response), resp)
35+
GenerateTestHelper(ctx, t, &http.Client{}, req, []string{resp})
4736
}
4837

4938
const imageEncoding = `iVBORw0KGgoAAAANSUhEUgAAANIAAAB4CAYAAACHHqzKAAAAAXNSR0IArs4c6QAAAIRlWElmTU0AKgAAAAgABQESAAMAAAABAAEAAAEaAAUAAAABAAAASgEb

integration/llm_test.go

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
//go:build integration
2+
3+
package integration
4+
5+
import (
6+
"context"
7+
"net/http"
8+
"sync"
9+
"testing"
10+
"time"
11+
12+
"github.com/jmorganca/ollama/api"
13+
)
14+
15+
// TODO - this would ideally be in the llm package, but that would require some refactoring of interfaces in the server
16+
// package to avoid circular dependencies
17+
18+
// WARNING - these tests will fail on mac if you don't manually copy ggml-metal.metal to this dir (./server)
19+
//
20+
// TODO - Fix this ^^
21+
22+
var (
	// stream disables response streaming so each request returns a
	// single JSON body the helpers can unmarshal in one shot.
	stream = false
	// req holds deterministic generate requests (fixed seed, zero
	// temperature) shared by the tests in this package.
	req = [2]api.GenerateRequest{
		{
			Model:  "orca-mini",
			Prompt: "why is the ocean blue?",
			Stream: &stream,
			Options: map[string]interface{}{
				"seed":        42,
				"temperature": 0.0,
			},
		}, {
			Model:  "orca-mini",
			Prompt: "what is the origin of the us thanksgiving holiday?",
			Stream: &stream,
			Options: map[string]interface{}{
				"seed":        42,
				"temperature": 0.0,
			},
		},
	}
	// resp[i] is a substring expected (case-insensitively) in the
	// model's response to req[i].
	resp = [2]string{
		"scattering",
		"united states thanksgiving",
	}
)
48+
49+
func TestIntegrationSimpleOrcaMini(t *testing.T) {
50+
ctx, cancel := context.WithTimeout(context.Background(), time.Second*60)
51+
defer cancel()
52+
GenerateTestHelper(ctx, t, &http.Client{}, req[0], []string{resp[0]})
53+
}
54+
55+
// TODO
56+
// The server always loads a new runner and closes the old one, which forces serial execution
57+
// At present this test case fails with concurrency problems. Eventually we should try to
58+
// get true concurrency working with n_parallel support in the backend
59+
func TestIntegrationConcurrentPredictOrcaMini(t *testing.T) {
60+
var wg sync.WaitGroup
61+
wg.Add(len(req))
62+
ctx, cancel := context.WithTimeout(context.Background(), time.Second*60)
63+
defer cancel()
64+
for i := 0; i < len(req); i++ {
65+
go func(i int) {
66+
defer wg.Done()
67+
GenerateTestHelper(ctx, t, &http.Client{}, req[i], []string{resp[i]})
68+
}(i)
69+
}
70+
wg.Wait()
71+
}
72+
73+
// TODO - create a parallel test with 2 different models once we support concurrency

integration/utils_test.go

Lines changed: 190 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,190 @@
1+
//go:build integration
2+
3+
package integration
4+
5+
import (
6+
"bytes"
7+
"context"
8+
"encoding/json"
9+
"fmt"
10+
"io"
11+
"log/slog"
12+
"math/rand"
13+
"net"
14+
"net/http"
15+
"os"
16+
"path/filepath"
17+
"runtime"
18+
"strconv"
19+
"strings"
20+
"sync"
21+
"testing"
22+
"time"
23+
24+
"github.com/jmorganca/ollama/api"
25+
"github.com/jmorganca/ollama/app/lifecycle"
26+
"github.com/stretchr/testify/assert"
27+
)
28+
29+
func FindPort() string {
30+
port := 0
31+
if a, err := net.ResolveTCPAddr("tcp", "localhost:0"); err == nil {
32+
var l *net.TCPListener
33+
if l, err = net.ListenTCP("tcp", a); err == nil {
34+
port = l.Addr().(*net.TCPAddr).Port
35+
l.Close()
36+
}
37+
}
38+
if port == 0 {
39+
port = rand.Intn(65535-49152) + 49152 // get a random port in the ephemeral range
40+
}
41+
return strconv.Itoa(port)
42+
}
43+
44+
// GetTestEndpoint determines the scheme and "host:port" the tests should
// talk to, derived from OLLAMA_HOST. When OLLAMA_TEST_EXISTING is unset
// (we will spawn our own server) and the port is the default, a random
// free port is substituted so the test server cannot collide with a
// real local instance.
func GetTestEndpoint() (string, string) {
	defaultPort := "11434"
	ollamaHost := os.Getenv("OLLAMA_HOST")

	// Split off an explicit scheme if present; default to http.
	scheme, hostport, ok := strings.Cut(ollamaHost, "://")
	if !ok {
		scheme, hostport = "http", ollamaHost
	}

	// trim trailing slashes
	hostport = strings.TrimRight(hostport, "/")

	host, port, err := net.SplitHostPort(hostport)
	if err != nil {
		// No explicit port given: fall back to defaults, preserving any
		// host (bracketed IPv6 literal, bare IP, or hostname).
		host, port = "127.0.0.1", defaultPort
		if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
			host = ip.String()
		} else if hostport != "" {
			host = hostport
		}
	}

	// Pick a fresh port when we are about to spawn a server ourselves.
	if os.Getenv("OLLAMA_TEST_EXISTING") == "" && port == defaultPort {
		port = FindPort()
	}

	url := fmt.Sprintf("%s:%s", host, port)
	slog.Info("server connection", "url", url)
	return scheme, url
}
74+
75+
// TODO make fancier, grab logs, etc.
76+
var serverMutex sync.Mutex
77+
var serverReady bool
78+
79+
func StartServer(ctx context.Context, ollamaHost string) error {
80+
// Make sure the server has been built
81+
CLIName, err := filepath.Abs("../ollama")
82+
if err != nil {
83+
return err
84+
}
85+
86+
if runtime.GOOS == "windows" {
87+
CLIName += ".exe"
88+
}
89+
_, err = os.Stat(CLIName)
90+
if err != nil {
91+
return fmt.Errorf("CLI missing, did you forget to build first? %w", err)
92+
}
93+
serverMutex.Lock()
94+
defer serverMutex.Unlock()
95+
if serverReady {
96+
return nil
97+
}
98+
99+
if tmp := os.Getenv("OLLAMA_HOST"); tmp != ollamaHost {
100+
slog.Info("setting env", "OLLAMA_HOST", ollamaHost)
101+
os.Setenv("OLLAMA_HOST", ollamaHost)
102+
}
103+
104+
slog.Info("starting server", "url", ollamaHost)
105+
done, err := lifecycle.SpawnServer(ctx, "../ollama")
106+
if err != nil {
107+
return fmt.Errorf("failed to start server: %w", err)
108+
}
109+
110+
go func() {
111+
<-ctx.Done()
112+
serverMutex.Lock()
113+
defer serverMutex.Unlock()
114+
exitCode := <-done
115+
if exitCode > 0 {
116+
slog.Warn("server failure", "exit", exitCode)
117+
}
118+
serverReady = false
119+
}()
120+
121+
// TODO wait only long enough for the server to be responsive...
122+
time.Sleep(500 * time.Millisecond)
123+
124+
serverReady = true
125+
return nil
126+
}
127+
128+
func GenerateTestHelper(ctx context.Context, t *testing.T, client *http.Client, genReq api.GenerateRequest, anyResp []string) {
129+
requestJSON, err := json.Marshal(genReq)
130+
if err != nil {
131+
t.Fatalf("Error serializing request: %v", err)
132+
}
133+
defer func() {
134+
if t.Failed() && os.Getenv("OLLAMA_TEST_EXISTING") == "" {
135+
// TODO
136+
fp, err := os.Open(lifecycle.ServerLogFile)
137+
if err != nil {
138+
slog.Error("failed to open server log", "logfile", lifecycle.ServerLogFile, "error", err)
139+
return
140+
}
141+
data, err := io.ReadAll(fp)
142+
if err != nil {
143+
slog.Error("failed to read server log", "logfile", lifecycle.ServerLogFile, "error", err)
144+
return
145+
}
146+
slog.Warn("SERVER LOG FOLLOWS")
147+
os.Stderr.Write(data)
148+
slog.Warn("END OF SERVER")
149+
}
150+
err = os.Remove(lifecycle.ServerLogFile)
151+
if err != nil && !os.IsNotExist(err) {
152+
slog.Warn("failed to cleanup", "logfile", lifecycle.ServerLogFile, "error", err)
153+
}
154+
}()
155+
scheme, testEndpoint := GetTestEndpoint()
156+
157+
if os.Getenv("OLLAMA_TEST_EXISTING") == "" {
158+
assert.NoError(t, StartServer(ctx, testEndpoint))
159+
}
160+
161+
// Make the request and get the response
162+
req, err := http.NewRequest("POST", scheme+"://"+testEndpoint+"/api/generate", bytes.NewReader(requestJSON))
163+
if err != nil {
164+
t.Fatalf("Error creating request: %v", err)
165+
}
166+
167+
// Set the content type for the request
168+
req.Header.Set("Content-Type", "application/json")
169+
170+
// Make the request with the HTTP client
171+
response, err := client.Do(req.WithContext(ctx))
172+
if err != nil {
173+
t.Fatalf("Error making request: %v", err)
174+
}
175+
body, err := io.ReadAll(response.Body)
176+
assert.NoError(t, err)
177+
assert.Equal(t, response.StatusCode, 200, string(body))
178+
179+
// Verify the response is valid JSON
180+
var payload api.GenerateResponse
181+
err = json.Unmarshal(body, &payload)
182+
if err != nil {
183+
assert.NoError(t, err, body)
184+
}
185+
186+
// Verify the response contains the expected data
187+
for _, resp := range anyResp {
188+
assert.Contains(t, strings.ToLower(payload.Response), resp)
189+
}
190+
}

0 commit comments

Comments
 (0)