2024-02-21 01:21:19 +00:00
|
|
|
package http_test
|
2023-04-21 22:44:52 +00:00
|
|
|
|
|
|
|
import (
|
2023-05-20 15:03:53 +00:00
|
|
|
"bytes"
|
2023-04-21 22:44:52 +00:00
|
|
|
"context"
|
2023-06-05 07:42:50 +00:00
|
|
|
"embed"
|
2023-05-20 15:03:53 +00:00
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
2023-07-14 23:19:43 +00:00
|
|
|
"io"
|
2023-05-20 15:03:53 +00:00
|
|
|
"net/http"
|
2023-04-21 22:44:52 +00:00
|
|
|
"os"
|
2023-05-12 08:04:20 +00:00
|
|
|
"path/filepath"
|
2023-05-12 12:10:18 +00:00
|
|
|
"runtime"
|
2023-04-21 22:44:52 +00:00
|
|
|
|
2024-12-08 12:50:33 +00:00
|
|
|
"github.com/mudler/LocalAI/core/application"
|
2024-06-23 08:24:36 +00:00
|
|
|
"github.com/mudler/LocalAI/core/config"
|
|
|
|
. "github.com/mudler/LocalAI/core/http"
|
|
|
|
"github.com/mudler/LocalAI/core/schema"
|
2024-03-01 15:19:53 +00:00
|
|
|
|
2023-04-21 22:44:52 +00:00
|
|
|
"github.com/gofiber/fiber/v2"
|
2024-06-24 15:32:12 +00:00
|
|
|
"github.com/mudler/LocalAI/core/gallery"
|
2024-06-23 08:24:36 +00:00
|
|
|
"github.com/mudler/LocalAI/pkg/downloader"
|
2023-04-21 22:44:52 +00:00
|
|
|
. "github.com/onsi/ginkgo/v2"
|
|
|
|
. "github.com/onsi/gomega"
|
2023-05-20 15:03:53 +00:00
|
|
|
"gopkg.in/yaml.v3"
|
2023-04-21 22:44:52 +00:00
|
|
|
|
2023-04-29 07:22:09 +00:00
|
|
|
openaigo "github.com/otiai10/openaigo"
|
2023-04-21 22:44:52 +00:00
|
|
|
"github.com/sashabaranov/go-openai"
|
2023-07-14 23:19:43 +00:00
|
|
|
"github.com/sashabaranov/go-openai/jsonschema"
|
2023-04-21 22:44:52 +00:00
|
|
|
)
|
|
|
|
|
2024-09-24 07:32:48 +00:00
|
|
|
// apiKey is the API key the test server instances are configured with.
const apiKey = "joshua"

// bearerKey is the ready-made Authorization header value for apiKey.
const bearerKey = "Bearer " + apiKey
|
|
|
|
|
2024-02-08 19:12:51 +00:00
|
|
|
// testPrompt is a generic instruction-style prompt used by the completion
// tests; the exact text is not asserted on, only that a completion succeeds.
const testPrompt = `### System:
You are an AI assistant that follows instruction extremely well. Help as much as you can.

### User:

Can you help rephrasing sentences?

### Response:`
|
|
|
|
|
2023-05-20 15:03:53 +00:00
|
|
|
// modelApplyRequest is the JSON payload sent to the /models/apply endpoint.
// A model can be referenced by gallery ID, by a direct gallery-file URL, or
// by a config URL; it can optionally be renamed and have config fields
// overridden after installation.
type modelApplyRequest struct {
	ID        string                 `json:"id"`         // gallery reference, e.g. "test@bert2"
	URL       string                 `json:"url"`        // direct URL to a gallery model file
	ConfigURL string                 `json:"config_url"` // URL to a model config YAML
	Name      string                 `json:"name"`       // name to install the model under
	Overrides map[string]interface{} `json:"overrides"`  // config fields to override on install
}
|
|
|
|
|
|
|
|
func getModelStatus(url string) (response map[string]interface{}) {
|
|
|
|
// Create the HTTP request
|
2024-09-24 07:32:48 +00:00
|
|
|
req, err := http.NewRequest("GET", url, nil)
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
|
|
req.Header.Set("Authorization", bearerKey)
|
2023-05-20 15:03:53 +00:00
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error creating request:", err)
|
|
|
|
return
|
|
|
|
}
|
2024-09-24 07:32:48 +00:00
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error sending request:", err)
|
|
|
|
return
|
|
|
|
}
|
2023-05-20 15:03:53 +00:00
|
|
|
defer resp.Body.Close()
|
|
|
|
|
2023-07-30 11:23:43 +00:00
|
|
|
body, err := io.ReadAll(resp.Body)
|
2023-05-20 15:03:53 +00:00
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error reading response body:", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Unmarshal the response into a map[string]interface{}
|
|
|
|
err = json.Unmarshal(body, &response)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error unmarshaling JSON response:", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
2023-06-24 06:18:17 +00:00
|
|
|
|
2024-09-24 07:32:48 +00:00
|
|
|
func getModels(url string) ([]gallery.GalleryModel, error) {
|
|
|
|
response := []gallery.GalleryModel{}
|
2024-08-02 18:06:25 +00:00
|
|
|
uri := downloader.URI(url)
|
2024-06-04 14:32:47 +00:00
|
|
|
// TODO: No tests currently seem to exercise file:// urls. Fix?
|
2024-09-24 07:32:48 +00:00
|
|
|
err := uri.DownloadWithAuthorizationAndCallback("", bearerKey, func(url string, i []byte) error {
|
2023-06-26 10:25:38 +00:00
|
|
|
// Unmarshal YAML data into a struct
|
|
|
|
return json.Unmarshal(i, &response)
|
|
|
|
})
|
2024-09-24 07:32:48 +00:00
|
|
|
return response, err
|
2023-06-24 06:18:17 +00:00
|
|
|
}
|
|
|
|
|
2023-05-20 15:03:53 +00:00
|
|
|
func postModelApplyRequest(url string, request modelApplyRequest) (response map[string]interface{}) {
|
|
|
|
|
|
|
|
//url := "http://localhost:AI/models/apply"
|
|
|
|
|
|
|
|
// Create the request payload
|
|
|
|
|
|
|
|
payload, err := json.Marshal(request)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error marshaling JSON:", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Create the HTTP request
|
|
|
|
req, err := http.NewRequest("POST", url, bytes.NewBuffer(payload))
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error creating request:", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
2024-09-24 07:32:48 +00:00
|
|
|
req.Header.Set("Authorization", bearerKey)
|
2023-05-20 15:03:53 +00:00
|
|
|
|
|
|
|
// Make the request
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error making request:", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
2023-07-30 11:23:43 +00:00
|
|
|
body, err := io.ReadAll(resp.Body)
|
2023-05-20 15:03:53 +00:00
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error reading response body:", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Unmarshal the response into a map[string]interface{}
|
|
|
|
err = json.Unmarshal(body, &response)
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("Error unmarshaling JSON response:", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2024-03-22 20:14:04 +00:00
|
|
|
func postRequestJSON[B any](url string, bodyJson *B) error {
|
|
|
|
payload, err := json.Marshal(bodyJson)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
GinkgoWriter.Printf("POST %s: %s\n", url, string(payload))
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", url, bytes.NewBuffer(payload))
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
2024-09-24 07:32:48 +00:00
|
|
|
req.Header.Set("Authorization", bearerKey)
|
2024-03-22 20:14:04 +00:00
|
|
|
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
body, err := io.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if resp.StatusCode < 200 || resp.StatusCode >= 400 {
|
|
|
|
return fmt.Errorf("unexpected status code: %d, body: %s", resp.StatusCode, string(body))
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func postRequestResponseJSON[B1 any, B2 any](url string, reqJson *B1, respJson *B2) error {
|
|
|
|
payload, err := json.Marshal(reqJson)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
GinkgoWriter.Printf("POST %s: %s\n", url, string(payload))
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", url, bytes.NewBuffer(payload))
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
2024-09-24 07:32:48 +00:00
|
|
|
req.Header.Set("Authorization", bearerKey)
|
2024-03-22 20:14:04 +00:00
|
|
|
|
|
|
|
client := &http.Client{}
|
|
|
|
resp, err := client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
body, err := io.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
if resp.StatusCode < 200 || resp.StatusCode >= 400 {
|
|
|
|
return fmt.Errorf("unexpected status code: %d, body: %s", resp.StatusCode, string(body))
|
|
|
|
}
|
|
|
|
|
|
|
|
return json.Unmarshal(body, respJson)
|
|
|
|
}
|
|
|
|
|
2024-09-24 07:32:48 +00:00
|
|
|
// postInvalidRequest POSTs a deliberately malformed body to url WITHOUT an
// Authorization header and reports the outcome together with the HTTP
// status code (-1 when no response was obtained). The auth tests use it to
// verify that unauthenticated requests are rejected.
func postInvalidRequest(url string) (error, int) {
	request, err := http.NewRequest("POST", url, bytes.NewBufferString("invalid request"))
	if err != nil {
		return err, -1
	}
	request.Header.Set("Content-Type", "application/json")

	response, err := (&http.Client{}).Do(request)
	if err != nil {
		return err, -1
	}
	defer response.Body.Close()

	raw, err := io.ReadAll(response.Body)
	if err != nil {
		return err, -1
	}

	if response.StatusCode < 200 || response.StatusCode >= 400 {
		return fmt.Errorf("unexpected status code: %d, body: %s", response.StatusCode, string(raw)), response.StatusCode
	}

	return nil, response.StatusCode
}
|
|
|
|
|
2024-11-27 15:34:28 +00:00
|
|
|
// bertEmbeddingsURL points at a pinned (commit-addressed) gallery
// definition for the bert-embeddings model used throughout these tests.
const bertEmbeddingsURL = `https://gist.githubusercontent.com/mudler/0a080b166b87640e8644b09c2aee6e3b/raw/f0e8c26bb72edc16d9fbafbfd6638072126ff225/bert-embeddings-gallery.yaml`
|
|
|
|
|
2023-06-05 07:42:50 +00:00
|
|
|
// backendAssets bundles the backend-assets directory into the test binary;
// the tests hand it to the application via config.WithBackendAssets so the
// API can extract the assets into a temporary directory.
//
//go:embed backend-assets/*
var backendAssets embed.FS
|
|
|
|
|
2023-04-21 22:44:52 +00:00
|
|
|
var _ = Describe("API test", func() {
|
|
|
|
|
|
|
|
var app *fiber.App
|
|
|
|
var client *openai.Client
|
2023-04-29 07:22:09 +00:00
|
|
|
var client2 *openaigo.Client
|
2023-05-18 13:59:03 +00:00
|
|
|
var c context.Context
|
|
|
|
var cancel context.CancelFunc
|
2023-05-20 15:03:53 +00:00
|
|
|
var tmpdir string
|
2024-03-01 15:19:53 +00:00
|
|
|
var modelDir string
|
|
|
|
|
|
|
|
commonOpts := []config.AppOption{
|
|
|
|
config.WithDebug(true),
|
2023-07-20 21:45:29 +00:00
|
|
|
}
|
2023-05-20 15:03:53 +00:00
|
|
|
|
|
|
|
Context("API with ephemeral models", func() {
|
2024-03-01 15:19:53 +00:00
|
|
|
|
|
|
|
BeforeEach(func(sc SpecContext) {
|
2023-05-20 15:03:53 +00:00
|
|
|
var err error
|
|
|
|
tmpdir, err = os.MkdirTemp("", "")
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
2024-03-01 15:19:53 +00:00
|
|
|
modelDir = filepath.Join(tmpdir, "models")
|
2024-06-05 06:45:24 +00:00
|
|
|
err = os.Mkdir(modelDir, 0750)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-03-01 15:19:53 +00:00
|
|
|
backendAssetsDir := filepath.Join(tmpdir, "backend-assets")
|
2024-04-25 22:47:06 +00:00
|
|
|
err = os.Mkdir(backendAssetsDir, 0750)
|
2024-03-01 15:19:53 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
2023-05-20 15:03:53 +00:00
|
|
|
c, cancel = context.WithCancel(context.Background())
|
|
|
|
|
2023-06-24 06:18:17 +00:00
|
|
|
g := []gallery.GalleryModel{
|
|
|
|
{
|
|
|
|
Name: "bert",
|
2024-11-27 15:34:28 +00:00
|
|
|
URL: bertEmbeddingsURL,
|
2023-06-24 06:18:17 +00:00
|
|
|
},
|
|
|
|
{
|
|
|
|
Name: "bert2",
|
2024-11-27 15:34:28 +00:00
|
|
|
URL: bertEmbeddingsURL,
|
2023-06-24 06:18:17 +00:00
|
|
|
Overrides: map[string]interface{}{"foo": "bar"},
|
2024-11-27 15:34:28 +00:00
|
|
|
AdditionalFiles: []gallery.File{{Filename: "foo.yaml", URI: bertEmbeddingsURL}},
|
2023-06-24 06:18:17 +00:00
|
|
|
},
|
|
|
|
}
|
|
|
|
out, err := yaml.Marshal(g)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-06-05 06:45:24 +00:00
|
|
|
err = os.WriteFile(filepath.Join(modelDir, "gallery_simple.yaml"), out, 0600)
|
2023-06-24 06:18:17 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
2024-06-24 15:32:12 +00:00
|
|
|
galleries := []config.Gallery{
|
2023-06-24 06:18:17 +00:00
|
|
|
{
|
|
|
|
Name: "test",
|
2024-06-05 06:45:24 +00:00
|
|
|
URL: "file://" + filepath.Join(modelDir, "gallery_simple.yaml"),
|
2023-06-24 06:18:17 +00:00
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2024-12-08 12:50:33 +00:00
|
|
|
application, err := application.New(
|
2024-03-01 15:19:53 +00:00
|
|
|
append(commonOpts,
|
|
|
|
config.WithContext(c),
|
|
|
|
config.WithGalleries(galleries),
|
|
|
|
config.WithModelPath(modelDir),
|
2024-09-24 07:32:48 +00:00
|
|
|
config.WithApiKeys([]string{apiKey}),
|
2024-03-01 15:19:53 +00:00
|
|
|
config.WithBackendAssets(backendAssets),
|
|
|
|
config.WithBackendAssetsOutput(backendAssetsDir))...)
|
2023-10-17 16:22:53 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
2024-12-08 12:50:33 +00:00
|
|
|
app, err = API(application)
|
2023-05-30 10:35:32 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-03-01 15:19:53 +00:00
|
|
|
|
2023-05-20 15:03:53 +00:00
|
|
|
go app.Listen("127.0.0.1:9090")
|
|
|
|
|
2024-09-24 07:32:48 +00:00
|
|
|
defaultConfig := openai.DefaultConfig(apiKey)
|
2023-05-20 15:03:53 +00:00
|
|
|
defaultConfig.BaseURL = "http://127.0.0.1:9090/v1"
|
|
|
|
|
|
|
|
client2 = openaigo.NewClient("")
|
|
|
|
client2.BaseURL = defaultConfig.BaseURL
|
|
|
|
|
|
|
|
// Wait for API to be ready
|
|
|
|
client = openai.NewClientWithConfig(defaultConfig)
|
|
|
|
Eventually(func() error {
|
|
|
|
_, err := client.ListModels(context.TODO())
|
|
|
|
return err
|
|
|
|
}, "2m").ShouldNot(HaveOccurred())
|
|
|
|
})
|
|
|
|
|
2024-03-01 15:19:53 +00:00
|
|
|
AfterEach(func(sc SpecContext) {
|
2023-05-20 15:03:53 +00:00
|
|
|
cancel()
|
2024-03-01 15:19:53 +00:00
|
|
|
if app != nil {
|
|
|
|
err := app.Shutdown()
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
}
|
|
|
|
err := os.RemoveAll(tmpdir)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
_, err = os.ReadDir(tmpdir)
|
|
|
|
Expect(err).To(HaveOccurred())
|
2023-05-20 15:03:53 +00:00
|
|
|
})
|
|
|
|
|
2024-09-24 07:32:48 +00:00
|
|
|
Context("Auth Tests", func() {
|
|
|
|
It("Should fail if the api key is missing", func() {
|
|
|
|
err, sc := postInvalidRequest("http://127.0.0.1:9090/models/available")
|
|
|
|
Expect(err).ToNot(BeNil())
|
2024-11-19 17:43:02 +00:00
|
|
|
Expect(sc).To(Equal(401))
|
2024-09-24 07:32:48 +00:00
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2023-05-20 15:03:53 +00:00
|
|
|
Context("Applying models", func() {
|
2023-06-24 06:18:17 +00:00
|
|
|
|
2024-03-01 15:19:53 +00:00
|
|
|
It("applies models from a gallery", func() {
|
2024-09-24 07:32:48 +00:00
|
|
|
models, err := getModels("http://127.0.0.1:9090/models/available")
|
|
|
|
Expect(err).To(BeNil())
|
2023-06-24 06:18:17 +00:00
|
|
|
Expect(len(models)).To(Equal(2), fmt.Sprint(models))
|
|
|
|
Expect(models[0].Installed).To(BeFalse(), fmt.Sprint(models))
|
|
|
|
Expect(models[1].Installed).To(BeFalse(), fmt.Sprint(models))
|
|
|
|
|
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
|
|
ID: "test@bert2",
|
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
resp := map[string]interface{}{}
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
fmt.Println(response)
|
|
|
|
resp = response
|
|
|
|
return response["processed"].(bool)
|
2023-07-14 23:19:43 +00:00
|
|
|
}, "360s", "10s").Should(Equal(true))
|
2023-06-24 06:18:17 +00:00
|
|
|
Expect(resp["message"]).ToNot(ContainSubstring("error"))
|
|
|
|
|
2024-03-01 15:19:53 +00:00
|
|
|
dat, err := os.ReadFile(filepath.Join(modelDir, "bert2.yaml"))
|
2023-06-24 06:18:17 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
2024-03-01 15:19:53 +00:00
|
|
|
_, err = os.ReadFile(filepath.Join(modelDir, "foo.yaml"))
|
2023-06-24 06:18:17 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
|
|
|
content := map[string]interface{}{}
|
|
|
|
err = yaml.Unmarshal(dat, &content)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-11-27 15:34:28 +00:00
|
|
|
Expect(content["usage"]).To(ContainSubstring("You can test this model with curl like this"))
|
2023-06-24 06:18:17 +00:00
|
|
|
Expect(content["foo"]).To(Equal("bar"))
|
|
|
|
|
2024-09-24 07:32:48 +00:00
|
|
|
models, err = getModels("http://127.0.0.1:9090/models/available")
|
|
|
|
Expect(err).To(BeNil())
|
2023-06-24 06:18:17 +00:00
|
|
|
Expect(len(models)).To(Equal(2), fmt.Sprint(models))
|
|
|
|
Expect(models[0].Name).To(Or(Equal("bert"), Equal("bert2")))
|
|
|
|
Expect(models[1].Name).To(Or(Equal("bert"), Equal("bert2")))
|
|
|
|
for _, m := range models {
|
|
|
|
if m.Name == "bert2" {
|
|
|
|
Expect(m.Installed).To(BeTrue())
|
|
|
|
} else {
|
|
|
|
Expect(m.Installed).To(BeFalse())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
})
|
2023-05-20 15:03:53 +00:00
|
|
|
It("overrides models", func() {
|
2024-03-01 15:19:53 +00:00
|
|
|
|
2023-05-20 15:03:53 +00:00
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
2024-11-27 15:34:28 +00:00
|
|
|
URL: bertEmbeddingsURL,
|
2023-05-20 15:03:53 +00:00
|
|
|
Name: "bert",
|
2023-07-28 22:04:25 +00:00
|
|
|
Overrides: map[string]interface{}{
|
2023-05-20 15:03:53 +00:00
|
|
|
"backend": "llama",
|
|
|
|
},
|
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
return response["processed"].(bool)
|
2023-07-14 23:19:43 +00:00
|
|
|
}, "360s", "10s").Should(Equal(true))
|
2023-05-20 15:03:53 +00:00
|
|
|
|
2024-03-01 15:19:53 +00:00
|
|
|
dat, err := os.ReadFile(filepath.Join(modelDir, "bert.yaml"))
|
2023-05-20 15:03:53 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
|
|
|
content := map[string]interface{}{}
|
|
|
|
err = yaml.Unmarshal(dat, &content)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(content["backend"]).To(Equal("llama"))
|
|
|
|
})
|
2024-04-11 22:49:23 +00:00
|
|
|
It("apply models from config", func() {
|
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
|
|
ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml",
|
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
return response["processed"].(bool)
|
2024-10-30 14:34:44 +00:00
|
|
|
}, "900s", "10s").Should(Equal(true))
|
2024-04-11 22:49:23 +00:00
|
|
|
|
|
|
|
Eventually(func() []string {
|
|
|
|
models, _ := client.ListModels(context.TODO())
|
|
|
|
modelList := []string{}
|
|
|
|
for _, m := range models.Models {
|
|
|
|
modelList = append(modelList, m.ID)
|
|
|
|
}
|
|
|
|
return modelList
|
|
|
|
}, "360s", "10s").Should(ContainElements("hermes-2-pro-mistral"))
|
|
|
|
})
|
2023-05-20 15:03:53 +00:00
|
|
|
It("apply models without overrides", func() {
|
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
2024-11-27 15:34:28 +00:00
|
|
|
URL: bertEmbeddingsURL,
|
2023-05-20 15:03:53 +00:00
|
|
|
Name: "bert",
|
2023-07-28 22:04:25 +00:00
|
|
|
Overrides: map[string]interface{}{},
|
2023-05-20 15:03:53 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
return response["processed"].(bool)
|
2023-07-14 23:19:43 +00:00
|
|
|
}, "360s", "10s").Should(Equal(true))
|
2023-05-20 15:03:53 +00:00
|
|
|
|
2024-03-01 15:19:53 +00:00
|
|
|
dat, err := os.ReadFile(filepath.Join(modelDir, "bert.yaml"))
|
2023-05-20 15:03:53 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
|
|
|
content := map[string]interface{}{}
|
|
|
|
err = yaml.Unmarshal(dat, &content)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-11-27 15:34:28 +00:00
|
|
|
Expect(content["usage"]).To(ContainSubstring("You can test this model with curl like this"))
|
2023-05-20 15:03:53 +00:00
|
|
|
})
|
2023-06-07 22:36:11 +00:00
|
|
|
|
2023-12-18 17:58:44 +00:00
|
|
|
It("runs openllama(llama-ggml backend)", Label("llama"), func() {
|
2023-06-07 22:36:11 +00:00
|
|
|
if runtime.GOOS != "linux" {
|
|
|
|
Skip("test supported only on linux")
|
|
|
|
}
|
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
|
|
URL: "github:go-skynet/model-gallery/openllama_3b.yaml",
|
|
|
|
Name: "openllama_3b",
|
2023-11-18 07:18:43 +00:00
|
|
|
Overrides: map[string]interface{}{"backend": "llama-ggml", "mmap": true, "f16": true, "context_size": 128},
|
2023-06-07 22:36:11 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
return response["processed"].(bool)
|
2023-07-14 23:19:43 +00:00
|
|
|
}, "360s", "10s").Should(Equal(true))
|
2023-06-07 22:36:11 +00:00
|
|
|
|
2023-07-14 23:19:43 +00:00
|
|
|
By("testing completion")
|
2023-06-07 22:36:11 +00:00
|
|
|
resp, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "openllama_3b", Prompt: "Count up to five: one, two, three, four, "})
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Choices)).To(Equal(1))
|
|
|
|
Expect(resp.Choices[0].Text).To(ContainSubstring("five"))
|
2023-07-14 23:19:43 +00:00
|
|
|
|
|
|
|
By("testing functions")
|
|
|
|
resp2, err := client.CreateChatCompletion(
|
|
|
|
context.TODO(),
|
|
|
|
openai.ChatCompletionRequest{
|
|
|
|
Model: "openllama_3b",
|
|
|
|
Messages: []openai.ChatCompletionMessage{
|
|
|
|
{
|
|
|
|
Role: "user",
|
|
|
|
Content: "What is the weather like in San Francisco (celsius)?",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Functions: []openai.FunctionDefinition{
|
|
|
|
openai.FunctionDefinition{
|
|
|
|
Name: "get_current_weather",
|
|
|
|
Description: "Get the current weather",
|
|
|
|
Parameters: jsonschema.Definition{
|
|
|
|
Type: jsonschema.Object,
|
|
|
|
Properties: map[string]jsonschema.Definition{
|
|
|
|
"location": {
|
|
|
|
Type: jsonschema.String,
|
|
|
|
Description: "The city and state, e.g. San Francisco, CA",
|
|
|
|
},
|
|
|
|
"unit": {
|
|
|
|
Type: jsonschema.String,
|
|
|
|
Enum: []string{"celcius", "fahrenheit"},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Required: []string{"location"},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
})
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp2.Choices)).To(Equal(1))
|
2024-04-17 21:33:49 +00:00
|
|
|
Expect(resp2.Choices[0].Message.FunctionCall).ToNot(BeNil())
|
|
|
|
Expect(resp2.Choices[0].Message.FunctionCall.Name).To(Equal("get_current_weather"), resp2.Choices[0].Message.FunctionCall.Name)
|
2023-07-14 23:19:43 +00:00
|
|
|
|
|
|
|
var res map[string]string
|
2024-04-17 21:33:49 +00:00
|
|
|
err = json.Unmarshal([]byte(resp2.Choices[0].Message.FunctionCall.Arguments), &res)
|
2023-07-14 23:19:43 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-12-08 12:50:33 +00:00
|
|
|
Expect(res["location"]).To(ContainSubstring("San Francisco"), fmt.Sprint(res))
|
2023-07-14 23:19:43 +00:00
|
|
|
Expect(res["unit"]).To(Equal("celcius"), fmt.Sprint(res))
|
|
|
|
Expect(string(resp2.Choices[0].FinishReason)).To(Equal("function_call"), fmt.Sprint(resp2.Choices[0].FinishReason))
|
2023-12-18 17:58:44 +00:00
|
|
|
|
2023-06-07 22:36:11 +00:00
|
|
|
})
|
|
|
|
|
2023-12-18 17:58:44 +00:00
|
|
|
It("runs openllama gguf(llama-cpp)", Label("llama-gguf"), func() {
|
2024-04-17 21:33:49 +00:00
|
|
|
if runtime.GOOS != "linux" {
|
|
|
|
Skip("test supported only on linux")
|
|
|
|
}
|
2024-04-24 16:44:04 +00:00
|
|
|
|
|
|
|
modelName := "hermes-2-pro-mistral"
|
2023-08-23 23:18:58 +00:00
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
2024-04-24 16:44:04 +00:00
|
|
|
ConfigURL: "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/models/hermes-2-pro-mistral.yaml",
|
2023-08-23 23:18:58 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
return response["processed"].(bool)
|
2024-10-31 14:40:43 +00:00
|
|
|
}, "900s", "10s").Should(Equal(true))
|
2023-08-23 23:18:58 +00:00
|
|
|
|
2023-08-30 21:01:55 +00:00
|
|
|
By("testing chat")
|
|
|
|
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: modelName, Messages: []openai.ChatCompletionMessage{
|
|
|
|
{
|
|
|
|
Role: "user",
|
|
|
|
Content: "How much is 2+2?",
|
|
|
|
},
|
|
|
|
}})
|
2023-08-23 23:18:58 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Choices)).To(Equal(1))
|
2023-08-30 21:01:55 +00:00
|
|
|
Expect(resp.Choices[0].Message.Content).To(Or(ContainSubstring("4"), ContainSubstring("four")))
|
2023-08-23 23:18:58 +00:00
|
|
|
|
|
|
|
By("testing functions")
|
|
|
|
resp2, err := client.CreateChatCompletion(
|
|
|
|
context.TODO(),
|
|
|
|
openai.ChatCompletionRequest{
|
2023-08-30 21:01:55 +00:00
|
|
|
Model: modelName,
|
2023-08-23 23:18:58 +00:00
|
|
|
Messages: []openai.ChatCompletionMessage{
|
|
|
|
{
|
|
|
|
Role: "user",
|
|
|
|
Content: "What is the weather like in San Francisco (celsius)?",
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Functions: []openai.FunctionDefinition{
|
|
|
|
openai.FunctionDefinition{
|
|
|
|
Name: "get_current_weather",
|
|
|
|
Description: "Get the current weather",
|
|
|
|
Parameters: jsonschema.Definition{
|
|
|
|
Type: jsonschema.Object,
|
|
|
|
Properties: map[string]jsonschema.Definition{
|
|
|
|
"location": {
|
|
|
|
Type: jsonschema.String,
|
|
|
|
Description: "The city and state, e.g. San Francisco, CA",
|
|
|
|
},
|
|
|
|
"unit": {
|
|
|
|
Type: jsonschema.String,
|
|
|
|
Enum: []string{"celcius", "fahrenheit"},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Required: []string{"location"},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
})
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp2.Choices)).To(Equal(1))
|
2024-04-17 21:33:49 +00:00
|
|
|
Expect(resp2.Choices[0].Message.FunctionCall).ToNot(BeNil())
|
|
|
|
Expect(resp2.Choices[0].Message.FunctionCall.Name).To(Equal("get_current_weather"), resp2.Choices[0].Message.FunctionCall.Name)
|
2023-08-23 23:18:58 +00:00
|
|
|
|
|
|
|
var res map[string]string
|
2024-04-17 21:33:49 +00:00
|
|
|
err = json.Unmarshal([]byte(resp2.Choices[0].Message.FunctionCall.Arguments), &res)
|
2023-08-23 23:18:58 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-04-24 16:44:04 +00:00
|
|
|
Expect(res["location"]).To(ContainSubstring("San Francisco"), fmt.Sprint(res))
|
2023-08-23 23:18:58 +00:00
|
|
|
Expect(res["unit"]).To(Equal("celcius"), fmt.Sprint(res))
|
|
|
|
Expect(string(resp2.Choices[0].FinishReason)).To(Equal("function_call"), fmt.Sprint(resp2.Choices[0].FinishReason))
|
|
|
|
})
|
2023-07-14 23:19:43 +00:00
|
|
|
})
|
|
|
|
})
|
|
|
|
|
|
|
|
Context("Model gallery", func() {
|
|
|
|
BeforeEach(func() {
|
|
|
|
var err error
|
|
|
|
tmpdir, err = os.MkdirTemp("", "")
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-03-01 15:19:53 +00:00
|
|
|
modelDir = filepath.Join(tmpdir, "models")
|
|
|
|
backendAssetsDir := filepath.Join(tmpdir, "backend-assets")
|
2024-04-25 22:47:06 +00:00
|
|
|
err = os.Mkdir(backendAssetsDir, 0750)
|
2024-03-01 15:19:53 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2023-07-14 23:19:43 +00:00
|
|
|
|
|
|
|
c, cancel = context.WithCancel(context.Background())
|
|
|
|
|
2024-06-24 15:32:12 +00:00
|
|
|
galleries := []config.Gallery{
|
2023-07-14 23:19:43 +00:00
|
|
|
{
|
|
|
|
Name: "model-gallery",
|
|
|
|
URL: "https://raw.githubusercontent.com/go-skynet/model-gallery/main/index.yaml",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2024-12-08 12:50:33 +00:00
|
|
|
application, err := application.New(
|
2023-07-19 23:36:34 +00:00
|
|
|
append(commonOpts,
|
2024-03-01 15:19:53 +00:00
|
|
|
config.WithContext(c),
|
|
|
|
config.WithAudioDir(tmpdir),
|
|
|
|
config.WithImageDir(tmpdir),
|
|
|
|
config.WithGalleries(galleries),
|
|
|
|
config.WithModelPath(modelDir),
|
|
|
|
config.WithBackendAssets(backendAssets),
|
|
|
|
config.WithBackendAssetsOutput(tmpdir))...,
|
2023-07-14 23:19:43 +00:00
|
|
|
)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-12-08 12:50:33 +00:00
|
|
|
app, err = API(application)
|
2024-03-01 15:19:53 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
|
2023-07-14 23:19:43 +00:00
|
|
|
go app.Listen("127.0.0.1:9090")
|
|
|
|
|
|
|
|
defaultConfig := openai.DefaultConfig("")
|
|
|
|
defaultConfig.BaseURL = "http://127.0.0.1:9090/v1"
|
|
|
|
|
|
|
|
client2 = openaigo.NewClient("")
|
|
|
|
client2.BaseURL = defaultConfig.BaseURL
|
|
|
|
|
|
|
|
// Wait for API to be ready
|
|
|
|
client = openai.NewClientWithConfig(defaultConfig)
|
|
|
|
Eventually(func() error {
|
|
|
|
_, err := client.ListModels(context.TODO())
|
|
|
|
return err
|
|
|
|
}, "2m").ShouldNot(HaveOccurred())
|
|
|
|
})
|
|
|
|
|
|
|
|
AfterEach(func() {
|
|
|
|
cancel()
|
2024-03-01 15:19:53 +00:00
|
|
|
if app != nil {
|
|
|
|
err := app.Shutdown()
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
}
|
|
|
|
err := os.RemoveAll(tmpdir)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
_, err = os.ReadDir(tmpdir)
|
|
|
|
Expect(err).To(HaveOccurred())
|
2023-07-14 23:19:43 +00:00
|
|
|
})
|
|
|
|
It("installs and is capable to run tts", Label("tts"), func() {
|
|
|
|
if runtime.GOOS != "linux" {
|
|
|
|
Skip("test supported only on linux")
|
|
|
|
}
|
|
|
|
|
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
|
|
ID: "model-gallery@voice-en-us-kathleen-low",
|
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
fmt.Println(response)
|
|
|
|
return response["processed"].(bool)
|
|
|
|
}, "360s", "10s").Should(Equal(true))
|
|
|
|
|
|
|
|
// An HTTP Post to the /tts endpoint should return a wav audio file
|
|
|
|
resp, err := http.Post("http://127.0.0.1:9090/tts", "application/json", bytes.NewBuffer([]byte(`{"input": "Hello world", "model": "en-us-kathleen-low.onnx"}`)))
|
|
|
|
Expect(err).ToNot(HaveOccurred(), fmt.Sprint(resp))
|
|
|
|
dat, err := io.ReadAll(resp.Body)
|
|
|
|
Expect(err).ToNot(HaveOccurred(), fmt.Sprint(resp))
|
|
|
|
|
|
|
|
Expect(resp.StatusCode).To(Equal(200), fmt.Sprint(string(dat)))
|
|
|
|
Expect(resp.Header.Get("Content-Type")).To(Equal("audio/x-wav"))
|
|
|
|
})
|
|
|
|
It("installs and is capable to generate images", Label("stablediffusion"), func() {
|
|
|
|
if runtime.GOOS != "linux" {
|
|
|
|
Skip("test supported only on linux")
|
|
|
|
}
|
|
|
|
|
|
|
|
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
|
|
|
|
ID: "model-gallery@stablediffusion",
|
2023-08-09 06:38:51 +00:00
|
|
|
Overrides: map[string]interface{}{
|
|
|
|
"parameters": map[string]interface{}{"model": "stablediffusion_assets"},
|
|
|
|
},
|
2023-07-14 23:19:43 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
|
|
|
|
|
|
|
|
uuid := response["uuid"].(string)
|
|
|
|
|
|
|
|
Eventually(func() bool {
|
|
|
|
response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
|
|
|
|
fmt.Println(response)
|
|
|
|
return response["processed"].(bool)
|
|
|
|
}, "360s", "10s").Should(Equal(true))
|
|
|
|
|
|
|
|
resp, err := http.Post(
|
|
|
|
"http://127.0.0.1:9090/v1/images/generations",
|
|
|
|
"application/json",
|
|
|
|
bytes.NewBuffer([]byte(`{
|
|
|
|
"prompt": "floating hair, portrait, ((loli)), ((one girl)), cute face, hidden hands, asymmetrical bangs, beautiful detailed eyes, eye shadow, hair ornament, ribbons, bowties, buttons, pleated skirt, (((masterpiece))), ((best quality)), colorful|((part of the head)), ((((mutated hands and fingers)))), deformed, blurry, bad anatomy, disfigured, poorly drawn face, mutation, mutated, extra limb, ugly, poorly drawn hands, missing limb, blurry, floating limbs, disconnected limbs, malformed hands, blur, out of focus, long neck, long body, Octane renderer, lowres, bad anatomy, bad hands, text",
|
|
|
|
"mode": 2, "seed":9000,
|
|
|
|
"size": "256x256", "n":2}`)))
|
|
|
|
// The response should contain an URL
|
|
|
|
Expect(err).ToNot(HaveOccurred(), fmt.Sprint(resp))
|
|
|
|
dat, err := io.ReadAll(resp.Body)
|
2024-05-05 18:46:33 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred(), "error reading /image/generations response")
|
|
|
|
|
|
|
|
imgUrlResp := &schema.OpenAIResponse{}
|
|
|
|
err = json.Unmarshal(dat, imgUrlResp)
|
|
|
|
Expect(imgUrlResp.Data).ToNot(Or(BeNil(), BeZero()))
|
|
|
|
imgUrl := imgUrlResp.Data[0].URL
|
|
|
|
Expect(imgUrl).To(ContainSubstring("http://127.0.0.1:9090/"), imgUrl)
|
|
|
|
Expect(imgUrl).To(ContainSubstring(".png"), imgUrl)
|
|
|
|
|
|
|
|
imgResp, err := http.Get(imgUrl)
|
|
|
|
Expect(err).To(BeNil())
|
|
|
|
Expect(imgResp).ToNot(BeNil())
|
|
|
|
Expect(imgResp.StatusCode).To(Equal(200))
|
|
|
|
Expect(imgResp.ContentLength).To(BeNumerically(">", 0))
|
|
|
|
imgData := make([]byte, 512)
|
|
|
|
count, err := io.ReadFull(imgResp.Body, imgData)
|
|
|
|
Expect(err).To(Or(BeNil(), MatchError(io.EOF)))
|
|
|
|
Expect(count).To(BeNumerically(">", 0))
|
|
|
|
Expect(count).To(BeNumerically("<=", 512))
|
|
|
|
Expect(http.DetectContentType(imgData)).To(Equal("image/png"))
|
2023-05-20 15:03:53 +00:00
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2023-04-21 22:44:52 +00:00
|
|
|
Context("API query", func() {
|
|
|
|
// Boots a fresh LocalAI instance (with the external "huggingface" gRPC
// backend registered) before each spec and waits until the API answers.
BeforeEach(func() {
	// MODELS_PATH points at the fixture models used by these specs.
	modelPath := os.Getenv("MODELS_PATH")
	c, cancel = context.WithCancel(context.Background())

	var err error
	// fix: name the local differently from the imported "application"
	// package so the package is not shadowed.
	testApplication, err := application.New(
		append(commonOpts,
			config.WithExternalBackend("huggingface", os.Getenv("HUGGINGFACE_GRPC")),
			config.WithContext(c),
			config.WithModelPath(modelPath),
		)...)
	Expect(err).ToNot(HaveOccurred())
	app, err = API(testApplication)
	Expect(err).ToNot(HaveOccurred())

	go app.Listen("127.0.0.1:9090")

	defaultConfig := openai.DefaultConfig("")
	defaultConfig.BaseURL = "http://127.0.0.1:9090/v1"

	client2 = openaigo.NewClient("")
	client2.BaseURL = defaultConfig.BaseURL

	// Wait for API to be ready
	client = openai.NewClientWithConfig(defaultConfig)
	Eventually(func() error {
		_, err := client.ListModels(context.TODO())
		return err
	}, "2m").ShouldNot(HaveOccurred())
})
|
|
|
|
// Tears the server down after each spec: cancel the backend context
// first, then stop the HTTP server.
AfterEach(func() {
	cancel()
	if app == nil {
		return
	}
	Expect(app.Shutdown()).To(Succeed())
})
|
|
|
|
It("returns the models list", func() {
|
|
|
|
models, err := client.ListModels(context.TODO())
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
2024-11-27 15:34:28 +00:00
|
|
|
Expect(len(models.Models)).To(Equal(7)) // If "config.yaml" should be included, this should be 8?
|
2023-04-21 22:44:52 +00:00
|
|
|
})
|
2024-03-18 18:19:43 +00:00
|
|
|
It("can generate completions via ggml", func() {
|
|
|
|
resp, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "testmodel.ggml", Prompt: testPrompt})
|
2023-04-21 22:44:52 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Choices)).To(Equal(1))
|
|
|
|
Expect(resp.Choices[0].Text).ToNot(BeEmpty())
|
|
|
|
})
|
2023-04-27 04:18:18 +00:00
|
|
|
|
2024-03-18 18:19:43 +00:00
|
|
|
It("can generate chat completions via ggml", func() {
|
|
|
|
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "testmodel.ggml", Messages: []openai.ChatCompletionMessage{openai.ChatCompletionMessage{Role: "user", Content: testPrompt}}})
|
2023-04-27 04:18:18 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Choices)).To(Equal(1))
|
|
|
|
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
|
|
|
|
})
|
|
|
|
|
2023-04-24 21:42:03 +00:00
|
|
|
It("returns errors", func() {
|
2024-02-08 19:12:51 +00:00
|
|
|
_, err := client.CreateCompletion(context.TODO(), openai.CompletionRequest{Model: "foomodel", Prompt: testPrompt})
|
2023-04-24 21:42:03 +00:00
|
|
|
Expect(err).To(HaveOccurred())
|
2024-05-04 15:56:12 +00:00
|
|
|
Expect(err.Error()).To(ContainSubstring("error, status code: 500, message: could not load model - all backends returned error:"))
|
2023-04-24 21:42:03 +00:00
|
|
|
})
|
2024-05-04 15:56:12 +00:00
|
|
|
|
2024-09-05 18:44:30 +00:00
|
|
|
It("shows the external backend", func() {
|
|
|
|
// do an http request to the /system endpoint
|
|
|
|
resp, err := http.Get("http://127.0.0.1:9090/system")
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(resp.StatusCode).To(Equal(200))
|
|
|
|
dat, err := io.ReadAll(resp.Body)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(string(dat)).To(ContainSubstring("huggingface"))
|
|
|
|
Expect(string(dat)).To(ContainSubstring("llama-cpp"))
|
|
|
|
})
|
|
|
|
|
2023-05-12 12:10:18 +00:00
|
|
|
It("transcribes audio", func() {
|
|
|
|
if runtime.GOOS != "linux" {
|
|
|
|
Skip("test supported only on linux")
|
|
|
|
}
|
2023-05-12 08:04:20 +00:00
|
|
|
resp, err := client.CreateTranscription(
|
|
|
|
context.Background(),
|
|
|
|
openai.AudioRequest{
|
|
|
|
Model: openai.Whisper1,
|
|
|
|
FilePath: filepath.Join(os.Getenv("TEST_DIR"), "audio.wav"),
|
|
|
|
},
|
|
|
|
)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(resp.Text).To(ContainSubstring("This is the Micro Machine Man presenting"))
|
|
|
|
})
|
2023-05-12 15:16:49 +00:00
|
|
|
|
|
|
|
It("calculate embeddings", func() {
|
|
|
|
if runtime.GOOS != "linux" {
|
|
|
|
Skip("test supported only on linux")
|
|
|
|
}
|
|
|
|
resp, err := client.CreateEmbeddings(
|
|
|
|
context.Background(),
|
|
|
|
openai.EmbeddingRequest{
|
|
|
|
Model: openai.AdaEmbeddingV2,
|
|
|
|
Input: []string{"sun", "cat"},
|
|
|
|
},
|
|
|
|
)
|
2023-10-29 21:04:43 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred(), err)
|
2024-11-27 15:34:28 +00:00
|
|
|
Expect(len(resp.Data[0].Embedding)).To(BeNumerically("==", 2048))
|
|
|
|
Expect(len(resp.Data[1].Embedding)).To(BeNumerically("==", 2048))
|
2023-05-12 15:16:49 +00:00
|
|
|
|
|
|
|
sunEmbedding := resp.Data[0].Embedding
|
|
|
|
resp2, err := client.CreateEmbeddings(
|
|
|
|
context.Background(),
|
|
|
|
openai.EmbeddingRequest{
|
|
|
|
Model: openai.AdaEmbeddingV2,
|
|
|
|
Input: []string{"sun"},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(resp2.Data[0].Embedding).To(Equal(sunEmbedding))
|
|
|
|
})
|
2023-05-15 06:15:01 +00:00
|
|
|
|
2023-07-20 22:52:43 +00:00
|
|
|
Context("External gRPC calls", func() {
|
2023-11-20 20:21:17 +00:00
|
|
|
It("calculate embeddings with sentencetransformers", func() {
|
2023-07-20 22:52:43 +00:00
|
|
|
if runtime.GOOS != "linux" {
|
|
|
|
Skip("test supported only on linux")
|
|
|
|
}
|
|
|
|
resp, err := client.CreateEmbeddings(
|
|
|
|
context.Background(),
|
|
|
|
openai.EmbeddingRequest{
|
|
|
|
Model: openai.AdaCodeSearchCode,
|
|
|
|
Input: []string{"sun", "cat"},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Data[0].Embedding)).To(BeNumerically("==", 384))
|
|
|
|
Expect(len(resp.Data[1].Embedding)).To(BeNumerically("==", 384))
|
|
|
|
|
|
|
|
sunEmbedding := resp.Data[0].Embedding
|
|
|
|
resp2, err := client.CreateEmbeddings(
|
|
|
|
context.Background(),
|
|
|
|
openai.EmbeddingRequest{
|
|
|
|
Model: openai.AdaCodeSearchCode,
|
|
|
|
Input: []string{"sun"},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(resp2.Data[0].Embedding).To(Equal(sunEmbedding))
|
|
|
|
Expect(resp2.Data[0].Embedding).ToNot(Equal(resp.Data[1].Embedding))
|
|
|
|
})
|
|
|
|
})
|
|
|
|
|
2024-03-22 20:14:04 +00:00
|
|
|
// See tests/integration/stores_test
Context("Stores", Label("stores"), func() {
	// Exercises the full lifecycle of the vector-store HTTP API:
	// set -> get -> delete -> find.
	It("sets, gets, finds and deletes entries", func() {
		// Three 3-dimensional keys, each mapped to one string value.
		ks := [][]float32{
			{0.1, 0.2, 0.3},
			{0.4, 0.5, 0.6},
			{0.7, 0.8, 0.9},
		}
		vs := []string{
			"test1",
			"test2",
			"test3",
		}
		setBody := schema.StoresSet{
			Keys:   ks,
			Values: vs,
		}

		url := "http://127.0.0.1:9090/stores/"
		err := postRequestJSON(url+"set", &setBody)
		Expect(err).ToNot(HaveOccurred())

		getBody := schema.StoresGet{
			Keys: ks,
		}
		var getRespBody schema.StoresGetResponse
		err = postRequestResponseJSON(url+"get", &getBody, &getRespBody)
		Expect(err).ToNot(HaveOccurred())
		Expect(len(getRespBody.Keys)).To(Equal(len(ks)))

		// Returned entries are not guaranteed to be in insertion order, so
		// match each key back to its value by the first vector component.
		for i, v := range getRespBody.Keys {
			if v[0] == 0.1 {
				Expect(getRespBody.Values[i]).To(Equal("test1"))
			} else if v[0] == 0.4 {
				Expect(getRespBody.Values[i]).To(Equal("test2"))
			} else {
				Expect(getRespBody.Values[i]).To(Equal("test3"))
			}
		}

		// Remove the first key; only two entries remain for the find below.
		deleteBody := schema.StoresDelete{
			Keys: [][]float32{
				{0.1, 0.2, 0.3},
			},
		}
		err = postRequestJSON(url+"delete", &deleteBody)
		Expect(err).ToNot(HaveOccurred())

		// Top-k similarity search over the remaining two entries.
		findBody := schema.StoresFind{
			Key:  []float32{0.1, 0.3, 0.7},
			Topk: 10,
		}

		var findRespBody schema.StoresFindResponse
		err = postRequestResponseJSON(url+"find", &findBody, &findRespBody)
		Expect(err).ToNot(HaveOccurred())
		Expect(len(findRespBody.Keys)).To(Equal(2))

		for i, v := range findRespBody.Keys {
			if v[0] == 0.4 {
				Expect(findRespBody.Values[i]).To(Equal("test2"))
			} else {
				Expect(findRespBody.Values[i]).To(Equal("test3"))
			}

			// Similarities are expected in [-1, 1] (cosine-style range —
			// the exact metric is defined by the store implementation).
			Expect(findRespBody.Similarities[i]).To(BeNumerically(">=", -1))
			Expect(findRespBody.Similarities[i]).To(BeNumerically("<=", 1))
		}
	})
})
|
2023-04-27 04:18:18 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
Context("Config file", func() {
|
|
|
|
// Boots a LocalAI instance configured from CONFIG_FILE before each spec
// and waits until the API answers.
BeforeEach(func() {
	// MODELS_PATH points at the fixture models used by these specs.
	modelPath := os.Getenv("MODELS_PATH")
	c, cancel = context.WithCancel(context.Background())

	var err error
	// fix: name the local differently from the imported "application"
	// package so the package is not shadowed.
	testApplication, err := application.New(
		append(commonOpts,
			config.WithContext(c),
			config.WithModelPath(modelPath),
			config.WithConfigFile(os.Getenv("CONFIG_FILE")))...,
	)
	Expect(err).ToNot(HaveOccurred())
	app, err = API(testApplication)
	Expect(err).ToNot(HaveOccurred())

	go app.Listen("127.0.0.1:9090")

	defaultConfig := openai.DefaultConfig("")
	defaultConfig.BaseURL = "http://127.0.0.1:9090/v1"
	client2 = openaigo.NewClient("")
	client2.BaseURL = defaultConfig.BaseURL
	// Wait for API to be ready
	client = openai.NewClientWithConfig(defaultConfig)
	Eventually(func() error {
		_, err := client.ListModels(context.TODO())
		return err
	}, "2m").ShouldNot(HaveOccurred())
})
|
|
|
|
// Tears the server down after each spec: cancel the backend context
// first, then stop the HTTP server.
AfterEach(func() {
	cancel()
	if app == nil {
		return
	}
	Expect(app.Shutdown()).To(Succeed())
})
|
2023-07-31 17:14:32 +00:00
|
|
|
It("can generate chat completions from config file (list1)", func() {
|
2024-02-08 19:12:51 +00:00
|
|
|
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "list1", Messages: []openai.ChatCompletionMessage{{Role: "user", Content: testPrompt}}})
|
2023-04-27 04:18:18 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Choices)).To(Equal(1))
|
|
|
|
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
|
|
|
|
})
|
2023-07-31 17:14:32 +00:00
|
|
|
It("can generate chat completions from config file (list2)", func() {
|
2024-02-08 19:12:51 +00:00
|
|
|
resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: "list2", Messages: []openai.ChatCompletionMessage{{Role: "user", Content: testPrompt}}})
|
2023-04-27 04:18:18 +00:00
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Choices)).To(Equal(1))
|
|
|
|
Expect(resp.Choices[0].Message.Content).ToNot(BeEmpty())
|
|
|
|
})
|
2023-04-29 07:22:09 +00:00
|
|
|
It("can generate edit completions from config file", func() {
|
|
|
|
request := openaigo.EditCreateRequestBody{
|
|
|
|
Model: "list2",
|
|
|
|
Instruction: "foo",
|
|
|
|
Input: "bar",
|
|
|
|
}
|
|
|
|
resp, err := client2.CreateEdit(context.Background(), request)
|
|
|
|
Expect(err).ToNot(HaveOccurred())
|
|
|
|
Expect(len(resp.Choices)).To(Equal(1))
|
|
|
|
Expect(resp.Choices[0].Text).ToNot(BeEmpty())
|
|
|
|
})
|
2023-05-12 08:04:20 +00:00
|
|
|
|
2023-04-21 22:44:52 +00:00
|
|
|
})
|
|
|
|
})
|