diff --git a/api/openai/chat.go b/api/openai/chat.go
index 9c4a956f..955f48c2 100644
--- a/api/openai/chat.go
+++ b/api/openai/chat.go
@@ -6,6 +6,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"strings"
+	"time"
 
 	"github.com/go-skynet/LocalAI/api/backend"
 	config "github.com/go-skynet/LocalAI/api/config"
@@ -15,15 +16,20 @@ import (
 	model "github.com/go-skynet/LocalAI/pkg/model"
 	"github.com/go-skynet/LocalAI/pkg/utils"
 	"github.com/gofiber/fiber/v2"
+	"github.com/google/uuid"
 	"github.com/rs/zerolog/log"
 	"github.com/valyala/fasthttp"
 )
 
 func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
 	emptyMessage := ""
+	id := uuid.New().String()
+	created := int(time.Now().Unix())
 
 	process := func(s string, req *schema.OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan schema.OpenAIResponse) {
 		initialMessage := schema.OpenAIResponse{
+			ID:      id,
+			Created: created,
 			Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
 			Choices: []schema.Choice{{Delta: &schema.Message{Role: "assistant", Content: &emptyMessage}}},
 			Object:  "chat.completion.chunk",
@@ -32,6 +38,8 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 
 		ComputeChoices(req, s, config, o, loader, func(s string, c *[]schema.Choice) {}, func(s string, usage backend.TokenUsage) bool {
 			resp := schema.OpenAIResponse{
+				ID:      id,
+				Created: created,
 				Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
 				Choices: []schema.Choice{{Delta: &schema.Message{Content: &s}, Index: 0}},
 				Object:  "chat.completion.chunk",
@@ -261,7 +269,9 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 		}
 
 		resp := &schema.OpenAIResponse{
-			Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
+			ID:      id,
+			Created: created,
+			Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
 			Choices: []schema.Choice{
 				{
 					FinishReason: "stop",
@@ -355,6 +365,8 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 	}
 
 	resp := &schema.OpenAIResponse{
+		ID:      id,
+		Created: created,
 		Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
 		Choices: result,
 		Object:  "chat.completion",
diff --git a/api/openai/completion.go b/api/openai/completion.go
index 00ddd910..5f20fa49 100644
--- a/api/openai/completion.go
+++ b/api/openai/completion.go
@@ -6,6 +6,7 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"time"
 
 	"github.com/go-skynet/LocalAI/api/backend"
 	config "github.com/go-skynet/LocalAI/api/config"
@@ -13,16 +14,22 @@ import (
 	"github.com/go-skynet/LocalAI/api/schema"
 	model "github.com/go-skynet/LocalAI/pkg/model"
 	"github.com/gofiber/fiber/v2"
+	"github.com/google/uuid"
 	"github.com/rs/zerolog/log"
 	"github.com/valyala/fasthttp"
 )
 
 // https://platform.openai.com/docs/api-reference/completions
 func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
+	id := uuid.New().String()
+	created := int(time.Now().Unix())
+
 	process := func(s string, req *schema.OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan schema.OpenAIResponse) {
 		ComputeChoices(req, s, config, o, loader, func(s string, c *[]schema.Choice) {}, func(s string, usage backend.TokenUsage) bool {
 			resp := schema.OpenAIResponse{
-				Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
+				ID:      id,
+				Created: created,
+				Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
 				Choices: []schema.Choice{
 					{
 						Index: 0,
@@ -108,7 +115,9 @@ func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fibe
 		}
 
 		resp := &schema.OpenAIResponse{
-			Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
+			ID:      id,
+			Created: created,
+			Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
 			Choices: []schema.Choice{
 				{
 					Index: 0,
@@ -156,6 +165,8 @@ func CompletionEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fibe
 	}
 
 	resp := &schema.OpenAIResponse{
+		ID:      id,
+		Created: created,
 		Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
 		Choices: result,
 		Object:  "text_completion",
diff --git a/api/openai/edit.go b/api/openai/edit.go
index 8a89ab28..088f0035 100644
--- a/api/openai/edit.go
+++ b/api/openai/edit.go
@@ -3,6 +3,7 @@ package openai
 import (
 	"encoding/json"
 	"fmt"
+	"time"
 
 	"github.com/go-skynet/LocalAI/api/backend"
 	config "github.com/go-skynet/LocalAI/api/config"
@@ -10,6 +11,7 @@ import (
 	"github.com/go-skynet/LocalAI/api/schema"
 	model "github.com/go-skynet/LocalAI/pkg/model"
 	"github.com/gofiber/fiber/v2"
+	"github.com/google/uuid"
 	"github.com/rs/zerolog/log"
 )
 
@@ -62,7 +64,11 @@ func EditEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 			result = append(result, r...)
 		}
 
+		id := uuid.New().String()
+		created := int(time.Now().Unix())
 		resp := &schema.OpenAIResponse{
+			ID:      id,
+			Created: created,
 			Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
 			Choices: result,
 			Object:  "edit",
diff --git a/api/openai/embeddings.go b/api/openai/embeddings.go
index 37b0a079..15e31e92 100644
--- a/api/openai/embeddings.go
+++ b/api/openai/embeddings.go
@@ -3,10 +3,12 @@ package openai
 import (
 	"encoding/json"
 	"fmt"
+	"time"
 
 	"github.com/go-skynet/LocalAI/api/backend"
 	config "github.com/go-skynet/LocalAI/api/config"
 	"github.com/go-skynet/LocalAI/api/schema"
+	"github.com/google/uuid"
 
 	"github.com/go-skynet/LocalAI/api/options"
 	"github.com/gofiber/fiber/v2"
@@ -57,10 +59,14 @@ func EmbeddingsEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fibe
 		items = append(items, schema.Item{Embedding: embeddings, Index: i, Object: "embedding"})
 	}
 
+	id := uuid.New().String()
+	created := int(time.Now().Unix())
 	resp := &schema.OpenAIResponse{
-		Model:  input.Model, // we have to return what the user sent here, due to OpenAI spec.
-		Data:   items,
-		Object: "list",
+		ID:      id,
+		Created: created,
+		Model:   input.Model, // we have to return what the user sent here, due to OpenAI spec.
+		Data:    items,
+		Object:  "list",
 	}
 
 	jsonResult, _ := json.Marshal(resp)
diff --git a/api/openai/image.go b/api/openai/image.go
index 9ab8fd3a..8243e167 100644
--- a/api/openai/image.go
+++ b/api/openai/image.go
@@ -5,11 +5,14 @@ import (
 	"encoding/base64"
 	"encoding/json"
 	"fmt"
-	"github.com/go-skynet/LocalAI/api/schema"
 	"os"
 	"path/filepath"
 	"strconv"
 	"strings"
+	"time"
+
+	"github.com/go-skynet/LocalAI/api/schema"
+	"github.com/google/uuid"
 
 	"github.com/go-skynet/LocalAI/api/backend"
 	config "github.com/go-skynet/LocalAI/api/config"
@@ -174,8 +177,12 @@ func ImageEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx
 		}
 	}
 
+	id := uuid.New().String()
+	created := int(time.Now().Unix())
 	resp := &schema.OpenAIResponse{
-		Data: result,
+		ID:      id,
+		Created: created,
+		Data:    result,
 	}
 
 	jsonResult, _ := json.Marshal(resp)
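
For reference, below is a minimal, self-contained sketch of the pattern this patch applies in each endpoint: generate one UUID with github.com/google/uuid and one Unix timestamp, stamp them onto the response as ID and Created, and marshal the result. The openAIResponse struct here is only a trimmed-down stand-in for schema.OpenAIResponse (the real type lives in api/schema and carries Choices, Usage, and more), and the model string is a placeholder.

package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/google/uuid"
)

// openAIResponse is a simplified stand-in for schema.OpenAIResponse,
// kept here only to illustrate the ID and Created fields added by the diff.
type openAIResponse struct {
	ID      string `json:"id"`
	Created int    `json:"created"`
	Model   string `json:"model"`
	Object  string `json:"object"`
}

func main() {
	// Same pattern as the endpoints above: one UUID and one Unix timestamp,
	// reused for every chunk/choice belonging to the same response.
	id := uuid.New().String()
	created := int(time.Now().Unix())

	resp := openAIResponse{
		ID:      id,
		Created: created,
		Model:   "gpt-3.5-turbo", // echoed back as-is, per the OpenAI-spec comment in the diff
		Object:  "chat.completion",
	}

	out, _ := json.Marshal(resp)
	fmt.Println(string(out))
	// Example output (values vary per run):
	// {"id":"7f9d...","created":1699999999,"model":"gpt-3.5-turbo","object":"chat.completion"}
}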