diff --git a/core/http/endpoints/localai/backend_monitor.go b/core/http/endpoints/localai/backend_monitor.go index b211d7bb..fa11b5c3 100644 --- a/core/http/endpoints/localai/backend_monitor.go +++ b/core/http/endpoints/localai/backend_monitor.go @@ -6,6 +6,11 @@ import ( "github.com/mudler/LocalAI/core/services" ) +// BackendMonitorEndpoint returns the status of the specified backend +// @Summary Backend monitor endpoint +// @Param request body schema.BackendMonitorRequest true "Backend statistics request" +// @Success 200 {object} proto.StatusResponse "Response" +// @Router /backend/monitor [get] func BackendMonitorEndpoint(bm *services.BackendMonitorService) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { @@ -23,6 +28,10 @@ func BackendMonitorEndpoint(bm *services.BackendMonitorService) func(c *fiber.Ct } } +// BackendShutdownEndpoint shuts down the specified backend +// @Summary Backend shutdown endpoint +// @Param request body schema.BackendMonitorRequest true "Backend statistics request" +// @Router /backend/shutdown [post] func BackendShutdownEndpoint(bm *services.BackendMonitorService) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { input := new(schema.BackendMonitorRequest) diff --git a/core/http/endpoints/localai/metrics.go b/core/http/endpoints/localai/metrics.go index a7e6c99d..8fcc0a7a 100644 --- a/core/http/endpoints/localai/metrics.go +++ b/core/http/endpoints/localai/metrics.go @@ -9,8 +9,11 @@ import ( "github.com/prometheus/client_golang/prometheus/promhttp" ) +// LocalAIMetricsEndpoint returns the metrics endpoint for LocalAI +// @Summary Prometheus metrics endpoint +// @Success 200 {string} string "Prometheus metrics in text exposition format" +// @Router /metrics [get] func LocalAIMetricsEndpoint() fiber.Handler { - return adaptor.HTTPHandler(promhttp.Handler()) } diff --git a/core/http/endpoints/openai/assistant.go b/core/http/endpoints/openai/assistant.go index ba2ebcde..ff218730 100644 --- a/core/http/endpoints/openai/assistant.go +++ 
b/core/http/endpoints/openai/assistant.go @@ -11,6 +11,7 @@ import ( "github.com/gofiber/fiber/v2" "github.com/mudler/LocalAI/core/config" + "github.com/mudler/LocalAI/core/schema" "github.com/mudler/LocalAI/core/services" model "github.com/mudler/LocalAI/pkg/model" "github.com/mudler/LocalAI/pkg/utils" @@ -125,6 +126,14 @@ func generateRandomID() int64 { return currentId } +// ListAssistantsEndpoint is the OpenAI Assistant API endpoint to list assistants https://platform.openai.com/docs/api-reference/assistants/listAssistants +// @Summary List available assistants +// @Param limit query int false "Limit the number of assistants returned" +// @Param order query string false "Order of assistants returned" +// @Param after query string false "Return assistants created after the given ID" +// @Param before query string false "Return assistants created before the given ID" +// @Success 200 {object} []Assistant "Response" +// @Router /v1/assistants [get] func ListAssistantsEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { // Because we're altering the existing assistants list we should just duplicate it for now. 
@@ -230,13 +239,11 @@ func modelExists(cl *config.BackendConfigLoader, ml *model.ModelLoader, modelNam return } +// DeleteAssistantEndpoint is the OpenAI Assistant API endpoint to delete assistants https://platform.openai.com/docs/api-reference/assistants/deleteAssistant +// @Summary Delete assistants +// @Success 200 {object} schema.DeleteAssistantResponse "Response" +// @Router /v1/assistants/{assistant_id} [delete] func DeleteAssistantEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { - type DeleteAssistantResponse struct { - ID string `json:"id"` - Object string `json:"object"` - Deleted bool `json:"deleted"` - } - return func(c *fiber.Ctx) error { assistantID := c.Params("assistant_id") if assistantID == "" { @@ -247,7 +254,7 @@ func DeleteAssistantEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoad if assistant.ID == assistantID { Assistants = append(Assistants[:i], Assistants[i+1:]...) utils.SaveConfig(appConfig.ConfigsDir, AssistantsConfigFile, Assistants) - return c.Status(fiber.StatusOK).JSON(DeleteAssistantResponse{ + return c.Status(fiber.StatusOK).JSON(schema.DeleteAssistantResponse{ ID: assistantID, Object: "assistant.deleted", Deleted: true, @@ -256,7 +263,7 @@ func DeleteAssistantEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoad } log.Warn().Msgf("Unable to find assistant %s for deletion", assistantID) - return c.Status(fiber.StatusNotFound).JSON(DeleteAssistantResponse{ + return c.Status(fiber.StatusNotFound).JSON(schema.DeleteAssistantResponse{ ID: assistantID, Object: "assistant.deleted", Deleted: false, @@ -264,6 +271,10 @@ func DeleteAssistantEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoad } } +// GetAssistantEndpoint is the OpenAI Assistant API endpoint to get assistants https://platform.openai.com/docs/api-reference/assistants/getAssistant +// @Summary Get assistant data +// @Success 200 {object} Assistant "Response" +// @Router 
/v1/assistants/{assistant_id} [get] func GetAssistantEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { assistantID := c.Params("assistant_id") @@ -293,19 +304,9 @@ var ( AssistantsFileConfigFile = "assistantsFile.json" ) -type AssistantFileRequest struct { - FileID string `json:"file_id"` -} - -type DeleteAssistantFileResponse struct { - ID string `json:"id"` - Object string `json:"object"` - Deleted bool `json:"deleted"` -} - func CreateAssistantFileEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { - request := new(AssistantFileRequest) + request := new(schema.AssistantFileRequest) if err := c.BodyParser(request); err != nil { return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Cannot parse JSON"}) } @@ -346,7 +347,7 @@ func CreateAssistantFileEndpoint(cl *config.BackendConfigLoader, ml *model.Model func ListAssistantFilesEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { type ListAssistantFiles struct { - Data []File + Data []schema.File Object string } @@ -464,7 +465,7 @@ func DeleteAssistantFileEndpoint(cl *config.BackendConfigLoader, ml *model.Model // Remove the file from the assistantFiles slice AssistantFiles = append(AssistantFiles[:i], AssistantFiles[i+1:]...) utils.SaveConfig(appConfig.ConfigsDir, AssistantsFileConfigFile, AssistantFiles) - return c.Status(fiber.StatusOK).JSON(DeleteAssistantFileResponse{ + return c.Status(fiber.StatusOK).JSON(schema.DeleteAssistantFileResponse{ ID: fileId, Object: "assistant.file.deleted", Deleted: true, @@ -480,7 +481,7 @@ func DeleteAssistantFileEndpoint(cl *config.BackendConfigLoader, ml *model.Model AssistantFiles = append(AssistantFiles[:i], AssistantFiles[i+1:]...) 
utils.SaveConfig(appConfig.ConfigsDir, AssistantsFileConfigFile, AssistantFiles) - return c.Status(fiber.StatusNotFound).JSON(DeleteAssistantFileResponse{ + return c.Status(fiber.StatusNotFound).JSON(schema.DeleteAssistantFileResponse{ ID: fileId, Object: "assistant.file.deleted", Deleted: true, @@ -491,7 +492,7 @@ func DeleteAssistantFileEndpoint(cl *config.BackendConfigLoader, ml *model.Model } log.Warn().Msgf("Unable to find assistant: %s", assistantID) - return c.Status(fiber.StatusNotFound).JSON(DeleteAssistantFileResponse{ + return c.Status(fiber.StatusNotFound).JSON(schema.DeleteAssistantFileResponse{ ID: fileId, Object: "assistant.file.deleted", Deleted: false, diff --git a/core/http/endpoints/openai/assistant_test.go b/core/http/endpoints/openai/assistant_test.go index 7d6c0c06..6858f65d 100644 --- a/core/http/endpoints/openai/assistant_test.go +++ b/core/http/endpoints/openai/assistant_test.go @@ -14,6 +14,7 @@ import ( "github.com/gofiber/fiber/v2" "github.com/mudler/LocalAI/core/config" + "github.com/mudler/LocalAI/core/schema" "github.com/mudler/LocalAI/pkg/model" "github.com/stretchr/testify/assert" ) @@ -26,7 +27,7 @@ type MockLoader struct { func tearDown() func() { return func() { - UploadedFiles = []File{} + UploadedFiles = []schema.File{} Assistants = []Assistant{} AssistantFiles = []AssistantFile{} _ = os.Remove(filepath.Join(configsDir, AssistantsConfigFile)) @@ -294,7 +295,7 @@ func TestAssistantEndpoints(t *testing.T) { file, assistant, err := createFileAndAssistant(t, app, appConfig) assert.NoError(t, err) - afr := AssistantFileRequest{FileID: file.ID} + afr := schema.AssistantFileRequest{FileID: file.ID} af, _, err := createAssistantFile(app, afr, assistant.ID) assert.NoError(t, err) @@ -305,7 +306,7 @@ func TestAssistantEndpoints(t *testing.T) { file, assistant, err := createFileAndAssistant(t, app, appConfig) assert.NoError(t, err) - afr := AssistantFileRequest{FileID: file.ID} + afr := schema.AssistantFileRequest{FileID: file.ID} af, _, 
err := createAssistantFile(app, afr, assistant.ID) assert.NoError(t, err) @@ -316,7 +317,7 @@ func TestAssistantEndpoints(t *testing.T) { file, assistant, err := createFileAndAssistant(t, app, appConfig) assert.NoError(t, err) - afr := AssistantFileRequest{FileID: file.ID} + afr := schema.AssistantFileRequest{FileID: file.ID} af, _, err := createAssistantFile(app, afr, assistant.ID) assert.NoError(t, err) t.Cleanup(cleanupAssistantFile(t, app, af.ID, af.AssistantID)) @@ -338,7 +339,7 @@ func TestAssistantEndpoints(t *testing.T) { file, assistant, err := createFileAndAssistant(t, app, appConfig) assert.NoError(t, err) - afr := AssistantFileRequest{FileID: file.ID} + afr := schema.AssistantFileRequest{FileID: file.ID} af, _, err := createAssistantFile(app, afr, assistant.ID) assert.NoError(t, err) @@ -349,7 +350,7 @@ func TestAssistantEndpoints(t *testing.T) { } -func createFileAndAssistant(t *testing.T, app *fiber.App, o *config.ApplicationConfig) (File, Assistant, error) { +func createFileAndAssistant(t *testing.T, app *fiber.App, o *config.ApplicationConfig) (schema.File, Assistant, error) { ar := &AssistantRequest{ Model: "ggml-gpt4all-j", Name: "3.5-turbo", @@ -362,7 +363,7 @@ func createFileAndAssistant(t *testing.T, app *fiber.App, o *config.ApplicationC assistant, _, err := createAssistant(app, *ar) if err != nil { - return File{}, Assistant{}, err + return schema.File{}, Assistant{}, err } t.Cleanup(cleanupAllAssistants(t, app, []string{assistant.ID})) @@ -374,7 +375,7 @@ func createFileAndAssistant(t *testing.T, app *fiber.App, o *config.ApplicationC return file, assistant, nil } -func createAssistantFile(app *fiber.App, afr AssistantFileRequest, assistantId string) (AssistantFile, *http.Response, error) { +func createAssistantFile(app *fiber.App, afr schema.AssistantFileRequest, assistantId string) (AssistantFile, *http.Response, error) { afrJson, err := json.Marshal(afr) if err != nil { return AssistantFile{}, nil, err @@ -451,7 +452,7 @@ func 
cleanupAssistantFile(t *testing.T, app *fiber.App, fileId, assistantId stri resp, err := app.Test(request) assert.NoError(t, err) - var dafr DeleteAssistantFileResponse + var dafr schema.DeleteAssistantFileResponse err = json.NewDecoder(resp.Body).Decode(&dafr) assert.NoError(t, err) assert.True(t, dafr.Deleted) diff --git a/core/http/endpoints/openai/edit.go b/core/http/endpoints/openai/edit.go index bb43ac3b..12fb4035 100644 --- a/core/http/endpoints/openai/edit.go +++ b/core/http/endpoints/openai/edit.go @@ -16,6 +16,11 @@ import ( "github.com/rs/zerolog/log" ) +// EditEndpoint is the OpenAI edit API endpoint +// @Summary OpenAI edit endpoint +// @Param request body schema.OpenAIRequest true "query params" +// @Success 200 {object} schema.OpenAIResponse "Response" +// @Router /v1/edits [post] func EditEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { modelFile, input, err := readRequest(c, cl, ml, appConfig, true) diff --git a/core/http/endpoints/openai/files.go b/core/http/endpoints/openai/files.go index d7741580..903484b4 100644 --- a/core/http/endpoints/openai/files.go +++ b/core/http/endpoints/openai/files.go @@ -9,25 +9,16 @@ import ( "time" "github.com/mudler/LocalAI/core/config" + "github.com/mudler/LocalAI/core/schema" "github.com/gofiber/fiber/v2" "github.com/mudler/LocalAI/pkg/utils" ) -var UploadedFiles []File +var UploadedFiles []schema.File const UploadedFilesFile = "uploadedFiles.json" -// File represents the structure of a file object from the OpenAI API. 
-type File struct { - ID string `json:"id"` // Unique identifier for the file - Object string `json:"object"` // Type of the object (e.g., "file") - Bytes int `json:"bytes"` // Size of the file in bytes - CreatedAt time.Time `json:"created_at"` // The time at which the file was created - Filename string `json:"filename"` // The name of the file - Purpose string `json:"purpose"` // The purpose of the file (e.g., "fine-tune", "classifications", etc.) -} - // UploadFilesEndpoint https://platform.openai.com/docs/api-reference/files/create func UploadFilesEndpoint(cm *config.BackendConfigLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { @@ -61,7 +52,7 @@ func UploadFilesEndpoint(cm *config.BackendConfigLoader, appConfig *config.Appli return c.Status(fiber.StatusInternalServerError).SendString("Failed to save file: " + err.Error()) } - f := File{ + f := schema.File{ ID: fmt.Sprintf("file-%d", getNextFileId()), Object: "file", Bytes: int(file.Size), @@ -84,14 +75,13 @@ func getNextFileId() int64 { } // ListFilesEndpoint https://platform.openai.com/docs/api-reference/files/list +// @Summary List files. 
+// @Success 200 {object} schema.ListFiles "Response" +// @Router /v1/files [get] func ListFilesEndpoint(cm *config.BackendConfigLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { - type ListFiles struct { - Data []File - Object string - } return func(c *fiber.Ctx) error { - var listFiles ListFiles + var listFiles schema.ListFiles purpose := c.Query("purpose") if purpose == "" { @@ -108,7 +98,7 @@ func ListFilesEndpoint(cm *config.BackendConfigLoader, appConfig *config.Applica } } -func getFileFromRequest(c *fiber.Ctx) (*File, error) { +func getFileFromRequest(c *fiber.Ctx) (*schema.File, error) { id := c.Params("file_id") if id == "" { return nil, fmt.Errorf("file_id parameter is required") @@ -125,7 +115,7 @@ func getFileFromRequest(c *fiber.Ctx) (*File, error) { // GetFilesEndpoint is the OpenAI API endpoint to get files https://platform.openai.com/docs/api-reference/files/retrieve // @Summary Returns information about a specific file. -// @Success 200 {object} File "Response" +// @Success 200 {object} schema.File "Response" // @Router /v1/files/{file_id} [get] func GetFilesEndpoint(cm *config.BackendConfigLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error { return func(c *fiber.Ctx) error { diff --git a/core/http/endpoints/openai/files_test.go b/core/http/endpoints/openai/files_test.go index 80b01eb9..b8cb5da3 100644 --- a/core/http/endpoints/openai/files_test.go +++ b/core/http/endpoints/openai/files_test.go @@ -14,6 +14,7 @@ import ( "github.com/rs/zerolog/log" "github.com/mudler/LocalAI/core/config" + "github.com/mudler/LocalAI/core/schema" "github.com/gofiber/fiber/v2" utils2 "github.com/mudler/LocalAI/pkg/utils" @@ -22,11 +23,6 @@ import ( "testing" ) -type ListFiles struct { - Data []File - Object string -} - func startUpApp() (app *fiber.App, option *config.ApplicationConfig, loader *config.BackendConfigLoader) { // Preparing the mocked objects loader = &config.BackendConfigLoader{} @@ -159,7 +155,7 @@ func 
TestUploadFileExceedSizeLimit(t *testing.T) { resp, _ := app.Test(req) assert.Equal(t, 200, resp.StatusCode) - var listFiles ListFiles + var listFiles schema.ListFiles if err := json.Unmarshal(bodyToByteArray(resp, t), &listFiles); err != nil { t.Errorf("Failed to decode response: %v", err) return @@ -201,7 +197,7 @@ func CallFilesUploadEndpoint(t *testing.T, app *fiber.App, fileName, tag, purpos return app.Test(req) } -func CallFilesUploadEndpointWithCleanup(t *testing.T, app *fiber.App, fileName, tag, purpose string, fileSize int, appConfig *config.ApplicationConfig) File { +func CallFilesUploadEndpointWithCleanup(t *testing.T, app *fiber.App, fileName, tag, purpose string, fileSize int, appConfig *config.ApplicationConfig) schema.File { // Create a file that exceeds the limit testName := strings.Split(t.Name(), "/")[1] file := createTestFile(t, testName+"-"+fileName, fileSize, appConfig) @@ -280,8 +276,8 @@ func bodyToByteArray(resp *http.Response, t *testing.T) []byte { return bodyBytes } -func responseToFile(t *testing.T, resp *http.Response) File { - var file File +func responseToFile(t *testing.T, resp *http.Response) schema.File { + var file schema.File responseToString := bodyToString(resp, t) err := json.NewDecoder(strings.NewReader(responseToString)).Decode(&file) @@ -292,8 +288,8 @@ func responseToFile(t *testing.T, resp *http.Response) File { return file } -func responseToListFile(t *testing.T, resp *http.Response) ListFiles { - var listFiles ListFiles +func responseToListFile(t *testing.T, resp *http.Response) schema.ListFiles { + var listFiles schema.ListFiles responseToString := bodyToString(resp, t) err := json.NewDecoder(strings.NewReader(responseToString)).Decode(&listFiles) diff --git a/core/schema/openai.go b/core/schema/openai.go index 9735bb32..354b1f05 100644 --- a/core/schema/openai.go +++ b/core/schema/openai.go @@ -2,6 +2,7 @@ package schema import ( "context" + "time" functions "github.com/mudler/LocalAI/pkg/functions" ) @@ -99,6 +100,37 
@@ type OpenAIModel struct { Object string `json:"object"` } +type DeleteAssistantResponse struct { + ID string `json:"id"` + Object string `json:"object"` + Deleted bool `json:"deleted"` +} + +// File represents the structure of a file object from the OpenAI API. +type File struct { + ID string `json:"id"` // Unique identifier for the file + Object string `json:"object"` // Type of the object (e.g., "file") + Bytes int `json:"bytes"` // Size of the file in bytes + CreatedAt time.Time `json:"created_at"` // The time at which the file was created + Filename string `json:"filename"` // The name of the file + Purpose string `json:"purpose"` // The purpose of the file (e.g., "fine-tune", "classifications", etc.) +} + +type ListFiles struct { + Data []File + Object string +} + +type AssistantFileRequest struct { + FileID string `json:"file_id"` +} + +type DeleteAssistantFileResponse struct { + ID string `json:"id"` + Object string `json:"object"` + Deleted bool `json:"deleted"` +} + type ImageGenerationResponseFormat string type ChatCompletionResponseFormatType string