Files
LocalAI/core/http/routes/ui_gallery.go
Ettore Di Giacinto 2d64269763
Some checks failed
Explorer deployment / build-linux (push) Has been cancelled
GPU tests / ubuntu-latest (1.21.x) (push) Has been cancelled
generate and publish intel docker caches / generate_caches (intel/oneapi-basekit:2025.1.0-0-devel-ubuntu22.04, linux/amd64, ubuntu-latest) (push) Has been cancelled
build container images / hipblas-jobs (-aio-gpu-hipblas, rocm/dev-ubuntu-22.04:6.1, hipblas, true, ubuntu:22.04, core, latest-gpu-hipblas, latest-aio-gpu-hipblas, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, false, -hipblas) (push) Has been cancelled
build container images / core-image-build (-aio-cpu, ubuntu:22.04, , true, core, latest-cpu, latest-aio-cpu, --jobs=4 --output-sync=target, linux/amd64,linux/arm64, arc-runner-set, false, auto, ) (push) Has been cancelled
build container images / core-image-build (-aio-gpu-intel-f16, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, true, ubuntu:22.04, core, latest-gpu-intel-f16, latest-aio-gpu-intel-f16, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, false, -sycl-f16) (push) Has been cancelled
build container images / core-image-build (-aio-gpu-intel-f32, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, true, ubuntu:22.04, core, latest-gpu-intel-f32, latest-aio-gpu-intel-f32, --jobs=3 --output-sync=target, linux/amd64, arc-runner-set, false, -sycl-f32) (push) Has been cancelled
build container images / core-image-build (-aio-gpu-nvidia-cuda-11, ubuntu:22.04, cublas, 11, 7, true, core, latest-gpu-nvidia-cuda-11, latest-aio-gpu-nvidia-cuda-11, --jobs=4 --output-sync=target, linux/amd64, arc-runner-set, false, false, -cublas-cuda11) (push) Has been cancelled
build container images / core-image-build (-aio-gpu-nvidia-cuda-12, ubuntu:22.04, cublas, 12, 0, true, core, latest-gpu-nvidia-cuda-12, latest-aio-gpu-nvidia-cuda-12, --jobs=4 --output-sync=target, linux/amd64, arc-runner-set, false, false, -cublas-cuda12) (push) Has been cancelled
build container images / core-image-build (-aio-gpu-vulkan, ubuntu:22.04, vulkan, true, core, latest-gpu-vulkan, latest-aio-gpu-vulkan, --jobs=4 --output-sync=target, linux/amd64, arc-runner-set, false, false, -vulkan) (push) Has been cancelled
build container images / gh-runner (nvcr.io/nvidia/l4t-jetpack:r36.4.0, cublas, 12, 0, true, core, latest-nvidia-l4t-arm64, --jobs=4 --output-sync=target, linux/arm64, ubuntu-24.04-arm, true, false, -nvidia-l4t-arm64) (push) Has been cancelled
build python backend container images / backend-jobs (bark, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, , , latest-gpu-intel-sycl-f16-bark, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f16-bark) (push) Has been cancelled
build python backend container images / backend-jobs (bark, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-bark, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-bark) (push) Has been cancelled
build python backend container images / backend-jobs (bark, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-bark, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-bark) (push) Has been cancelled
build python backend container images / backend-jobs (bark, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-bark, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-bark) (push) Has been cancelled
build python backend container images / backend-jobs (bark, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-bark, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-bark) (push) Has been cancelled
build python backend container images / backend-jobs (chatterbox, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-chatterbox, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-chatterbox) (push) Has been cancelled
build python backend container images / backend-jobs (chatterbox, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-chatterbox, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-chatterbox) (push) Has been cancelled
build python backend container images / backend-jobs (coqui, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, , , latest-gpu-intel-sycl-f16-coqui, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f16-coqui) (push) Has been cancelled
build python backend container images / backend-jobs (coqui, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-coqui, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-coqui) (push) Has been cancelled
build python backend container images / backend-jobs (coqui, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-coqui, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-coqui) (push) Has been cancelled
build python backend container images / backend-jobs (coqui, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-coqui, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-coqui) (push) Has been cancelled
build python backend container images / backend-jobs (coqui, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-coqui, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-coqui) (push) Has been cancelled
build python backend container images / backend-jobs (diffusers, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-diffusers, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-diffusers) (push) Has been cancelled
build python backend container images / backend-jobs (diffusers, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-diffusers, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-diffusers) (push) Has been cancelled
build python backend container images / backend-jobs (diffusers, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-diffusers, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-diffusers) (push) Has been cancelled
build python backend container images / backend-jobs (diffusers, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-diffusers, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-diffusers) (push) Has been cancelled
build python backend container images / backend-jobs (faster-whisper, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, , , latest-gpu-intel-sycl-f16-faster-whisper, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f16-faster-whisper) (push) Has been cancelled
build python backend container images / backend-jobs (faster-whisper, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-faster-whisper, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-faster-whisper) (push) Has been cancelled
build python backend container images / backend-jobs (faster-whisper, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-faster-whisper, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-faster-whisper) (push) Has been cancelled
build python backend container images / backend-jobs (faster-whisper, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-faster-whisper, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-faster-whisper) (push) Has been cancelled
build python backend container images / backend-jobs (faster-whisper, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-faster-whisper, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-faster-whisper) (push) Has been cancelled
build python backend container images / backend-jobs (kokoro, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, , , latest-gpu-intel-sycl-f16-kokoro, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f16-kokoro) (push) Has been cancelled
build python backend container images / backend-jobs (kokoro, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-kokoro, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-kokoro) (push) Has been cancelled
build python backend container images / backend-jobs (kokoro, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-kokoro, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-kokoro) (push) Has been cancelled
build python backend container images / backend-jobs (kokoro, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-kokoro, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-kokoro) (push) Has been cancelled
build python backend container images / backend-jobs (kokoro, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-kokoro, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-kokoro) (push) Has been cancelled
build python backend container images / backend-jobs (rerankers, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, , , latest-gpu-intel-sycl-f16-rerankers, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f16-rerankers) (push) Has been cancelled
build python backend container images / backend-jobs (rerankers, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-rerankers, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-rerankers) (push) Has been cancelled
build python backend container images / backend-jobs (rerankers, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-rerankers, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-rerankers) (push) Has been cancelled
build python backend container images / backend-jobs (rerankers, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-rerankers, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-rerankers) (push) Has been cancelled
build python backend container images / backend-jobs (rerankers, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-rerankers, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-rerankers) (push) Has been cancelled
build python backend container images / backend-jobs (transformers, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, , , latest-gpu-intel-sycl-f16-transformers, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f16-transformers) (push) Has been cancelled
build python backend container images / backend-jobs (transformers, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-transformers, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-transformers) (push) Has been cancelled
build python backend container images / backend-jobs (transformers, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-transformers, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-transformers) (push) Has been cancelled
build python backend container images / backend-jobs (transformers, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-transformers, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-transformers) (push) Has been cancelled
build python backend container images / backend-jobs (transformers, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-transformers, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-transformers) (push) Has been cancelled
build python backend container images / backend-jobs (vllm, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f16, , , latest-gpu-intel-sycl-f16-vllm, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f16-vllm) (push) Has been cancelled
build python backend container images / backend-jobs (vllm, quay.io/go-skynet/intel-oneapi-base:latest, sycl_f32, , , latest-gpu-intel-sycl-f32-vllm, linux/amd64, arc-runner-set, true, -gpu-intel-sycl-f32-vllm) (push) Has been cancelled
build python backend container images / backend-jobs (vllm, rocm/dev-ubuntu-22.04:6.1, hipblas, , , latest-gpu-rocm-hipblas-vllm, linux/amd64, arc-runner-set, true, -gpu-rocm-hipblas-vllm) (push) Has been cancelled
build python backend container images / backend-jobs (vllm, ubuntu:22.04, cublas, 11, 7, latest-gpu-nvidia-cuda-11-vllm, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-11-vllm) (push) Has been cancelled
build python backend container images / backend-jobs (vllm, ubuntu:22.04, cublas, 12, 0, latest-gpu-nvidia-cuda-12-vllm, linux/amd64, arc-runner-set, true, -gpu-nvidia-cuda-12-vllm) (push) Has been cancelled
Security Scan / tests (push) Has been cancelled
Tests extras backends / tests-transformers (push) Has been cancelled
Tests extras backends / tests-rerankers (push) Has been cancelled
Tests extras backends / tests-diffusers (push) Has been cancelled
Tests extras backends / tests-coqui (push) Has been cancelled
tests / tests-linux (1.21.x) (push) Has been cancelled
tests / tests-aio-container (push) Has been cancelled
tests / tests-apple (1.21.x) (push) Has been cancelled
Update swagger / swagger (push) Has been cancelled
Check if checksums are up-to-date / checksum_check (push) Has been cancelled
Bump dependencies / bump (mudler/LocalAI) (push) Has been cancelled
Bump dependencies / bump (main, PABannier/bark.cpp, BARKCPP_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, ggml-org/llama.cpp, CPPLLAMA_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, ggml-org/whisper.cpp, WHISPER_CPP_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, leejet/stable-diffusion.cpp, STABLEDIFFUSION_GGML_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, mudler/go-piper, PIPER_VERSION) (push) Has been cancelled
Bump dependencies / bump (master, mudler/go-stable-diffusion, STABLEDIFFUSION_VERSION) (push) Has been cancelled
feat: Add backend gallery (#5607)
* feat: Add backend gallery

This PR adds support for managing backends similarly to models. A backend
gallery is now available, which can be used to install and remove
extra backends.
The backend gallery can be configured similarly to a model gallery, and
API calls allow installing and removing backends at runtime, as
well as during the startup phase of LocalAI.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Add backends docs

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* wip: Backend Dockerfile for python backends

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* feat: drop extras images, build python backends separately

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* fixup on all backends

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* test CI

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Tweaks

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Drop old backends leftovers

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Fixup CI

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Move dockerfile upper

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Fix proto

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Feature dropped for consistency - we prefer model galleries

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Add missing packages in the build image

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* exllama is only available on cublas

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* pin torch on chatterbox

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Fixups to index

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* CI

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Debug CI

* Install accelerators deps

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Add target arch

* Add cuda minor version

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Use self-hosted runners

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* ci: use quay for test images

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* fixups for vllm and chatterbox

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Small fixups on CI

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* chatterbox is only available for nvidia

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Simplify CI builds

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Adapt test, use qwen3

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* chore(model gallery): add jina-reranker-v1-tiny-en-gguf

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* fix(gguf-parser): recover from potential panics that can happen while reading ggufs with gguf-parser

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Use reranker from llama.cpp in AIO images

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* Limit concurrent jobs

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

---------

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Signed-off-by: Ettore Di Giacinto <mudler@users.noreply.github.com>
2025-06-15 14:56:52 +02:00

283 lines
8.1 KiB
Go

package routes
import (
"fmt"
"html/template"
"math"
"sort"
"strconv"
"strings"
"github.com/gofiber/fiber/v2"
"github.com/google/uuid"
"github.com/microcosm-cc/bluemonday"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/http/elements"
"github.com/mudler/LocalAI/core/http/utils"
"github.com/mudler/LocalAI/core/p2p"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/internal"
"github.com/rs/zerolog/log"
)
// registerGalleryRoutes wires up the web-UI endpoints used to browse and
// search the model galleries, submit install/delete jobs to the gallery
// service, and poll job progress (HTMX-driven progress bars).
func registerGalleryRoutes(app *fiber.App, cl *config.BackendConfigLoader, appConfig *config.ApplicationConfig, galleryService *services.GalleryService, opcache *services.OpCache) {

	// Show the Models page (all models)
	app.Get("/browse", func(c *fiber.Ctx) error {
		term := c.Query("term")
		page := c.Query("page")
		items := c.Query("items")

		models, err := gallery.AvailableGalleryModels(appConfig.Galleries, appConfig.ModelPath)
		if err != nil {
			log.Error().Err(err).Msg("could not list models from galleries")
			return c.Status(fiber.StatusInternalServerError).Render("views/error", fiber.Map{
				"Title":        "LocalAI - Models",
				"BaseURL":      utils.BaseURL(c),
				"Version":      internal.PrintableVersion(),
				"ErrorCode":    "500",
				"ErrorMessage": err.Error(),
			})
		}

		// Collect the sorted, de-duplicated set of tags across all models.
		allTags := map[string]struct{}{}
		tags := []string{}
		for _, m := range models {
			for _, t := range m.Tags {
				allTags[t] = struct{}{}
			}
		}
		for t := range allTags {
			tags = append(tags, t)
		}
		sort.Strings(tags)

		if term != "" {
			models = gallery.GalleryElements[*gallery.GalleryModel](models).Search(term)
		}

		// Get model statuses (in-flight install/delete operations).
		processingModelsData, taskTypes := opcache.GetStatus()

		summary := fiber.Map{
			"Title":            "LocalAI - Models",
			"BaseURL":          utils.BaseURL(c),
			"Version":          internal.PrintableVersion(),
			"Models":           template.HTML(elements.ListModels(models, opcache, galleryService)),
			"Repositories":     appConfig.Galleries,
			"AllTags":          tags,
			"ProcessingModels": processingModelsData,
			"AvailableModels":  len(models),
			"IsP2PEnabled":     p2p.IsP2PEnabled(),
			"TaskTypes":        taskTypes,
			// "ApplicationConfig": appConfig,
		}

		// Default to the first page when none is requested.
		if page == "" {
			page = "1"
		}

		pageNum, err := strconv.Atoi(page)
		if err != nil {
			return c.Status(fiber.StatusBadRequest).SendString("Invalid page number")
		}

		// page=0 disables pagination and renders the full list.
		if pageNum == 0 {
			return c.Render("views/models", summary)
		}

		itemsNum, err := strconv.Atoi(items)
		if err != nil || itemsNum < 1 {
			// Default page size; the < 1 guard also prevents a division by
			// zero in the totalPages computation below.
			itemsNum = 21
		}

		totalPages := int(math.Ceil(float64(len(models)) / float64(itemsNum)))

		// Narrow the summary down to the requested page.
		models = models.Paginate(pageNum, itemsNum)

		prevPage := pageNum - 1
		nextPage := pageNum + 1
		if prevPage < 1 {
			prevPage = 1
		}
		if nextPage > totalPages {
			nextPage = totalPages
		}
		if prevPage != pageNum {
			summary["PrevPage"] = prevPage
		}
		summary["NextPage"] = nextPage
		summary["TotalPages"] = totalPages
		summary["CurrentPage"] = pageNum
		summary["Models"] = template.HTML(elements.ListModels(models, opcache, galleryService))

		// Render index
		return c.Render("views/models", summary)
	})

	// Show the models, filtered from the user input
	// https://htmx.org/examples/active-search/
	app.Post("/browse/search/models", func(c *fiber.Ctx) error {
		page := c.Query("page")
		items := c.Query("items")

		form := struct {
			Search string `form:"search"`
		}{}
		if err := c.BodyParser(&form); err != nil {
			return c.Status(fiber.StatusBadRequest).SendString(bluemonday.StrictPolicy().Sanitize(err.Error()))
		}

		models, _ := gallery.AvailableGalleryModels(appConfig.Galleries, appConfig.ModelPath)

		if page != "" {
			// return a subset of the models
			pageNum, err := strconv.Atoi(page)
			if err != nil {
				return c.Status(fiber.StatusBadRequest).SendString("Invalid page number")
			}

			itemsNum, err := strconv.Atoi(items)
			if err != nil || itemsNum < 1 {
				itemsNum = 21 // default page size
			}

			models = models.Paginate(pageNum, itemsNum)
		}

		if form.Search != "" {
			models = models.Search(form.Search)
		}

		return c.SendString(elements.ListModels(models, opcache, galleryService))
	})

	/*

		Install routes

	*/

	// This route is used when the "Install" button is pressed, we submit here a new job to the gallery service
	// https://htmx.org/examples/progress-bar/
	app.Post("/browse/install/model/:id", func(c *fiber.Ctx) error {
		galleryID := strings.Clone(c.Params("id")) // note: strings.Clone is required for multiple requests!
		log.Debug().Msgf("UI job submitted to install : %+v\n", galleryID)

		id, err := uuid.NewUUID()
		if err != nil {
			return err
		}

		uid := id.String()

		// Track the job so the UI can poll its progress.
		opcache.Set(galleryID, uid)

		op := services.GalleryOp[gallery.GalleryModel]{
			ID:                 uid,
			GalleryElementName: galleryID,
			Galleries:          appConfig.Galleries,
		}
		go func() {
			galleryService.ModelGalleryChannel <- op
		}()

		return c.SendString(elements.StartModelProgressBar(uid, "0", "Installation"))
	})

	// This route is used when the "Delete" button is pressed, we submit here a new job to the gallery service
	// https://htmx.org/examples/progress-bar/
	app.Post("/browse/delete/model/:id", func(c *fiber.Ctx) error {
		galleryID := strings.Clone(c.Params("id")) // note: strings.Clone is required for multiple requests!
		log.Debug().Msgf("UI job submitted to delete : %+v\n", galleryID)
		var galleryName = galleryID
		if strings.Contains(galleryID, "@") {
			// if the galleryID contains a @ it means that it's a model from a gallery
			// but we want to delete it from the local models which does not need
			// a repository ID
			galleryName = strings.Split(galleryID, "@")[1]
		}

		id, err := uuid.NewUUID()
		if err != nil {
			return err
		}

		uid := id.String()

		// Track the deletion job by galleryID and galleryName
		// The GalleryID contains information about the repository,
		// while the GalleryName is ONLY the name of the model
		opcache.Set(galleryName, uid)
		opcache.Set(galleryID, uid)

		op := services.GalleryOp[gallery.GalleryModel]{
			ID:                 uid,
			Delete:             true,
			GalleryElementName: galleryName,
			Galleries:          appConfig.Galleries,
		}
		go func() {
			galleryService.ModelGalleryChannel <- op
			// Drop the (now deleted) model from the in-memory backend config.
			cl.RemoveBackendConfig(galleryName)
		}()

		return c.SendString(elements.StartModelProgressBar(uid, "0", "Deletion"))
	})

	// Display the job current progress status
	// If the job is done, we trigger the /browse/job/:uid route
	// https://htmx.org/examples/progress-bar/
	app.Get("/browse/job/progress/:uid", func(c *fiber.Ctx) error {
		jobUID := strings.Clone(c.Params("uid")) // note: strings.Clone is required for multiple requests!

		status := galleryService.GetStatus(jobUID)
		if status == nil {
			//fmt.Errorf("could not find any status for ID")
			return c.SendString(elements.ProgressBar("0"))
		}
		if status.Progress == 100 {
			c.Set("HX-Trigger", "done") // this triggers /browse/job/:uid (which is when the job is done)
			return c.SendString(elements.ProgressBar("100"))
		}
		if status.Error != nil {
			// TODO: instead of deleting the job, we should keep it in the cache and make it dismissable by the user
			opcache.DeleteUUID(jobUID)
			return c.SendString(elements.ErrorProgress(status.Error.Error(), status.GalleryElementName))
		}

		return c.SendString(elements.ProgressBar(fmt.Sprint(status.Progress)))
	})

	// this route is hit when the job is done, and we display the
	// final state (for now just displays "Installation completed")
	app.Get("/browse/job/:uid", func(c *fiber.Ctx) error {
		jobUID := strings.Clone(c.Params("uid")) // note: strings.Clone is required for multiple requests!

		status := galleryService.GetStatus(jobUID)
		// GetStatus may return nil for an unknown/expired job (the progress
		// route above already handles this case); guard against a nil
		// dereference instead of panicking.
		if status == nil {
			log.Debug().Msgf("no status found for job : %+v\n", jobUID)
			return c.Status(fiber.StatusNotFound).SendString("Job not found")
		}

		galleryID := status.GalleryElementName
		opcache.DeleteUUID(jobUID)
		if galleryID == "" {
			log.Debug().Msgf("no processing model found for job : %+v\n", jobUID)
		}

		log.Debug().Msgf("JOB finished : %+v\n", status)
		showDelete := true
		displayText := "Installation completed"
		if status.Deletion {
			showDelete = false
			displayText = "Deletion completed"
		}

		return c.SendString(elements.DoneModelProgress(galleryID, displayText, showDelete))
	})
}