fix(gallery): do clear out errors once displayed (#3033)

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Ettore Di Giacinto 2024-07-28 10:28:10 +02:00 committed by GitHub
parent 2a839e1432
commit d6a7a77f6b
2 changed files with 46 additions and 11 deletions


@@ -9,7 +9,6 @@ import (
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/p2p"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/pkg/xsync"
)
const (
@@ -372,7 +371,12 @@ func dropBadChars(s string) string {
return strings.ReplaceAll(s, "@", "__")
}
-func ListModels(models []*gallery.GalleryModel, processing *xsync.SyncedMap[string, string], galleryService *services.GalleryService) string {
+type ProcessTracker interface {
+	Exists(string) bool
+	Get(string) string
+}
+
+func ListModels(models []*gallery.GalleryModel, processTracker ProcessTracker, galleryService *services.GalleryService) string {
modelsElements := []elem.Node{}
descriptionDiv := func(m *gallery.GalleryModel) elem.Node {
return elem.Div(
@@ -396,7 +400,7 @@ func ListModels(models []*gallery.GalleryModel, processing *xsync.SyncedMap[stri
actionDiv := func(m *gallery.GalleryModel) elem.Node {
galleryID := fmt.Sprintf("%s@%s", m.Gallery.Name, m.Name)
-currentlyProcessing := processing.Exists(galleryID)
+currentlyProcessing := processTracker.Exists(galleryID)
jobID := ""
isDeletionOp := false
if currentlyProcessing {
@@ -404,7 +408,7 @@ func ListModels(models []*gallery.GalleryModel, processing *xsync.SyncedMap[stri
if status != nil && status.Deletion {
isDeletionOp = true
}
-jobID = processing.Get(galleryID)
+jobID = processTracker.Get(galleryID)
// TODO:
// case not handled, if status == nil : "Waiting"
}
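
Aside (not part of this diff): ProcessTracker only asks for Exists and Get, so the HTML builders in this file no longer depend on xsync.SyncedMap directly. A minimal sketch of a map-backed implementation that satisfies the interface, e.g. for exercising ListModels in a test; fakeTracker and the example IDs below are made up:

// fakeTracker is a hypothetical, test-only ProcessTracker backed by a plain map.
type fakeTracker map[string]string

func (f fakeTracker) Exists(key string) bool { _, ok := f[key]; return ok }
func (f fakeTracker) Get(key string) string  { return f[key] }

// Example call (model list and gallery service as set up elsewhere):
// html := ListModels(models, fakeTracker{"localai@some-model": "some-job-uuid"}, galleryService)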


@@ -21,6 +21,40 @@ import (
"github.com/google/uuid"
)
+type modelOpCache struct {
+	status *xsync.SyncedMap[string, string]
+}
+
+func NewModelOpCache() *modelOpCache {
+	return &modelOpCache{
+		status: xsync.NewSyncedMap[string, string](),
+	}
+}
+
+func (m *modelOpCache) Set(key string, value string) {
+	m.status.Set(key, value)
+}
+
+func (m *modelOpCache) Get(key string) string {
+	return m.status.Get(key)
+}
+
+func (m *modelOpCache) DeleteUUID(uuid string) {
+	for _, k := range m.status.Keys() {
+		if m.status.Get(k) == uuid {
+			m.status.Delete(k)
+		}
+	}
+}
+
+func (m *modelOpCache) Map() map[string]string {
+	return m.status.Map()
+}
+
+func (m *modelOpCache) Exists(key string) bool {
+	return m.status.Exists(key)
+}
+
func RegisterUIRoutes(app *fiber.App,
cl *config.BackendConfigLoader,
ml *model.ModelLoader,
@@ -29,7 +63,7 @@ func RegisterUIRoutes(app *fiber.App,
auth func(*fiber.Ctx) error) {
// keeps the state of models that are being installed from the UI
-var processingModels = xsync.NewSyncedMap[string, string]()
+var processingModels = NewModelOpCache()
// modelStatus returns the current status of the models being processed (installation or deletion)
// it is called asynchronously from the UI
@@ -232,6 +266,8 @@ func RegisterUIRoutes(app *fiber.App,
return c.SendString(elements.ProgressBar("100"))
}
if status.Error != nil {
+// TODO: instead of deleting the job, we should keep it in the cache and make it dismissable
+processingModels.DeleteUUID(jobUID)
return c.SendString(elements.ErrorProgress(status.Error.Error(), status.GalleryModelName))
}
@@ -246,12 +282,7 @@ func RegisterUIRoutes(app *fiber.App,
status := galleryService.GetStatus(jobUID)
galleryID := ""
-for _, k := range processingModels.Keys() {
-	if processingModels.Get(k) == jobUID {
-		galleryID = k
-		processingModels.Delete(k)
-	}
-}
+processingModels.DeleteUUID(jobUID)
if galleryID == "" {
log.Debug().Msgf("no processing model found for job : %+v\n", jobUID)
}
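
Aside (not part of this diff): DeleteUUID on the new modelOpCache does a reverse lookup, dropping every gallery ID whose stored value equals the finished job's UUID, which is what lets both the error branch and the job endpoint above clear state with a single call. A minimal test sketch, assuming the cache lives in the same package as RegisterUIRoutes; the test name and gallery IDs are made up:

package routes // assumption: the package that defines modelOpCache and RegisterUIRoutes

import "testing"

func TestModelOpCacheDeleteUUID(t *testing.T) {
	cache := NewModelOpCache()
	cache.Set("localai@model-a", "job-1")
	cache.Set("localai@model-b", "job-1")
	cache.Set("localai@model-c", "job-2")

	// One call removes every gallery ID that points at job-1.
	cache.DeleteUUID("job-1")

	if cache.Exists("localai@model-a") || cache.Exists("localai@model-b") {
		t.Fatal("expected all entries for job-1 to be removed")
	}
	if !cache.Exists("localai@model-c") {
		t.Fatal("expected the entry for job-2 to be kept")
	}
}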