fix(gallery): do clear out errors once displayed (#3033)
Signed-off-by: Ettore Di Giacinto <[email protected]>
mudler authored Jul 28, 2024
1 parent 2a839e1 commit d6a7a77
Showing 2 changed files with 46 additions and 11 deletions.
12 changes: 8 additions & 4 deletions core/http/elements/gallery.go
@@ -9,7 +9,6 @@ import (
 	"github.com/mudler/LocalAI/core/gallery"
 	"github.com/mudler/LocalAI/core/p2p"
 	"github.com/mudler/LocalAI/core/services"
-	"github.com/mudler/LocalAI/pkg/xsync"
 )
 
 const (
@@ -372,7 +371,12 @@ func dropBadChars(s string) string {
 	return strings.ReplaceAll(s, "@", "__")
 }
 
-func ListModels(models []*gallery.GalleryModel, processing *xsync.SyncedMap[string, string], galleryService *services.GalleryService) string {
+type ProcessTracker interface {
+	Exists(string) bool
+	Get(string) string
+}
+
+func ListModels(models []*gallery.GalleryModel, processTracker ProcessTracker, galleryService *services.GalleryService) string {
 	modelsElements := []elem.Node{}
 	descriptionDiv := func(m *gallery.GalleryModel) elem.Node {
 		return elem.Div(
@@ -396,15 +400,15 @@ func ListModels(models []*gallery.GalleryModel, processing *xsync.SyncedMap[string, string], galleryService *services.GalleryService) string {
 
 	actionDiv := func(m *gallery.GalleryModel) elem.Node {
 		galleryID := fmt.Sprintf("%s@%s", m.Gallery.Name, m.Name)
-		currentlyProcessing := processing.Exists(galleryID)
+		currentlyProcessing := processTracker.Exists(galleryID)
 		jobID := ""
 		isDeletionOp := false
 		if currentlyProcessing {
 			status := galleryService.GetStatus(galleryID)
 			if status != nil && status.Deletion {
 				isDeletionOp = true
 			}
-			jobID = processing.Get(galleryID)
+			jobID = processTracker.Get(galleryID)
 			// TODO:
 			// case not handled, if status == nil : "Waiting"
 		}
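Context for the gallery.go change, not part of the commit: the new ProcessTracker interface lets the HTML element builder ask only two questions — whether a gallery ID is currently being processed, and under which job UUID — without importing xsync. Below is a minimal sketch of a type that would satisfy the interface; demoTracker and the sample values are hypothetical, and in the commit this role is actually played by modelOpCache from ui.go.

package main

import (
	"fmt"
	"sync"
)

// ProcessTracker mirrors the interface added to core/http/elements/gallery.go:
// anything that can say whether a gallery ID is currently being processed and
// return the job UUID associated with it.
type ProcessTracker interface {
	Exists(string) bool
	Get(string) string
}

// demoTracker is a hypothetical implementation used only for this sketch; the
// commit itself uses modelOpCache (backed by xsync.SyncedMap) from ui.go.
type demoTracker struct {
	mu   sync.RWMutex
	jobs map[string]string // gallery ID -> job UUID
}

func (d *demoTracker) Exists(key string) bool {
	d.mu.RLock()
	defer d.mu.RUnlock()
	_, ok := d.jobs[key]
	return ok
}

func (d *demoTracker) Get(key string) string {
	d.mu.RLock()
	defer d.mu.RUnlock()
	return d.jobs[key]
}

func main() {
	var tracker ProcessTracker = &demoTracker{jobs: map[string]string{
		"localai@some-model": "123e4567-e89b-12d3-a456-426614174000",
	}}
	fmt.Println(tracker.Exists("localai@some-model")) // true
	fmt.Println(tracker.Get("localai@some-model"))    // prints the job UUID
}

Because ListModels now accepts the interface rather than *xsync.SyncedMap, the elements package no longer needs the pkg/xsync import removed in the first hunk.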
45 changes: 38 additions & 7 deletions core/http/routes/ui.go
@@ -21,6 +21,40 @@ import (
 	"github.com/google/uuid"
 )
 
+type modelOpCache struct {
+	status *xsync.SyncedMap[string, string]
+}
+
+func NewModelOpCache() *modelOpCache {
+	return &modelOpCache{
+		status: xsync.NewSyncedMap[string, string](),
+	}
+}
+
+func (m *modelOpCache) Set(key string, value string) {
+	m.status.Set(key, value)
+}
+
+func (m *modelOpCache) Get(key string) string {
+	return m.status.Get(key)
+}
+
+func (m *modelOpCache) DeleteUUID(uuid string) {
+	for _, k := range m.status.Keys() {
+		if m.status.Get(k) == uuid {
+			m.status.Delete(k)
+		}
+	}
+}
+
+func (m *modelOpCache) Map() map[string]string {
+	return m.status.Map()
+}
+
+func (m *modelOpCache) Exists(key string) bool {
+	return m.status.Exists(key)
+}
+
 func RegisterUIRoutes(app *fiber.App,
 	cl *config.BackendConfigLoader,
 	ml *model.ModelLoader,
@@ -29,7 +63,7 @@ func RegisterUIRoutes(app *fiber.App,
 	auth func(*fiber.Ctx) error) {
 
 	// keeps the state of models that are being installed from the UI
-	var processingModels = xsync.NewSyncedMap[string, string]()
+	var processingModels = NewModelOpCache()
 
 	// modelStatus returns the current status of the models being processed (installation or deletion)
 	// it is called asynchonously from the UI
@@ -232,6 +266,8 @@ func RegisterUIRoutes(app *fiber.App,
 			return c.SendString(elements.ProgressBar("100"))
 		}
 		if status.Error != nil {
+			// TODO: instead of deleting the job, we should keep it in the cache and make it dismissable
+			processingModels.DeleteUUID(jobUID)
 			return c.SendString(elements.ErrorProgress(status.Error.Error(), status.GalleryModelName))
 		}
 
@@ -246,12 +282,7 @@ func RegisterUIRoutes(app *fiber.App,
 		status := galleryService.GetStatus(jobUID)
 
 		galleryID := ""
-		for _, k := range processingModels.Keys() {
-			if processingModels.Get(k) == jobUID {
-				galleryID = k
-				processingModels.Delete(k)
-			}
-		}
+		processingModels.DeleteUUID(jobUID)
 		if galleryID == "" {
 			log.Debug().Msgf("no processing model found for job : %+v\n", jobUID)
 		}
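The heart of the fix is DeleteUUID: it walks the cache and removes every gallery ID whose value matches the finished (or failed) job UUID, so an error is rendered once and then cleared instead of reappearing on every progress poll. Below is a minimal stand-in using a plain map, not the commit's code; opCache and the sample IDs are hypothetical, and the real modelOpCache wraps xsync.SyncedMap for safe concurrent access from the Fiber handlers.

package main

import "fmt"

// opCache is a hypothetical stand-in for modelOpCache, mapping gallery IDs to
// the UUID of the job currently installing or deleting them.
type opCache struct {
	status map[string]string
}

// DeleteUUID mirrors the method added in core/http/routes/ui.go: drop every
// gallery ID whose value equals the given job UUID (a reverse lookup by value).
func (m *opCache) DeleteUUID(uuid string) {
	for k, v := range m.status {
		if v == uuid {
			delete(m.status, k)
		}
	}
}

func main() {
	cache := &opCache{status: map[string]string{
		"localai@model-a": "job-1",
		"localai@model-b": "job-2",
	}}

	// Once job-1's error has been rendered to the UI, the handler removes it
	// so the same error banner is not shown again on the next poll.
	cache.DeleteUUID("job-1")

	fmt.Println(cache.status) // map[localai@model-b:job-2]
}

As the TODO in the diff notes, a later refinement could keep the failed job in the cache and make the error dismissable rather than deleting it outright.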
