From 8c4d01ab3bbd804de358b1dbdfd9865fe3495d37 Mon Sep 17 00:00:00 2001
From: Grail Finder
Date: Sat, 7 Mar 2026 11:26:07 +0300
Subject: [PATCH] Enha: atomic global vars instead of mutexes

---
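Notes:

Every mutex-guarded global below becomes a sync/atomic.Value that is
primed in an init() with its concrete type, so readers can type-assert
the result of Load() unconditionally. A minimal, self-contained sketch
of the pattern (the names conf/names/cfgP are illustrative only, not
code from this repo):

	package main

	import (
		"fmt"
		"sync/atomic"
	)

	type conf struct{ ctx int }

	var (
		names atomic.Value // stores []string
		cfgP  atomic.Value // stores *conf
	)

	// Prime both values: atomic.Value panics if Store is later called
	// with a different concrete type, and priming lets every Load
	// type-assert without a comma-ok check.
	func init() {
		names.Store([]string{})
		cfgP.Store((*conf)(nil))
	}

	func main() {
		names.Store([]string{"a", "b"})           // writers replace the whole slice
		fmt.Println(len(names.Load().([]string))) // 2

		// The primed pointer is a typed nil, so pointer values still
		// need a nil check after the assertion.
		if c := cfgP.Load().(*conf); c != nil {
			fmt.Println(c.ctx)
		}
	}

Two caveats worth keeping in mind: readers get a snapshot, so a stored
slice must never be mutated in place (build a new one and Store it);
and Load-then-Store sequences are not atomic as a unit, which is
acceptable here because the model lists are refreshed wholesale.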
 bot.go         | 46 +++++++++++++++++++-----------------------
 helpfuncs.go   | 29 ++++++++++++++++-------------
 popups.go      |  9 +++++----
 props_table.go | 10 +++-------
 4 files changed, 45 insertions(+), 49 deletions(-)

diff --git a/bot.go b/bot.go
index 773d508..dbcb933 100644
--- a/bot.go
+++ b/bot.go
@@ -22,7 +22,7 @@ import (
 	"slices"
 	"strconv"
 	"strings"
-	"sync"
+	"sync/atomic"
 	"time"
 )
@@ -49,7 +49,6 @@ var (
 	//nolint:unused // TTS_ENABLED conditionally uses this
 	orator Orator
 	asr    STT
-	localModelsMu   sync.RWMutex
 	defaultLCPProps = map[string]float32{
 		"temperature":    0.8,
 		"dry_multiplier": 0.0,
@@ -64,11 +63,17 @@ var (
 		"google/gemma-3-27b-it:free",
 		"meta-llama/llama-3.3-70b-instruct:free",
 	}
-	LocalModels     = []string{}
-	localModelsData *models.LCPModels
-	orModelsData    *models.ORModels
+	LocalModels     atomic.Value // stores []string
+	localModelsData atomic.Value // stores *models.LCPModels
+	orModelsData    atomic.Value // stores *models.ORModels
 )
 
+func init() {
+	LocalModels.Store([]string{})
+	localModelsData.Store((*models.LCPModels)(nil))
+	orModelsData.Store((*models.ORModels)(nil))
+}
+
 var thinkBlockRE = regexp.MustCompile(`(?s)<think>.*?</think>`)
 
 // parseKnownToTag extracts known_to list from content using configured tag.
@@ -356,7 +361,7 @@ func fetchORModels(free bool) ([]string, error) {
 	if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
 		return nil, err
 	}
-	orModelsData = data
+	orModelsData.Store(data)
 	freeModels := data.ListModels(free)
 	return freeModels, nil
 }
@@ -418,9 +423,7 @@ func fetchLCPModelsWithStatus() (*models.LCPModels, error) {
 	if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
 		return nil, err
 	}
-	localModelsMu.Lock()
-	localModelsData = data
-	localModelsMu.Unlock()
+	localModelsData.Store(data)
 	return data, nil
 }
 
@@ -1413,27 +1416,24 @@ func updateModelLists() {
 	if err != nil {
 		logger.Warn("failed to fetch llama.cpp models", "error", err)
 	}
-	localModelsMu.Lock()
-	LocalModels = ml
-	localModelsMu.Unlock()
+	LocalModels.Store(ml)
 	for statusLineWidget == nil {
 		time.Sleep(time.Millisecond * 100)
 	}
 	// set already loaded model in llama.cpp
 	if strings.Contains(cfg.CurrentAPI, "localhost") ||
 		strings.Contains(cfg.CurrentAPI, "127.0.0.1") {
-		localModelsMu.Lock()
-		defer localModelsMu.Unlock()
-		for i := range LocalModels {
-			if strings.Contains(LocalModels[i], models.LoadedMark) {
-				m := strings.TrimPrefix(LocalModels[i], models.LoadedMark)
+		modelList := LocalModels.Load().([]string)
+		for i := range modelList {
+			if strings.Contains(modelList[i], models.LoadedMark) {
+				m := strings.TrimPrefix(modelList[i], models.LoadedMark)
 				cfg.CurrentModel = m
 				chatBody.Model = m
-				cachedModelColor = "green"
+				cachedModelColor.Store("green")
 				updateStatusLine()
 				updateToolCapabilities()
 				app.Draw()
 			}
 		}
 	}
 }
@@ -1441,19 +1441,15 @@
 func refreshLocalModelsIfEmpty() {
-	localModelsMu.RLock()
-	if len(LocalModels) > 0 {
-		localModelsMu.RUnlock()
+	models := LocalModels.Load().([]string)
+	if len(models) > 0 {
 		return
 	}
-	localModelsMu.RUnlock()
 	// try to fetch
 	models, err := fetchLCPModels()
 	if err != nil {
 		logger.Warn("failed to fetch llama.cpp models", "error", err)
 		return
 	}
-	localModelsMu.Lock()
-	LocalModels = models
-	localModelsMu.Unlock()
+	LocalModels.Store(models)
 }
 
 func summarizeAndStartNewChat() {
diff --git a/helpfuncs.go b/helpfuncs.go
index f18b782..abbd3b2 100644
--- a/helpfuncs.go
+++ b/helpfuncs.go
@@ -16,11 +16,16 @@ import (
+	"sync/atomic"
 	"time"
 	"unicode"
 
 	"github.com/rivo/tview"
 )
 
 // Cached model color - updated by background goroutine
-var cachedModelColor string = "orange"
+var cachedModelColor atomic.Value // stores string
+
+func init() {
+	cachedModelColor.Store("orange")
+}
 
 // startModelColorUpdater starts a background goroutine that periodically updates
 // the cached model color. Only runs HTTP requests for local llama.cpp APIs.
@@ -39,20 +44,20 @@ func startModelColorUpdater() {
 
 // updateCachedModelColor updates the global cachedModelColor variable
 func updateCachedModelColor() {
 	if !isLocalLlamacpp() {
-		cachedModelColor = "orange"
+		cachedModelColor.Store("orange")
 		return
 	}
 	// Check if model is loaded
 	loaded, err := isModelLoaded(chatBody.GetModel())
 	if err != nil {
 		// On error, assume not loaded (red)
-		cachedModelColor = "red"
+		cachedModelColor.Store("red")
 		return
 	}
 	if loaded {
-		cachedModelColor = "green"
+		cachedModelColor.Store("green")
 	} else {
-		cachedModelColor = "red"
+		cachedModelColor.Store("red")
 	}
 }
@@ -335,7 +340,7 @@ func isLocalLlamacpp() bool {
 // The cached value is updated by a background goroutine every 5 seconds.
 // For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
 func getModelColor() string {
-	return cachedModelColor
+	return cachedModelColor.Load().(string)
 }
 
 func makeStatusLine() string {
@@ -421,40 +426,40 @@ func getMaxContextTokens() int {
 	modelName := chatBody.GetModel()
 	switch {
 	case strings.Contains(cfg.CurrentAPI, "openrouter"):
-		if orModelsData != nil {
-			for i := range orModelsData.Data {
-				m := &orModelsData.Data[i]
+		if data := orModelsData.Load().(*models.ORModels); data != nil {
+			for i := range data.Data {
+				m := &data.Data[i]
 				if m.ID == modelName {
 					return m.ContextLength
 				}
 			}
 		}
 	case strings.Contains(cfg.CurrentAPI, "deepseek"):
 		return deepseekContext
 	default:
-		if localModelsData != nil {
-			for i := range localModelsData.Data {
-				m := &localModelsData.Data[i]
+		if data := localModelsData.Load().(*models.LCPModels); data != nil {
+			for i := range data.Data {
+				m := &data.Data[i]
 				if m.ID == modelName {
 					for _, arg := range m.Status.Args {
 						if strings.HasPrefix(arg, "--ctx-size") {
 							if strings.Contains(arg, "=") {
 								val := strings.Split(arg, "=")[1]
 								if n, err := strconv.Atoi(val); err == nil {
 									return n
 								}
 							} else {
 								idx := -1
 								for j, a := range m.Status.Args {
 									if a == "--ctx-size" && j+1 < len(m.Status.Args) {
 										idx = j + 1
 										break
 									}
 								}
 								if idx != -1 {
 									if n, err := strconv.Atoi(m.Status.Args[idx]); err == nil {
 										return n
 									}
 								}
 							}
 						}
 					}
 				}
 			}
 		}
diff --git
 a/popups.go b/popups.go
index 74cf522..dca3f61 100644
--- a/popups.go
+++ b/popups.go
@@ -22,7 +22,7 @@ func showModelSelectionPopup() {
 		models, err := fetchLCPModelsWithLoadStatus()
 		if err != nil {
 			logger.Error("failed to fetch models with load status", "error", err)
-			return LocalModels
+			return LocalModels.Load().([]string)
 		}
 		return models
 	}
@@ -30,7 +30,8 @@ func showModelSelectionPopup() {
 	modelList := getModelListForAPI(cfg.CurrentAPI)
 	// Check for empty options list
 	if len(modelList) == 0 {
-		logger.Warn("empty model list for", "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
+		localModels := LocalModels.Load().([]string)
+		logger.Warn("empty model list for", "api", cfg.CurrentAPI, "localModelsLen", len(localModels), "orModelsLen", len(ORFreeModels))
 		var message string
 		switch {
 		case strings.Contains(cfg.CurrentAPI, "openrouter.ai"):
@@ -150,9 +151,7 @@ func showAPILinkSelectionPopup() {
 		}
 		// Assume local llama.cpp
 		refreshLocalModelsIfEmpty()
-		localModelsMu.RLock()
-		defer localModelsMu.RUnlock()
-		return LocalModels
+		return LocalModels.Load().([]string)
 	}
 	newModelList := getModelListForAPI(cfg.CurrentAPI)
 	// Ensure chatBody.Model is in the new list; if not, set to first available model
diff --git a/props_table.go b/props_table.go
index 5c3d8d7..431f020 100644
--- a/props_table.go
+++ b/props_table.go
@@ -4,14 +4,11 @@ import (
 	"fmt"
 	"strconv"
 	"strings"
-	"sync"
 
 	"github.com/gdamore/tcell/v2"
 	"github.com/rivo/tview"
 )
 
-var _ = sync.RWMutex{}
-
 // Define constants for cell types
 const (
 	CellTypeCheckbox = "checkbox"
@@ -157,9 +154,7 @@ func makePropsTable(props map[string]float32) *tview.Table {
 		}
 		// Assume local llama.cpp
 		refreshLocalModelsIfEmpty()
-		localModelsMu.RLock()
-		defer localModelsMu.RUnlock()
-		return LocalModels
+		return LocalModels.Load().([]string)
 	}
 	// Add input fields
 	addInputRow("New char to write msg as", "", func(text string) {
@@ -262,7 +257,8 @@ func makePropsTable(props map[string]float32) *tview.Table {
 
 	// Check for empty options list
 	if len(data.Options) == 0 {
-		logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
+		localModels := LocalModels.Load().([]string)
+		logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(localModels), "orModelsLen", len(ORFreeModels))
 		message := "No options available for " + label
 		if label == "Select a model" {
 			switch {