Enha: atomic global vars instead of mutexes

Grail Finder
2026-03-07 11:26:07 +03:00
parent a842b00e96
commit 8c4d01ab3b
4 changed files with 75 additions and 70 deletions


@@ -16,11 +16,17 @@ import (
 	"time"
 	"unicode"
+	"sync/atomic"
 
 	"github.com/rivo/tview"
 )
 
 // Cached model color - updated by background goroutine
-var cachedModelColor string = "orange"
+var cachedModelColor atomic.Value // stores string
+
+func init() {
+	cachedModelColor.Store("orange")
+}
 
 // startModelColorUpdater starts a background goroutine that periodically updates
 // the cached model color. Only runs HTTP requests for local llama.cpp APIs.
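
The hunk above swaps a plain string global for an atomic.Value so the reader (the UI) and the background updater goroutine no longer race on cachedModelColor. The init function matters: Load returns nil until the first Store, so an unseeded value would make a Load().(string) assertion panic. A minimal standalone sketch of the same pattern; all names here are illustrative, not code from this repository:

// Sketch: one writer goroutine, lock-free readers via atomic.Value.
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

var color atomic.Value // always stores a string

func init() {
	// Load returns nil until the first Store; seeding here keeps
	// the Load().(string) assertion below from panicking.
	color.Store("orange")
}

func main() {
	go func() { // single writer, like updateCachedModelColor
		for _, c := range []string{"red", "green"} {
			time.Sleep(10 * time.Millisecond)
			color.Store(c)
		}
	}()
	for i := 0; i < 5; i++ { // readers never block
		fmt.Println(color.Load().(string))
		time.Sleep(5 * time.Millisecond)
	}
}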
@@ -39,20 +45,20 @@ func startModelColorUpdater() {
 // updateCachedModelColor updates the global cachedModelColor variable
 func updateCachedModelColor() {
 	if !isLocalLlamacpp() {
-		cachedModelColor = "orange"
+		cachedModelColor.Store("orange")
 		return
 	}
 	// Check if model is loaded
 	loaded, err := isModelLoaded(chatBody.GetModel())
 	if err != nil {
 		// On error, assume not loaded (red)
-		cachedModelColor = "red"
+		cachedModelColor.Store("red")
 		return
 	}
 	if loaded {
-		cachedModelColor = "green"
+		cachedModelColor.Store("green")
 	} else {
-		cachedModelColor = "red"
+		cachedModelColor.Store("red")
 	}
 }
 
@@ -335,7 +341,7 @@ func isLocalLlamacpp() bool {
 // The cached value is updated by a background goroutine every 5 seconds.
 // For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
 func getModelColor() string {
-	return cachedModelColor
+	return cachedModelColor.Load().(string)
 }
 
 func makeStatusLine() string {
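
getModelColor now pays for the lock-free read with a type assertion, which is safe here only because every Store call in this file stores a string (atomic.Value panics if successive stores mix concrete types). On Go 1.19+, the generic atomic.Pointer[T] expresses the same idea with compile-time typing instead of an assertion. A hedged alternative sketch, not what this commit does:

// Alternative sketch using sync/atomic's generic Pointer type (Go 1.19+).
package main

import (
	"fmt"
	"sync/atomic"
)

var color atomic.Pointer[string] // typed: no interface assertion needed

func main() {
	orange := "orange"
	color.Store(&orange) // seed, as the init() above does for atomic.Value
	fmt.Println(*color.Load()) // "orange"

	green := "green"
	color.Store(&green)
	fmt.Println(*color.Load()) // "green"
}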
@@ -421,40 +427,48 @@ func getMaxContextTokens() int {
 	modelName := chatBody.GetModel()
 	switch {
 	case strings.Contains(cfg.CurrentAPI, "openrouter"):
-		if orModelsData != nil {
-			for i := range orModelsData.Data {
-				m := &orModelsData.Data[i]
-				if m.ID == modelName {
-					return m.ContextLength
+		ord := orModelsData.Load()
+		if ord != nil {
+			data := ord.(*models.ORModels)
+			if data != nil {
+				for i := range data.Data {
+					m := &data.Data[i]
+					if m.ID == modelName {
+						return m.ContextLength
+					}
 				}
 			}
 		}
 	case strings.Contains(cfg.CurrentAPI, "deepseek"):
 		return deepseekContext
 	default:
-		if localModelsData != nil {
-			for i := range localModelsData.Data {
-				m := &localModelsData.Data[i]
-				if m.ID == modelName {
-					for _, arg := range m.Status.Args {
-						if strings.HasPrefix(arg, "--ctx-size") {
-							if strings.Contains(arg, "=") {
-								val := strings.Split(arg, "=")[1]
-								if n, err := strconv.Atoi(val); err == nil {
-									return n
-								}
-							} else {
-								idx := -1
-								for j, a := range m.Status.Args {
-									if a == "--ctx-size" && j+1 < len(m.Status.Args) {
-										idx = j + 1
-										break
-									}
-								}
-								if idx != -1 {
-									if n, err := strconv.Atoi(m.Status.Args[idx]); err == nil {
-										return n
-									}
-								}
-							}
-						}
-					}
-				}
-			}
-		}
+		lmd := localModelsData.Load()
+		if lmd != nil {
+			data := lmd.(*models.LCPModels)
+			if data != nil {
+				for i := range data.Data {
+					m := &data.Data[i]
+					if m.ID == modelName {
+						for _, arg := range m.Status.Args {
+							if strings.HasPrefix(arg, "--ctx-size") {
+								if strings.Contains(arg, "=") {
+									val := strings.Split(arg, "=")[1]
+									if n, err := strconv.Atoi(val); err == nil {
+										return n
+									}
+								} else {
+									idx := -1
+									for j, a := range m.Status.Args {
+										if a == "--ctx-size" && j+1 < len(m.Status.Args) {
+											idx = j + 1
+											break
+										}
+									}
+									if idx != -1 {
+										if n, err := strconv.Atoi(m.Status.Args[idx]); err == nil {
+											return n
+										}
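
The model-list globals follow the same Load-then-assert pattern, but with two nil checks: Load returns a nil interface before anything has been stored, and a stored typed-nil pointer survives the type assertion and must be checked separately (hence the inner `if data != nil`). A self-contained sketch with stub types; ORModels below is a stand-in, not the project's models package:

// Sketch of the Load-and-assert pattern from getMaxContextTokens.
package main

import (
	"fmt"
	"sync/atomic"
)

type ORModel struct {
	ID            string
	ContextLength int
}

type ORModels struct{ Data []ORModel }

var orModelsData atomic.Value // stores *ORModels; may be unset

func contextFor(name string) int {
	v := orModelsData.Load()
	if v == nil {
		return 0 // nothing stored yet: nil interface
	}
	data := v.(*ORModels)
	if data == nil {
		return 0 // a typed nil (*ORModels)(nil) was stored
	}
	for i := range data.Data {
		if data.Data[i].ID == name {
			return data.Data[i].ContextLength
		}
	}
	return 0
}

func main() {
	fmt.Println(contextFor("x")) // 0: list not fetched yet
	orModelsData.Store(&ORModels{Data: []ORModel{{ID: "x", ContextLength: 8192}}})
	fmt.Println(contextFor("x")) // 8192
}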