Fix: model list loading when llama.cpp is started after gf-lt

This commit is contained in:
Grail Finder
2025-12-20 14:21:40 +03:00
parent 0ca709b7c6
commit ba3330ee54
3 changed files with 34 additions and 0 deletions

22
bot.go
View File

@@ -21,6 +21,7 @@ import (
"path"
"strconv"
"strings"
"sync"
"time"
"github.com/neurosnap/sentences/english"
@@ -52,6 +53,7 @@ var (
//nolint:unused // TTS_ENABLED conditionally uses this
orator extra.Orator
asr extra.STT
localModelsMu sync.RWMutex
defaultLCPProps = map[string]float32{
"temperature": 0.8,
"dry_multiplier": 0.0,
@@ -1002,12 +1004,32 @@ func updateModelLists() {
}
}
// Handle the case where llama.cpp was started after gf-lt: re-fetch the model list.
localModelsMu.Lock()
LocalModels, err = fetchLCPModels()
localModelsMu.Unlock()
if err != nil {
logger.Warn("failed to fetch llama.cpp models", "error", err)
}
}
// refreshLocalModelsIfEmpty fetches the llama.cpp model list if none is
// cached yet. It covers the case where the llama.cpp server was started
// after gf-lt, so the initial fetch at startup found nothing.
// Safe for concurrent use: reads and the final write are guarded by
// localModelsMu, and the list is re-checked under the write lock so a
// concurrent populate (e.g. from updateModelLists) is never clobbered.
func refreshLocalModelsIfEmpty() {
	localModelsMu.RLock()
	empty := len(LocalModels) == 0
	localModelsMu.RUnlock()
	if !empty {
		return
	}
	// Fetch outside any lock: the HTTP round-trip must not block readers.
	models, err := fetchLCPModels()
	if err != nil {
		logger.Warn("failed to fetch llama.cpp models", "error", err)
		return
	}
	localModelsMu.Lock()
	// Re-check under the write lock: another goroutine may have filled the
	// list while we were fetching; keep its result rather than overwrite.
	if len(LocalModels) == 0 {
		LocalModels = models
	}
	localModelsMu.Unlock()
}
func init() {
var err error
cfg, err = config.LoadConfig("config.toml")

View File

@@ -5,11 +5,14 @@ import (
"slices"
"strconv"
"strings"
"sync"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
)
// NOTE(review): blank assignment apparently exists only to keep the "sync"
// import referenced in this file; the lock actually used below is the
// package-level localModelsMu declared in bot.go. If "sync" has no other
// use here, dropping both the import and this line would be cleaner — TODO confirm.
var _ = sync.RWMutex{}
// Define constants for cell types
const (
CellTypeCheckbox = "checkbox"
@@ -138,6 +141,10 @@ func makePropsTable(props map[string]float32) *tview.Table {
} else if strings.Contains(api, "openrouter.ai") {
return ORFreeModels
}
// Assume local llama.cpp
refreshLocalModelsIfEmpty()
localModelsMu.RLock()
defer localModelsMu.RUnlock()
return LocalModels
}
var modelRowIndex int // will be set before model row is added

5
tui.go
View File

@@ -12,11 +12,14 @@ import (
"path"
"strconv"
"strings"
"sync"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
)
// NOTE(review): blank assignment apparently exists only to keep the "sync"
// import referenced in tui.go; the lock actually used below is the
// package-level localModelsMu declared in bot.go. If "sync" has no other
// use here, dropping both the import and this line would be cleaner — TODO confirm.
var _ = sync.RWMutex{}
var (
app *tview.Application
pages *tview.Pages
@@ -988,11 +991,13 @@ func init() {
}
updateStatusLine()
} else {
localModelsMu.RLock()
if len(LocalModels) > 0 {
currentLocalModelIndex = (currentLocalModelIndex + 1) % len(LocalModels)
chatBody.Model = LocalModels[currentLocalModelIndex]
cfg.CurrentModel = chatBody.Model
}
localModelsMu.RUnlock()
updateStatusLine()
// // For non-OpenRouter APIs, use the old logic
// go func() {