Enhance: model list based on current api

This commit is contained in:
Grail Finder
2025-12-14 09:58:24 +03:00
parent e481661799
commit 1d5af981b5
2 changed files with 35 additions and 18 deletions

41
bot.go
View File

@@ -826,6 +826,30 @@ func charToStart(agentName string) bool {
return true
}
// updateModelLists refreshes the package-level model lists from the
// remote (OpenRouter) and local (llama.cpp) APIs. OpenRouter is only
// queried when a token is configured. Fetch failures are logged at
// Warn level and the corresponding list is left untouched, so a
// transient error during a periodic refresh never wipes a previously
// fetched list.
func updateModelLists() {
	if cfg.OpenRouterToken != "" {
		if models, err := fetchORModels(true); err != nil {
			logger.Warn("failed to fetch or models", "error", err)
		} else {
			ORFreeModels = models
		}
	}
	// if llama.cpp started after gf-lt?
	if models, err := fetchLCPModels(); err != nil {
		logger.Warn("failed to fetch llama.cpp models", "error", err)
	} else {
		LocalModels = models
	}
}
// updateModelListsTicker refreshes the model lists once immediately,
// then once every minute. Intended to run as a long-lived goroutine
// for the lifetime of the process, so the ticker is never stopped.
func updateModelListsTicker() {
	updateModelLists() // run on the start
	ticker := time.NewTicker(time.Minute)
	for range ticker.C {
		updateModelLists()
	}
}
func init() {
var err error
cfg, err = config.LoadConfig("config.toml")
@@ -878,22 +902,6 @@ func init() {
playerOrder = []string{cfg.UserRole, cfg.AssistantRole, cfg.CluedoRole2}
cluedoState = extra.CluedoPrepCards(playerOrder)
}
if cfg.OpenRouterToken != "" {
go func() {
ORModels, err := fetchORModels(true)
if err != nil {
logger.Error("failed to fetch or models", "error", err)
} else {
ORFreeModels = ORModels
}
}()
}
go func() {
LocalModels, err = fetchLCPModels()
if err != nil {
logger.Error("failed to fetch llama.cpp models", "error", err)
}
}()
choseChunkParser()
httpClient = createClient(time.Second * 15)
if cfg.TTS_ENABLED {
@@ -902,4 +910,5 @@ func init() {
if cfg.STT_ENABLED {
asr = extra.NewSTT(logger, cfg)
}
go updateModelListsTicker()
}

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"slices"
"strconv"
"strings"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
@@ -134,9 +135,16 @@ func makePropsTable(props map[string]float32) *tview.Table {
addListPopupRow("Select an api", apiLinks, cfg.CurrentAPI, func(option string) {
cfg.CurrentAPI = option
})
var modelList []string
// INFO: modelList is chosen based on current api link
if strings.Contains(cfg.CurrentAPI, "api.deepseek.com/") {
modelList = []string{chatBody.Model, "deepseek-chat", "deepseek-reasoner"}
} else if strings.Contains(cfg.CurrentAPI, "openrouter.ai") {
modelList = ORFreeModels
} else { // would match on localhost, but what if llama.cpp is served non-locally?
modelList = LocalModels
}
// Prepare model list dropdown
modelList := []string{chatBody.Model, "deepseek-chat", "deepseek-reasoner"}
modelList = append(modelList, ORFreeModels...)
addListPopupRow("Select a model", modelList, chatBody.Model, func(option string) {
chatBody.Model = option
})