Enhance: onnx config vars

This commit is contained in:
Grail Finder
2026-03-05 19:20:21 +03:00
parent c2c107c786
commit ac8c8bb055
5 changed files with 43 additions and 13 deletions

7
bot.go
View File

@@ -1393,12 +1393,13 @@ func updateModelLists() {
}
}
// if llama.cpp started after gf-lt?
localModelsMu.Lock()
LocalModels, err = fetchLCPModelsWithLoadStatus()
localModelsMu.Unlock()
ml, err := fetchLCPModelsWithLoadStatus()
if err != nil {
logger.Warn("failed to fetch llama.cpp models", "error", err)
}
localModelsMu.Lock()
LocalModels = ml
localModelsMu.Unlock()
// set already loaded model in llama.cpp
if strings.Contains(cfg.CurrentAPI, "localhost") || strings.Contains(cfg.CurrentAPI, "127.0.0.1") {
localModelsMu.Lock()