Chore: remove AutoCleanToolCallsFromCtx, atomic model color

This commit is contained in:
Grail Finder
2026-03-08 06:45:51 +03:00
parent 4f0bce50c5
commit 23cb8f2578
6 changed files with 25 additions and 34 deletions

19
bot.go
View File

@@ -851,7 +851,7 @@ out:
if thinkingCollapsed {
// Show placeholder immediately when thinking starts in collapsed mode
fmt.Fprint(textView, "[yellow::i][thinking... (press Alt+T to expand)][-:-:-]")
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
respText.WriteString(chunk)
@@ -866,7 +866,7 @@ out:
// Thinking already displayed as placeholder, just update respText
respText.WriteString(chunk)
justExitedThinkingCollapsed = true
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
continue
@@ -888,7 +888,7 @@ out:
respText.WriteString(chunk)
// Update the message in chatBody.Messages so it persists during Alt+T
chatBody.Messages[msgIdx].Content = respText.String()
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
// Send chunk to audio stream handler
@@ -898,7 +898,7 @@ out:
case toolChunk := <-openAIToolChan:
fmt.Fprint(textView, toolChunk)
toolResp.WriteString(toolChunk)
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
case <-streamDone:
@@ -906,7 +906,7 @@ out:
chunk := <-chunkChan
fmt.Fprint(textView, chunk)
respText.WriteString(chunk)
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
if cfg.TTS_ENABLED {
@@ -1394,9 +1394,6 @@ func updateModelLists() {
localModelsMu.Lock()
LocalModels = ml
localModelsMu.Unlock()
for statusLineWidget == nil {
time.Sleep(time.Millisecond * 100)
}
// set already loaded model in llama.cpp
if !isLocalLlamacpp() {
return
@@ -1408,7 +1405,7 @@ func updateModelLists() {
m := strings.TrimPrefix(LocalModels[i], models.LoadedMark)
cfg.CurrentModel = m
chatBody.Model = m
cachedModelColor = "green"
cachedModelColor.Store("green")
updateStatusLine()
updateToolCapabilities()
app.Draw()
@@ -1546,8 +1543,8 @@ func init() {
}
}
}
// Initialize scrollToEndEnabled based on config
scrollToEndEnabled = cfg.AutoScrollEnabled
// atomic default values
cachedModelColor.Store("orange")
go chatWatcher(ctx)
initTUI()
initTools()

View File

@@ -27,7 +27,6 @@ type Config struct {
WriteNextMsgAs string
WriteNextMsgAsCompletionAgent string
SkipLLMResp bool
AutoCleanToolCallsFromCtx bool `toml:"AutoCleanToolCallsFromCtx"`
DBPATH string `toml:"DBPATH"`
FilePickerDir string `toml:"FilePickerDir"`
FilePickerExts string `toml:"FilePickerExts"`

View File

@@ -63,9 +63,6 @@ This document explains how to set up and configure the application using the `co
#### AutoScrollEnabled (`true`)
- Whether to automatically scroll the chat window while the LLM streams its response.
#### AutoCleanToolCallsFromCtx (`false`)
- Whether to automatically clean tool calls from the conversation context to manage token usage.
### RAG (Retrieval Augmented Generation) Settings
#### EmbedURL (`"http://localhost:8082/v1/embeddings"`)

View File

@@ -12,6 +12,7 @@ import (
"slices"
"strconv"
"strings"
"sync/atomic"
"time"
"unicode"
@@ -19,7 +20,8 @@ import (
)
// Cached model color - updated by background goroutine
var cachedModelColor string = "orange"
// var cachedModelColor string = "orange"
var cachedModelColor atomic.Value
// startModelColorUpdater starts a background goroutine that periodically updates
// the cached model color. Only runs HTTP requests for local llama.cpp APIs.
@@ -38,20 +40,20 @@ func startModelColorUpdater() {
// updateCachedModelColor updates the global cachedModelColor variable
func updateCachedModelColor() {
if !isLocalLlamacpp() {
cachedModelColor = "orange"
cachedModelColor.Store("orange")
return
}
// Check if model is loaded
loaded, err := isModelLoaded(chatBody.Model)
if err != nil {
// On error, assume not loaded (red)
cachedModelColor = "red"
cachedModelColor.Store("red")
return
}
if loaded {
cachedModelColor = "green"
cachedModelColor.Store("green")
} else {
cachedModelColor = "red"
cachedModelColor.Store("red")
}
}
@@ -107,7 +109,7 @@ func refreshChatDisplay() {
textView.SetText(displayText)
colorText()
updateStatusLine()
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
}
@@ -332,7 +334,7 @@ func isLocalLlamacpp() bool {
// The cached value is updated by a background goroutine every 5 seconds.
// For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
func getModelColor() string {
return cachedModelColor
return cachedModelColor.Load().(string)
}
func makeStatusLine() string {
@@ -539,7 +541,7 @@ func executeCommandAndDisplay(cmdText string) {
cmdText = strings.TrimSpace(cmdText)
if cmdText == "" {
fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n")
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
colorText()
@@ -571,7 +573,7 @@ func executeCommandAndDisplay(cmdText string) {
Content: "$ " + cmdText + "\n\n" + outputContent,
}
chatBody.Messages = append(chatBody.Messages, combinedMsg)
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
colorText()
@@ -586,7 +588,7 @@ func executeCommandAndDisplay(cmdText string) {
Content: "$ " + cmdText + "\n\n" + outputContent,
}
chatBody.Messages = append(chatBody.Messages, combinedMsg)
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
colorText()
@@ -634,7 +636,7 @@ func executeCommandAndDisplay(cmdText string) {
}
chatBody.Messages = append(chatBody.Messages, combinedMsg)
// Scroll to end and update colors
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
colorText()

View File

@@ -121,9 +121,6 @@ func makePropsTable(props map[string]float32) *tview.Table {
addCheckboxRow("TTS Enabled", cfg.TTS_ENABLED, func(checked bool) {
cfg.TTS_ENABLED = checked
})
addCheckboxRow("Auto clean tool calls from context", cfg.AutoCleanToolCallsFromCtx, func(checked bool) {
cfg.AutoCleanToolCallsFromCtx = checked
})
addCheckboxRow("Enable Mouse", cfg.EnableMouse, func(checked bool) {
cfg.EnableMouse = checked
// Reconfigure the app's mouse setting

9
tui.go
View File

@@ -42,7 +42,6 @@ var (
confirmPageName = "confirm"
fullscreenMode bool
positionVisible bool = true
scrollToEndEnabled bool = true
// pages
historyPage = "historyPage"
agentPage = "agentPage"
@@ -634,7 +633,7 @@ func initTUI() {
updateStatusLine()
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText()
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
// init sysmap
@@ -663,9 +662,9 @@ func initTUI() {
}
if event.Key() == tcell.KeyRune && event.Rune() == '2' && event.Modifiers()&tcell.ModAlt != 0 {
// toggle auto-scrolling
scrollToEndEnabled = !scrollToEndEnabled
cfg.AutoScrollEnabled = !cfg.AutoScrollEnabled
status := "disabled"
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
status = "enabled"
}
showToast("autoscroll", "Auto-scrolling "+status)
@@ -1139,7 +1138,7 @@ func initTUI() {
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
nl, len(chatBody.Messages), persona, msgText)
textArea.SetText("", true)
if scrollToEndEnabled {
if cfg.AutoScrollEnabled {
textView.ScrollToEnd()
}
colorText()