16 Commits

Author | SHA1 | Message | Date
Grail Finder | a1b5f9cdc5 | Enha: rag tuning and tests | 2026-03-08 16:12:32 +03:00
Grail Finder | e74ff8c03f | Enha (rag): semantic hybrid search | 2026-03-08 13:27:09 +03:00
Grail Finder | b6e802c12e | Enha (rag): bigger default batch | 2026-03-08 11:38:56 +03:00
Grail Finder | c0d5db29a5 | Chore (rag): x to exit label | 2026-03-08 10:28:30 +03:00
Grail Finder | 6ed96c9bd3 | Fix (ctrl+w): avoid msg duplication | 2026-03-08 09:42:07 +03:00
Grail Finder | b5f0eabeea | Fix (rag): do not hang on delete | 2026-03-08 09:00:24 +03:00
Grail Finder | e0201886f8 | Enha (rag): keep page open until user closes it | 2026-03-08 08:50:50 +03:00
Grail Finder | 5b175c12a6 | Chore: update readme | 2026-03-08 07:29:04 +03:00
Grail Finder | c200c9328c | Enha: botresp, toolresp to atomic | 2026-03-08 07:13:27 +03:00
Grail Finder | 23cb8f2578 | Chore: remove AutoCleanToolCallsFromCtx, atomic model color | 2026-03-08 06:45:51 +03:00
Grail Finder | 4f0bce50c5 | Chore: one init for clear call order | 2026-03-07 19:11:13 +03:00
Grail Finder | bf655a1087 | Enha: llama.cpp on non localhost | 2026-03-07 18:42:12 +03:00
Grail Finder | c8f00198d6 | Dep (stt): use ffmpeg instead of portaudio | 2026-03-07 18:13:11 +03:00
Grail Finder | c5a24b2a3f | Enha: google-tts replay speed | 2026-03-07 16:37:09 +03:00
Grail Finder | 0f0c43f327 | Dep: remove beep/portaudio dependancy | 2026-03-07 16:24:39 +03:00
Grail Finder | 0e55e44f62 | Enha (kokoro): use ffplay instead of beep (portaudio) | 2026-03-07 15:41:39 +03:00
30 changed files with 2120 additions and 1565 deletions


@@ -143,11 +143,10 @@ build-whisper: ## Build whisper.cpp from source in batteries directory
 download-whisper-model: ## Download Whisper model for STT in batteries directory
 	@echo "Downloading Whisper model for STT..."
-	@if [ ! -d "batteries/whisper.cpp" ]; then \
-		echo "Please run 'make setup-whisper' first to clone the repository."; \
-		exit 1; \
+	@if [ ! -d "batteries/whisper.cpp/models" ]; then \
+		mkdir -p "batteries/whisper.cpp/models" \
 	fi
-	@cd batteries/whisper.cpp && bash ./models/download-ggml-model.sh large-v3-turbo-q5_0
+	curl -o batteries/whisper.cpp/models/ggml-large-v3-turbo-q5_0.bin -L "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q5_0.bin?download=true"
 	@echo "Whisper model downloaded successfully!"
 # Docker targets for STT/TTS services (in batteries directory)
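The rewritten target above replaces whisper.cpp's download script with a direct curl call; the -L flag matters because the Hugging Face "resolve" URL answers with a redirect to the actual file. Purely as an illustration (not code from this repo), the same download in Go looks like this — http.Get follows the redirect automatically:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

// downloadFile streams url into dest, mirroring what the Makefile's curl line does.
func downloadFile(url, dest string) error {
	resp, err := http.Get(url) // follows the Hugging Face redirect automatically
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	out, err := os.Create(dest)
	if err != nil {
		return err
	}
	defer out.Close()
	_, err = io.Copy(out, resp.Body)
	return err
}

func main() {
	url := "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q5_0.bin?download=true"
	if err := downloadFile(url, "batteries/whisper.cpp/models/ggml-large-v3-turbo-q5_0.bin"); err != nil {
		fmt.Fprintln(os.Stderr, "download failed:", err)
		os.Exit(1)
	}
}
```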


@@ -13,6 +13,12 @@ made with use of [tview](https://github.com/rivo/tview)
 #### how it looks
 ![how it looks](assets/ex01.png)
+#### dependencies
+- make
+- go
+- ffmpeg (extra)
 #### how to install
 (requires golang)
 clone the project


@@ -6,19 +6,27 @@ services:
     ports:
       - "8081:8081"
     volumes:
-      - whisper_models:/app/models
+      - ./whisper.cpp/models/ggml-large-v3-turbo-q5_0.bin:/app/models/ggml-large-v3-turbo-q5_0.bin
     working_dir: /app
     entrypoint: ""
     command: >
       sh -c "
-      if [ ! -f /app/models/ggml-large-v3-turbo.bin ]; then
-        echo 'Downloading ggml-large-v3-turbo model...'
-        ./download-ggml-model.sh large-v3-turbo /app/models
+      if [ ! -f /app/models/ggml-large-v3-turbo-q5_0.bin ]; then
+        echo 'Downloading ggml-large-v3-turbo-q5_0 model...'
+        curl -o /app/models/ggml-large-v3-turbo-q5_0.bin -L "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q5_0.bin?download=true"
       fi &&
-      ./build/bin/whisper-server -m /app/models/ggml-large-v3-turbo.bin -t 4 -p 1 --port 8081 --host 0.0.0.0
+      ./build/bin/whisper-server -m /app/models/ggml-large-v3-turbo-q5_0.bin -t 4 -p 1 --port 8081 --host 0.0.0.0
       "
     environment:
       - WHISPER_LOG_LEVEL=3
+    # For GPU support, uncomment the following lines:
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1
+              capabilities: [gpu]
     # Restart policy in case the service fails
     restart: unless-stopped
@@ -45,7 +53,5 @@ services:
 volumes:
   models:
     driver: local
-  audio:
-    driver: local
   whisper_models:
     driver: local

bot.go

@@ -16,12 +16,12 @@ import (
"log/slog" "log/slog"
"net" "net"
"net/http" "net/http"
"net/url"
"os" "os"
"regexp" "regexp"
"slices" "slices"
"strconv" "strconv"
"strings" "strings"
"sync"
"sync/atomic" "sync/atomic"
"time" "time"
) )
@@ -37,11 +37,11 @@ var (
chunkChan = make(chan string, 10) chunkChan = make(chan string, 10)
openAIToolChan = make(chan string, 10) openAIToolChan = make(chan string, 10)
streamDone = make(chan bool, 1) streamDone = make(chan bool, 1)
chatBody *models.SafeChatBody chatBody *models.ChatBody
store storage.FullRepo store storage.FullRepo
defaultFirstMsg = "Hello! What can I do for you?" defaultFirstMsg = "Hello! What can I do for you?"
defaultStarter = []models.RoleMsg{} defaultStarter = []models.RoleMsg{}
interruptResp = false interruptResp atomic.Bool
ragger *rag.RAG ragger *rag.RAG
chunkParser ChunkParser chunkParser ChunkParser
lastToolCall *models.FuncCall lastToolCall *models.FuncCall
@@ -49,6 +49,7 @@ var (
//nolint:unused // TTS_ENABLED conditionally uses this //nolint:unused // TTS_ENABLED conditionally uses this
orator Orator orator Orator
asr STT asr STT
localModelsMu sync.RWMutex
defaultLCPProps = map[string]float32{ defaultLCPProps = map[string]float32{
"temperature": 0.8, "temperature": 0.8,
"dry_multiplier": 0.0, "dry_multiplier": 0.0,
@@ -63,17 +64,11 @@ var (
"google/gemma-3-27b-it:free", "google/gemma-3-27b-it:free",
"meta-llama/llama-3.3-70b-instruct:free", "meta-llama/llama-3.3-70b-instruct:free",
} }
LocalModels atomic.Value // stores []string LocalModels = []string{}
localModelsData atomic.Value // stores *models.LCPModels localModelsData *models.LCPModels
orModelsData atomic.Value // stores *models.ORModels orModelsData *models.ORModels
) )
func init() {
LocalModels.Store([]string{})
localModelsData.Store((*models.LCPModels)(nil))
orModelsData.Store((*models.ORModels)(nil))
}
var thinkBlockRE = regexp.MustCompile(`(?s)<think>.*?</think>`) var thinkBlockRE = regexp.MustCompile(`(?s)<think>.*?</think>`)
// parseKnownToTag extracts known_to list from content using configured tag. // parseKnownToTag extracts known_to list from content using configured tag.
@@ -258,22 +253,17 @@ func createClient(connectTimeout time.Duration) *http.Client {
} }
func warmUpModel() { func warmUpModel() {
u, err := url.Parse(cfg.CurrentAPI) if !isLocalLlamacpp() {
if err != nil {
return
}
host := u.Hostname()
if host != "localhost" && host != "127.0.0.1" && host != "::1" {
return return
} }
// Check if model is already loaded // Check if model is already loaded
loaded, err := isModelLoaded(chatBody.GetModel()) loaded, err := isModelLoaded(chatBody.Model)
if err != nil { if err != nil {
logger.Debug("failed to check model status", "model", chatBody.GetModel(), "error", err) logger.Debug("failed to check model status", "model", chatBody.Model, "error", err)
// Continue with warmup attempt anyway // Continue with warmup attempt anyway
} }
if loaded { if loaded {
showToast("model already loaded", "Model "+chatBody.GetModel()+" is already loaded.") showToast("model already loaded", "Model "+chatBody.Model+" is already loaded.")
return return
} }
go func() { go func() {
@@ -282,7 +272,7 @@ func warmUpModel() {
switch { switch {
case strings.HasSuffix(cfg.CurrentAPI, "/completion"): case strings.HasSuffix(cfg.CurrentAPI, "/completion"):
// Old completion endpoint // Old completion endpoint
req := models.NewLCPReq(".", chatBody.GetModel(), nil, map[string]float32{ req := models.NewLCPReq(".", chatBody.Model, nil, map[string]float32{
"temperature": 0.8, "temperature": 0.8,
"dry_multiplier": 0.0, "dry_multiplier": 0.0,
"min_p": 0.05, "min_p": 0.05,
@@ -294,7 +284,7 @@ func warmUpModel() {
// OpenAI-compatible chat endpoint // OpenAI-compatible chat endpoint
req := models.OpenAIReq{ req := models.OpenAIReq{
ChatBody: &models.ChatBody{ ChatBody: &models.ChatBody{
Model: chatBody.GetModel(), Model: chatBody.Model,
Messages: []models.RoleMsg{ Messages: []models.RoleMsg{
{Role: "system", Content: "."}, {Role: "system", Content: "."},
}, },
@@ -318,7 +308,7 @@ func warmUpModel() {
} }
resp.Body.Close() resp.Body.Close()
// Start monitoring for model load completion // Start monitoring for model load completion
monitorModelLoad(chatBody.GetModel()) monitorModelLoad(chatBody.Model)
}() }()
} }
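warmUpModel now delegates its localhost detection to an isLocalLlamacpp() helper whose body is not part of this diff. A minimal sketch of what such a check could look like, mirroring the url.Parse/hostname logic removed above (the real helper may additionally look at which API kind is configured):

```go
package main

import (
	"fmt"
	"net/url"
)

// isLocalAPI reports whether an API base URL points at the local machine.
// Hypothetical sketch based on the hostname check removed from warmUpModel;
// the repo's actual isLocalLlamacpp is not shown in this diff.
func isLocalAPI(apiURL string) bool {
	u, err := url.Parse(apiURL)
	if err != nil {
		return false
	}
	switch u.Hostname() {
	case "localhost", "127.0.0.1", "::1":
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isLocalAPI("http://localhost:8080/v1/chat/completions")) // true
	fmt.Println(isLocalAPI("http://192.168.1.20:8080/completion"))       // false
}
```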
@@ -361,7 +351,7 @@ func fetchORModels(free bool) ([]string, error) {
if err := json.NewDecoder(resp.Body).Decode(data); err != nil { if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
return nil, err return nil, err
} }
orModelsData.Store(data) orModelsData = data
freeModels := data.ListModels(free) freeModels := data.ListModels(free)
return freeModels, nil return freeModels, nil
} }
@@ -423,7 +413,7 @@ func fetchLCPModelsWithStatus() (*models.LCPModels, error) {
if err := json.NewDecoder(resp.Body).Decode(data); err != nil { if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
return nil, err return nil, err
} }
localModelsData.Store(data) localModelsData = data
return data, nil return data, nil
} }
@@ -654,7 +644,7 @@ func sendMsgToLLM(body io.Reader) {
// continue // continue
} }
if len(line) <= 1 { if len(line) <= 1 {
if interruptResp { if interruptResp.Load() {
goto interrupt // get unstuck from bad connection goto interrupt // get unstuck from bad connection
} }
continue // skip \n continue // skip \n
@@ -747,8 +737,7 @@ func sendMsgToLLM(body io.Reader) {
lastToolCall.ID = chunk.ToolID lastToolCall.ID = chunk.ToolID
} }
interrupt: interrupt:
if interruptResp { // read bytes, so it would not get into beginning of the next req if interruptResp.Load() { // read bytes, so it would not get into beginning of the next req
// interruptResp = false
logger.Info("interrupted bot response", "chunk_counter", counter) logger.Info("interrupted bot response", "chunk_counter", counter)
streamDone <- true streamDone <- true
break break
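This hunk is part of the "botresp, toolresp to atomic" change: interruptResp (and, further down, botRespMode and toolRunningMode) move from plain bools to sync/atomic values, so the TUI goroutine can flip the flag while the streaming goroutine reads it without a data race. A minimal standalone sketch of the pattern:

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// interruptResp replaces a plain bool that was read and written from different goroutines.
var interruptResp atomic.Bool

func stream() {
	for i := 0; i < 20; i++ {
		if interruptResp.Load() { // safe concurrent read
			fmt.Println("stream interrupted at chunk", i)
			return
		}
		time.Sleep(50 * time.Millisecond)
	}
	fmt.Println("stream finished")
}

func main() {
	go stream()
	time.Sleep(120 * time.Millisecond)
	interruptResp.Store(true) // safe concurrent write from the UI side
	time.Sleep(100 * time.Millisecond)
}
```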
@@ -781,14 +770,14 @@ func showSpinner() {
if cfg.WriteNextMsgAsCompletionAgent != "" { if cfg.WriteNextMsgAsCompletionAgent != "" {
botPersona = cfg.WriteNextMsgAsCompletionAgent botPersona = cfg.WriteNextMsgAsCompletionAgent
} }
for botRespMode || toolRunningMode { for botRespMode.Load() || toolRunningMode.Load() {
time.Sleep(400 * time.Millisecond) time.Sleep(400 * time.Millisecond)
spin := i % len(spinners) spin := i % len(spinners)
app.QueueUpdateDraw(func() { app.QueueUpdateDraw(func() {
switch { switch {
case toolRunningMode: case toolRunningMode.Load():
textArea.SetTitle(spinners[spin] + " tool") textArea.SetTitle(spinners[spin] + " tool")
case botRespMode: case botRespMode.Load():
textArea.SetTitle(spinners[spin] + " " + botPersona + " (F6 to interrupt)") textArea.SetTitle(spinners[spin] + " " + botPersona + " (F6 to interrupt)")
default: default:
textArea.SetTitle(spinners[spin] + " input") textArea.SetTitle(spinners[spin] + " input")
@@ -802,8 +791,8 @@ func showSpinner() {
} }
func chatRound(r *models.ChatRoundReq) error { func chatRound(r *models.ChatRoundReq) error {
interruptResp = false interruptResp.Store(false)
botRespMode = true botRespMode.Store(true)
go showSpinner() go showSpinner()
updateStatusLine() updateStatusLine()
botPersona := cfg.AssistantRole botPersona := cfg.AssistantRole
@@ -811,7 +800,7 @@ func chatRound(r *models.ChatRoundReq) error {
botPersona = cfg.WriteNextMsgAsCompletionAgent botPersona = cfg.WriteNextMsgAsCompletionAgent
} }
defer func() { defer func() {
botRespMode = false botRespMode.Store(false)
ClearImageAttachment() ClearImageAttachment()
}() }()
// check that there is a model set to use if is not local // check that there is a model set to use if is not local
@@ -826,10 +815,10 @@ func chatRound(r *models.ChatRoundReq) error {
} }
go sendMsgToLLM(reader) go sendMsgToLLM(reader)
logger.Debug("looking at vars in chatRound", "msg", r.UserMsg, "regen", r.Regen, "resume", r.Resume) logger.Debug("looking at vars in chatRound", "msg", r.UserMsg, "regen", r.Regen, "resume", r.Resume)
msgIdx := chatBody.GetMessageCount() msgIdx := len(chatBody.Messages)
if !r.Resume { if !r.Resume {
// Add empty message to chatBody immediately so it persists during Alt+T toggle // Add empty message to chatBody immediately so it persists during Alt+T toggle
chatBody.AppendMessage(models.RoleMsg{ chatBody.Messages = append(chatBody.Messages, models.RoleMsg{
Role: botPersona, Content: "", Role: botPersona, Content: "",
}) })
nl := "\n\n" nl := "\n\n"
@@ -841,7 +830,7 @@ func chatRound(r *models.ChatRoundReq) error {
} }
fmt.Fprintf(textView, "%s[-:-:b](%d) %s[-:-:-]\n", nl, msgIdx, roleToIcon(botPersona)) fmt.Fprintf(textView, "%s[-:-:b](%d) %s[-:-:-]\n", nl, msgIdx, roleToIcon(botPersona))
} else { } else {
msgIdx = chatBody.GetMessageCount() - 1 msgIdx = len(chatBody.Messages) - 1
} }
respText := strings.Builder{} respText := strings.Builder{}
toolResp := strings.Builder{} toolResp := strings.Builder{}
@@ -862,7 +851,7 @@ out:
if thinkingCollapsed { if thinkingCollapsed {
// Show placeholder immediately when thinking starts in collapsed mode // Show placeholder immediately when thinking starts in collapsed mode
fmt.Fprint(textView, "[yellow::i][thinking... (press Alt+T to expand)][-:-:-]") fmt.Fprint(textView, "[yellow::i][thinking... (press Alt+T to expand)][-:-:-]")
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
respText.WriteString(chunk) respText.WriteString(chunk)
@@ -877,7 +866,7 @@ out:
// Thinking already displayed as placeholder, just update respText // Thinking already displayed as placeholder, just update respText
respText.WriteString(chunk) respText.WriteString(chunk)
justExitedThinkingCollapsed = true justExitedThinkingCollapsed = true
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
continue continue
@@ -898,11 +887,10 @@ out:
fmt.Fprint(textView, chunk) fmt.Fprint(textView, chunk)
respText.WriteString(chunk) respText.WriteString(chunk)
// Update the message in chatBody.Messages so it persists during Alt+T // Update the message in chatBody.Messages so it persists during Alt+T
chatBody.UpdateMessageFunc(msgIdx, func(msg models.RoleMsg) models.RoleMsg { if !r.Resume {
msg.Content = respText.String() chatBody.Messages[msgIdx].Content += respText.String()
return msg }
}) if cfg.AutoScrollEnabled {
if scrollToEndEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
// Send chunk to audio stream handler // Send chunk to audio stream handler
@@ -912,7 +900,7 @@ out:
case toolChunk := <-openAIToolChan: case toolChunk := <-openAIToolChan:
fmt.Fprint(textView, toolChunk) fmt.Fprint(textView, toolChunk)
toolResp.WriteString(toolChunk) toolResp.WriteString(toolChunk)
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
case <-streamDone: case <-streamDone:
@@ -920,7 +908,7 @@ out:
chunk := <-chunkChan chunk := <-chunkChan
fmt.Fprint(textView, chunk) fmt.Fprint(textView, chunk)
respText.WriteString(chunk) respText.WriteString(chunk)
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
if cfg.TTS_ENABLED { if cfg.TTS_ENABLED {
@@ -942,39 +930,36 @@ out:
} }
lastRespStats = nil lastRespStats = nil
} }
botRespMode = false botRespMode.Store(false)
if r.Resume { if r.Resume {
chatBody.UpdateMessageFunc(chatBody.GetMessageCount()-1, func(msg models.RoleMsg) models.RoleMsg { chatBody.Messages[len(chatBody.Messages)-1].Content += respText.String()
msg.Content += respText.String() updatedMsg := chatBody.Messages[len(chatBody.Messages)-1]
processedMsg := processMessageTag(&msg) processedMsg := processMessageTag(&updatedMsg)
if msgStats != nil && processedMsg.Role != cfg.ToolRole { chatBody.Messages[len(chatBody.Messages)-1] = *processedMsg
processedMsg.Stats = msgStats if msgStats != nil && chatBody.Messages[len(chatBody.Messages)-1].Role != cfg.ToolRole {
chatBody.Messages[len(chatBody.Messages)-1].Stats = msgStats
} }
return *processedMsg
})
} else { } else {
chatBody.UpdateMessageFunc(msgIdx, func(msg models.RoleMsg) models.RoleMsg { chatBody.Messages[msgIdx].Content = respText.String()
msg.Content = respText.String() processedMsg := processMessageTag(&chatBody.Messages[msgIdx])
processedMsg := processMessageTag(&msg) chatBody.Messages[msgIdx] = *processedMsg
if msgStats != nil && processedMsg.Role != cfg.ToolRole { if msgStats != nil && chatBody.Messages[msgIdx].Role != cfg.ToolRole {
processedMsg.Stats = msgStats chatBody.Messages[msgIdx].Stats = msgStats
} }
return *processedMsg stopTTSIfNotForUser(&chatBody.Messages[msgIdx])
})
stopTTSIfNotForUser(&chatBody.GetMessages()[msgIdx])
} }
cleanChatBody() cleanChatBody()
refreshChatDisplay() refreshChatDisplay()
updateStatusLine() updateStatusLine()
// bot msg is done; // bot msg is done;
// now check it for func call // now check it for func call
// logChat(activeChatName, chatBody.GetMessages()) // logChat(activeChatName, chatBody.Messages)
if err := updateStorageChat(activeChatName, chatBody.GetMessages()); err != nil { if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
logger.Warn("failed to update storage", "error", err, "name", activeChatName) logger.Warn("failed to update storage", "error", err, "name", activeChatName)
} }
// Strip think blocks before parsing for tool calls // Strip think blocks before parsing for tool calls
respTextNoThink := thinkBlockRE.ReplaceAllString(respText.String(), "") respTextNoThink := thinkBlockRE.ReplaceAllString(respText.String(), "")
if interruptResp { if interruptResp.Load() {
return nil return nil
} }
if findCall(respTextNoThink, toolResp.String()) { if findCall(respTextNoThink, toolResp.String()) {
@@ -984,8 +969,8 @@ out:
// If so, trigger those characters to respond if that char is not controlled by user // If so, trigger those characters to respond if that char is not controlled by user
// perhaps we should have narrator role to determine which char is next to act // perhaps we should have narrator role to determine which char is next to act
if cfg.AutoTurn { if cfg.AutoTurn {
lastMsg, ok := chatBody.GetLastMessage() lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
if ok && len(lastMsg.KnownTo) > 0 { if len(lastMsg.KnownTo) > 0 {
triggerPrivateMessageResponses(&lastMsg) triggerPrivateMessageResponses(&lastMsg)
} }
} }
@@ -994,15 +979,13 @@ out:
// cleanChatBody removes messages with null or empty content to prevent API issues // cleanChatBody removes messages with null or empty content to prevent API issues
func cleanChatBody() { func cleanChatBody() {
if chatBody == nil || chatBody.GetMessageCount() == 0 { if chatBody == nil || chatBody.Messages == nil {
return return
} }
// Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false) // Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false)
// /completion msg where part meant for user and other part tool call // /completion msg where part meant for user and other part tool call
// chatBody.Messages = cleanToolCalls(chatBody.Messages) // chatBody.Messages = cleanToolCalls(chatBody.Messages)
chatBody.WithLock(func(cb *models.ChatBody) { chatBody.Messages = consolidateAssistantMessages(chatBody.Messages)
cb.Messages = consolidateAssistantMessages(cb.Messages)
})
} }
// convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings. // convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings.
@@ -1102,7 +1085,7 @@ func findCall(msg, toolCall string) bool {
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err), Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
ToolCallID: lastToolCall.ID, // Use the stored tool call ID ToolCallID: lastToolCall.ID, // Use the stored tool call ID
} }
chatBody.AppendMessage(toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Clear the stored tool call ID after using it (no longer needed) // Clear the stored tool call ID after using it (no longer needed)
// Trigger the assistant to continue processing with the error message // Trigger the assistant to continue processing with the error message
crr := &models.ChatRoundReq{ crr := &models.ChatRoundReq{
@@ -1139,7 +1122,7 @@ func findCall(msg, toolCall string) bool {
Role: cfg.ToolRole, Role: cfg.ToolRole,
Content: "Error processing tool call: no valid JSON found. Please check the JSON format.", Content: "Error processing tool call: no valid JSON found. Please check the JSON format.",
} }
chatBody.AppendMessage(toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
crr := &models.ChatRoundReq{ crr := &models.ChatRoundReq{
Role: cfg.AssistantRole, Role: cfg.AssistantRole,
} }
@@ -1156,8 +1139,8 @@ func findCall(msg, toolCall string) bool {
Role: cfg.ToolRole, Role: cfg.ToolRole,
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err), Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
} }
chatBody.AppendMessage(toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", chatBody.GetMessageCount()) logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", len(chatBody.Messages))
// Trigger the assistant to continue processing with the error message // Trigger the assistant to continue processing with the error message
// chatRound("", cfg.AssistantRole, tv, false, false) // chatRound("", cfg.AssistantRole, tv, false, false)
crr := &models.ChatRoundReq{ crr := &models.ChatRoundReq{
@@ -1175,23 +1158,17 @@ func findCall(msg, toolCall string) bool {
// we got here => last msg recognized as a tool call (correct or not) // we got here => last msg recognized as a tool call (correct or not)
// Use the tool call ID from streaming response (lastToolCall.ID) // Use the tool call ID from streaming response (lastToolCall.ID)
// Don't generate random ID - the ID should match between assistant message and tool response // Don't generate random ID - the ID should match between assistant message and tool response
lastMsgIdx := chatBody.GetMessageCount() - 1 lastMsgIdx := len(chatBody.Messages) - 1
if lastToolCall.ID != "" { if lastToolCall.ID != "" {
chatBody.UpdateMessageFunc(lastMsgIdx, func(msg models.RoleMsg) models.RoleMsg { chatBody.Messages[lastMsgIdx].ToolCallID = lastToolCall.ID
msg.ToolCallID = lastToolCall.ID
return msg
})
} }
// Store tool call info in the assistant message // Store tool call info in the assistant message
// Convert Args map to JSON string for storage // Convert Args map to JSON string for storage
chatBody.UpdateMessageFunc(lastMsgIdx, func(msg models.RoleMsg) models.RoleMsg { chatBody.Messages[lastMsgIdx].ToolCall = &models.ToolCall{
msg.ToolCall = &models.ToolCall{
ID: lastToolCall.ID, ID: lastToolCall.ID,
Name: lastToolCall.Name, Name: lastToolCall.Name,
Args: mapToString(lastToolCall.Args), Args: mapToString(lastToolCall.Args),
} }
return msg
})
// call a func // call a func
_, ok := fnMap[fc.Name] _, ok := fnMap[fc.Name]
if !ok { if !ok {
@@ -1202,8 +1179,8 @@ func findCall(msg, toolCall string) bool {
Content: m, Content: m,
ToolCallID: lastToolCall.ID, // Use the stored tool call ID ToolCallID: lastToolCall.ID, // Use the stored tool call ID
} }
chatBody.AppendMessage(toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", chatBody.GetMessageCount()) logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages))
// Clear the stored tool call ID after using it // Clear the stored tool call ID after using it
lastToolCall.ID = "" lastToolCall.ID = ""
// Trigger the assistant to continue processing with the new tool response // Trigger the assistant to continue processing with the new tool response
@@ -1217,9 +1194,9 @@ func findCall(msg, toolCall string) bool {
} }
// Show tool call progress indicator before execution // Show tool call progress indicator before execution
fmt.Fprintf(textView, "\n[yellow::i][tool: %s...][-:-:-]", fc.Name) fmt.Fprintf(textView, "\n[yellow::i][tool: %s...][-:-:-]", fc.Name)
toolRunningMode = true toolRunningMode.Store(true)
resp := callToolWithAgent(fc.Name, fc.Args) resp := callToolWithAgent(fc.Name, fc.Args)
toolRunningMode = false toolRunningMode.Store(false)
toolMsg := string(resp) toolMsg := string(resp)
logger.Info("llm used a tool call", "tool_name", fc.Name, "too_args", fc.Args, "id", fc.ID, "tool_resp", toolMsg) logger.Info("llm used a tool call", "tool_name", fc.Name, "too_args", fc.Args, "id", fc.ID, "tool_resp", toolMsg)
// Create tool response message with the proper tool_call_id // Create tool response message with the proper tool_call_id
@@ -1274,9 +1251,9 @@ func findCall(msg, toolCall string) bool {
} }
} }
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n", fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
"\n\n", chatBody.GetMessageCount(), cfg.ToolRole, toolResponseMsg.GetText()) "\n\n", len(chatBody.Messages), cfg.ToolRole, toolResponseMsg.GetText())
chatBody.AppendMessage(toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", chatBody.GetMessageCount()) logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages))
// Clear the stored tool call ID after using it // Clear the stored tool call ID after using it
lastToolCall.ID = "" lastToolCall.ID = ""
// Trigger the assistant to continue processing with the new tool response // Trigger the assistant to continue processing with the new tool response
@@ -1406,7 +1383,7 @@ func charToStart(agentName string, keepSysP bool) bool {
func updateModelLists() { func updateModelLists() {
var err error var err error
if cfg.OpenRouterToken != "" { if cfg.OpenRouterToken != "" {
_, err := fetchORModels(true) ORFreeModels, err = fetchORModels(true)
if err != nil { if err != nil {
logger.Warn("failed to fetch or models", "error", err) logger.Warn("failed to fetch or models", "error", err)
} }
@@ -1416,16 +1393,18 @@ func updateModelLists() {
if err != nil { if err != nil {
logger.Warn("failed to fetch llama.cpp models", "error", err) logger.Warn("failed to fetch llama.cpp models", "error", err)
} }
LocalModels.Store(ml) localModelsMu.Lock()
for statusLineWidget == nil { LocalModels = ml
time.Sleep(time.Millisecond * 100) localModelsMu.Unlock()
}
// set already loaded model in llama.cpp // set already loaded model in llama.cpp
if strings.Contains(cfg.CurrentAPI, "localhost") || strings.Contains(cfg.CurrentAPI, "127.0.0.1") { if !isLocalLlamacpp() {
modelList := LocalModels.Load().([]string) return
for i := range modelList { }
if strings.Contains(modelList[i], models.LoadedMark) { localModelsMu.Lock()
m := strings.TrimPrefix(modelList[i], models.LoadedMark) defer localModelsMu.Unlock()
for i := range LocalModels {
if strings.Contains(LocalModels[i], models.LoadedMark) {
m := strings.TrimPrefix(LocalModels[i], models.LoadedMark)
cfg.CurrentModel = m cfg.CurrentModel = m
chatBody.Model = m chatBody.Model = m
cachedModelColor.Store("green") cachedModelColor.Store("green")
@@ -1436,20 +1415,23 @@ func updateModelLists() {
} }
} }
} }
}
func refreshLocalModelsIfEmpty() { func refreshLocalModelsIfEmpty() {
models := LocalModels.Load().([]string) localModelsMu.RLock()
if len(models) > 0 { if len(LocalModels) > 0 {
localModelsMu.RUnlock()
return return
} }
localModelsMu.RUnlock()
// try to fetch // try to fetch
models, err := fetchLCPModels() models, err := fetchLCPModels()
if err != nil { if err != nil {
logger.Warn("failed to fetch llama.cpp models", "error", err) logger.Warn("failed to fetch llama.cpp models", "error", err)
return return
} }
LocalModels.Store(models) localModelsMu.Lock()
LocalModels = models
localModelsMu.Unlock()
} }
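Here LocalModels stops being an atomic.Value holding a []string and becomes an ordinary slice guarded by localModelsMu (sync.RWMutex): writers take the exclusive lock, readers the shared one. A small self-contained sketch of that access pattern (helper names are illustrative, not the repo's exact functions):

```go
package main

import (
	"fmt"
	"sync"
)

var (
	localModelsMu sync.RWMutex
	localModels   []string // guarded by localModelsMu
)

// setLocalModels replaces the cached model list under the write lock.
func setLocalModels(ml []string) {
	localModelsMu.Lock()
	defer localModelsMu.Unlock()
	localModels = ml
}

// snapshotLocalModels copies the list under the read lock so callers can
// iterate without holding the mutex.
func snapshotLocalModels() []string {
	localModelsMu.RLock()
	defer localModelsMu.RUnlock()
	return append([]string(nil), localModels...)
}

func main() {
	setLocalModels([]string{"llama-3.1-8b-instruct", "qwen2.5-7b-instruct"})
	fmt.Println(snapshotLocalModels())
}
```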
func summarizeAndStartNewChat() { func summarizeAndStartNewChat() {
@@ -1509,7 +1491,7 @@ func init() {
// load cards // load cards
basicCard.Role = cfg.AssistantRole basicCard.Role = cfg.AssistantRole
logLevel.Set(slog.LevelInfo) logLevel.Set(slog.LevelInfo)
logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel, AddSource: true})) logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel}))
store = storage.NewProviderSQL(cfg.DBPATH, logger) store = storage.NewProviderSQL(cfg.DBPATH, logger)
if store == nil { if store == nil {
cancel() cancel()
@@ -1533,11 +1515,11 @@ func init() {
} }
lastToolCall = &models.FuncCall{} lastToolCall = &models.FuncCall{}
lastChat := loadOldChatOrGetNew() lastChat := loadOldChatOrGetNew()
chatBody = models.NewSafeChatBody(&models.ChatBody{ chatBody = &models.ChatBody{
Model: "modelname", Model: "modelname",
Stream: true, Stream: true,
Messages: lastChat, Messages: lastChat,
}) }
choseChunkParser() choseChunkParser()
httpClient = createClient(time.Second * 90) httpClient = createClient(time.Second * 90)
if cfg.TTS_ENABLED { if cfg.TTS_ENABLED {
@@ -1563,57 +1545,9 @@ func init() {
} }
} }
} }
// Initialize scrollToEndEnabled based on config // atomic default values
scrollToEndEnabled = cfg.AutoScrollEnabled cachedModelColor.Store("orange")
go updateModelLists()
go chatWatcher(ctx) go chatWatcher(ctx)
} initTUI()
initTools()
func getValidKnowToRecipient(msg *models.RoleMsg) (string, bool) {
if cfg == nil || !cfg.CharSpecificContextEnabled {
return "", false
}
// case where all roles are in the tag => public message
cr := listChatRoles()
slices.Sort(cr)
slices.Sort(msg.KnownTo)
if slices.Equal(cr, msg.KnownTo) {
logger.Info("got msg with tag mentioning every role")
return "", false
}
// Check each character in the KnownTo list
for _, recipient := range msg.KnownTo {
if recipient == msg.Role || recipient == cfg.ToolRole {
// weird cases, skip
continue
}
// Skip if this is the user character (user handles their own turn)
// If user is in KnownTo, stop processing - it's the user's turn
if recipient == cfg.UserRole || recipient == cfg.WriteNextMsgAs {
return "", false
}
return recipient, true
}
return "", false
}
// triggerPrivateMessageResponses checks if a message was sent privately to specific characters
// and triggers those non-user characters to respond
func triggerPrivateMessageResponses(msg *models.RoleMsg) {
recipient, ok := getValidKnowToRecipient(msg)
if !ok || recipient == "" {
return
}
// Trigger the recipient character to respond
triggerMsg := recipient + ":\n"
// Send empty message so LLM continues naturally from the conversation
crr := &models.ChatRoundReq{
UserMsg: triggerMsg,
Role: recipient,
Resume: true,
}
fmt.Fprintf(textView, "\n[-:-:b](%d) ", len(chatBody.Messages))
fmt.Fprint(textView, roleToIcon(recipient))
fmt.Fprint(textView, "[-:-:-]\n")
chatRoundChan <- crr
} }


@@ -28,8 +28,8 @@ AutoScrollEnabled = true
 AutoCleanToolCallsFromCtx = false
 # rag settings
 RAGBatchSize = 1
-RAGWordLimit = 80
-RAGOverlapWords = 16
+RAGWordLimit = 250
+RAGOverlapWords = 25
 RAGDir = "ragimport"
 # extra tts
 TTS_ENABLED = false
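The new defaults mean RAG chunks of up to 250 words with a 25-word overlap between consecutive chunks, instead of 80/16. The project's actual chunker is not part of this diff; the sketch below only illustrates what limit/overlap parameters like these do to a word stream:

```go
package main

import (
	"fmt"
	"strings"
)

// chunkWords splits text into windows of at most limit words, where each
// window starts overlap words before the previous one ended. Illustration
// of RAGWordLimit/RAGOverlapWords only, not the repo's chunking code.
func chunkWords(text string, limit, overlap int) []string {
	words := strings.Fields(text)
	if limit <= 0 || overlap < 0 || overlap >= limit {
		return []string{text}
	}
	step := limit - overlap
	var chunks []string
	for start := 0; start < len(words); start += step {
		end := start + limit
		if end > len(words) {
			end = len(words)
		}
		chunks = append(chunks, strings.Join(words[start:end], " "))
		if end == len(words) {
			break
		}
	}
	return chunks
}

func main() {
	text := strings.Repeat("word ", 600)
	for i, c := range chunkWords(text, 250, 25) {
		fmt.Printf("chunk %d: %d words\n", i, len(strings.Fields(c)))
	}
}
```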


@@ -27,7 +27,6 @@ type Config struct {
 WriteNextMsgAs string
 WriteNextMsgAsCompletionAgent string
 SkipLLMResp bool
-AutoCleanToolCallsFromCtx bool `toml:"AutoCleanToolCallsFromCtx"`
 DBPATH string `toml:"DBPATH"`
 FilePickerDir string `toml:"FilePickerDir"`
 FilePickerExts string `toml:"FilePickerExts"`


@@ -63,9 +63,6 @@ This document explains how to set up and configure the application using the `co
 #### AutoScrollEnabled (`true`)
 - Whether to automatically scroll the chat window while the llm streams its response.
-#### AutoCleanToolCallsFromCtx (`false`)
-- Whether to automatically clean tool calls from the conversation context to manage token usage.
 ### RAG (Retrieval Augmented Generation) Settings
 #### EmbedURL (`"http://localhost:8082/v1/embeddings"`)

extra/google_tts.go (new file)

@@ -0,0 +1,218 @@
//go:build extra
// +build extra
package extra
import (
"fmt"
"gf-lt/models"
"io"
"log/slog"
"os/exec"
"strings"
"sync"
google_translate_tts "github.com/GrailFinder/google-translate-tts"
"github.com/neurosnap/sentences/english"
)
type GoogleTranslateOrator struct {
logger *slog.Logger
mu sync.Mutex
speech *google_translate_tts.Speech
// fields for playback control
cmd *exec.Cmd
cmdMu sync.Mutex
stopCh chan struct{}
// text buffer and interrupt flag
textBuffer strings.Builder
interrupt bool
Speed float32
}
func (o *GoogleTranslateOrator) stoproutine() {
for {
<-TTSDoneChan
o.logger.Debug("orator got done signal")
o.Stop()
for len(TTSTextChan) > 0 {
<-TTSTextChan
}
o.mu.Lock()
o.textBuffer.Reset()
o.interrupt = true
o.mu.Unlock()
}
}
func (o *GoogleTranslateOrator) readroutine() {
tokenizer, _ := english.NewSentenceTokenizer(nil)
for {
select {
case chunk := <-TTSTextChan:
o.mu.Lock()
o.interrupt = false
_, err := o.textBuffer.WriteString(chunk)
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
o.mu.Unlock()
continue
}
text := o.textBuffer.String()
sentences := tokenizer.Tokenize(text)
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
if len(sentences) <= 1 {
o.mu.Unlock()
continue
}
completeSentences := sentences[:len(sentences)-1]
remaining := sentences[len(sentences)-1].Text
o.textBuffer.Reset()
o.textBuffer.WriteString(remaining)
o.mu.Unlock()
for _, sentence := range completeSentences {
o.mu.Lock()
interrupted := o.interrupt
o.mu.Unlock()
if interrupted {
return
}
cleanedText := models.CleanText(sentence.Text)
if cleanedText == "" {
continue
}
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
if err := o.Speak(cleanedText); err != nil {
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
}
}
case <-TTSFlushChan:
o.logger.Debug("got flushchan signal start")
// llm is done; get the whole message out
if len(TTSTextChan) > 0 { // otherwise might get stuck
for chunk := range TTSTextChan {
o.mu.Lock()
_, err := o.textBuffer.WriteString(chunk)
o.mu.Unlock()
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
continue
}
if len(TTSTextChan) == 0 {
break
}
}
}
o.mu.Lock()
remaining := o.textBuffer.String()
remaining = models.CleanText(remaining)
o.textBuffer.Reset()
o.mu.Unlock()
if remaining == "" {
continue
}
o.logger.Debug("calling Speak with remainder", "rem", remaining)
sentencesRem := tokenizer.Tokenize(remaining)
for _, rs := range sentencesRem { // to avoid dumping large volume of text
o.mu.Lock()
interrupt := o.interrupt
o.mu.Unlock()
if interrupt {
break
}
if err := o.Speak(rs.Text); err != nil {
o.logger.Error("tts failed", "sentence", rs.Text, "error", err)
}
}
}
}
}
func (o *GoogleTranslateOrator) GetLogger() *slog.Logger {
return o.logger
}
func (o *GoogleTranslateOrator) Speak(text string) error {
o.logger.Debug("fn: Speak is called", "text-len", len(text))
// Generate MP3 data directly as an io.Reader
reader, err := o.speech.GenerateSpeech(text)
if err != nil {
return fmt.Errorf("generate speech failed: %w", err)
}
// Wrap in io.NopCloser since GenerateSpeech returns io.Reader (no close needed)
body := io.NopCloser(reader)
defer body.Close()
// Build ffplay command with optional speed filter
args := []string{"-nodisp", "-autoexit"}
if o.Speed > 0.1 && o.Speed != 1.0 {
// atempo range is 0.5 to 2.0; you might clamp it here
args = append(args, "-af", fmt.Sprintf("atempo=%.2f", o.Speed))
}
args = append(args, "-i", "pipe:0")
cmd := exec.Command("ffplay", args...)
stdin, err := cmd.StdinPipe()
if err != nil {
return fmt.Errorf("failed to get stdin pipe: %w", err)
}
o.cmdMu.Lock()
o.cmd = cmd
o.stopCh = make(chan struct{})
o.cmdMu.Unlock()
if err := cmd.Start(); err != nil {
return fmt.Errorf("failed to start ffplay: %w", err)
}
copyErr := make(chan error, 1)
go func() {
_, err := io.Copy(stdin, body)
stdin.Close()
copyErr <- err
}()
done := make(chan error, 1)
go func() {
done <- cmd.Wait()
}()
select {
case <-o.stopCh:
if o.cmd != nil && o.cmd.Process != nil {
o.cmd.Process.Kill()
}
<-done
return nil
case copyErrVal := <-copyErr:
if copyErrVal != nil {
if o.cmd != nil && o.cmd.Process != nil {
o.cmd.Process.Kill()
}
<-done
return copyErrVal
}
return <-done
case err := <-done:
return err
}
}
func (o *GoogleTranslateOrator) Stop() {
o.cmdMu.Lock()
defer o.cmdMu.Unlock()
// Signal any running Speak to stop
if o.stopCh != nil {
select {
case <-o.stopCh: // already closed
default:
close(o.stopCh)
}
o.stopCh = nil
}
// Kill the external player process if it's still running
if o.cmd != nil && o.cmd.Process != nil {
o.cmd.Process.Kill()
o.cmd.Wait() // clean up zombie process
o.cmd = nil
}
// Also reset text buffer and interrupt flag (with o.mu)
o.mu.Lock()
o.textBuffer.Reset()
o.interrupt = true
o.mu.Unlock()
}
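GoogleTranslateOrator.Speak above pipes the generated MP3 straight into ffplay and, when Speed is set, injects an atempo filter (a single atempo stage only accepts values between 0.5 and 2.0). A standalone sketch of that playback path, with a placeholder file in place of the TTS stream:

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
)

// playMP3 feeds an mp3 file to ffplay over stdin, optionally changing tempo.
// Mirrors the ffplay invocation in GoogleTranslateOrator.Speak; "sample.mp3"
// is a placeholder path, not a file from this repo.
func playMP3(path string, speed float32) error {
	args := []string{"-nodisp", "-autoexit"}
	if speed > 0.1 && speed != 1.0 {
		// one atempo stage covers 0.5-2.0; values outside need clamping or chaining
		args = append(args, "-af", fmt.Sprintf("atempo=%.2f", speed))
	}
	args = append(args, "-i", "pipe:0")
	cmd := exec.Command("ffplay", args...)
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	cmd.Stdin = f
	return cmd.Run()
}

func main() {
	if err := playMP3("sample.mp3", 1.25); err != nil {
		fmt.Fprintln(os.Stderr, "playback failed:", err)
	}
}
```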

extra/kokoro.go (new file)

@@ -0,0 +1,259 @@
//go:build extra
// +build extra
package extra
import (
"bytes"
"encoding/json"
"fmt"
"gf-lt/models"
"io"
"log/slog"
"net/http"
"os/exec"
"strings"
"sync"
"github.com/neurosnap/sentences/english"
)
type KokoroOrator struct {
logger *slog.Logger
mu sync.Mutex
URL string
Format models.AudioFormat
Stream bool
Speed float32
Language string
Voice string
// fields for playback control
cmd *exec.Cmd
cmdMu sync.Mutex
stopCh chan struct{}
// textBuffer, interrupt etc. remain the same
textBuffer strings.Builder
interrupt bool
}
func (o *KokoroOrator) GetLogger() *slog.Logger {
return o.logger
}
func (o *KokoroOrator) Speak(text string) error {
o.logger.Debug("fn: Speak is called", "text-len", len(text))
body, err := o.requestSound(text)
if err != nil {
return fmt.Errorf("request failed: %w", err)
}
defer body.Close()
cmd := exec.Command("ffplay", "-nodisp", "-autoexit", "-i", "pipe:0")
stdin, err := cmd.StdinPipe()
if err != nil {
return fmt.Errorf("failed to get stdin pipe: %w", err)
}
o.cmdMu.Lock()
o.cmd = cmd
o.stopCh = make(chan struct{})
o.cmdMu.Unlock()
if err := cmd.Start(); err != nil {
return fmt.Errorf("failed to start ffplay: %w", err)
}
// Copy audio in background
copyErr := make(chan error, 1)
go func() {
_, err := io.Copy(stdin, body)
stdin.Close()
copyErr <- err
}()
// Wait for player in background
done := make(chan error, 1)
go func() {
done <- cmd.Wait()
}()
// Wait for BOTH copy and player, but ensure we block until done
select {
case <-o.stopCh:
// Stop requested: kill player and wait for it to exit
if o.cmd != nil && o.cmd.Process != nil {
o.cmd.Process.Kill()
}
<-done // Wait for process to actually exit
return nil
case copyErrVal := <-copyErr:
if copyErrVal != nil {
// Copy failed: kill player and wait
if o.cmd != nil && o.cmd.Process != nil {
o.cmd.Process.Kill()
}
<-done
return copyErrVal
}
// Copy succeeded, now wait for playback to complete
return <-done
case err := <-done:
// Playback finished normally (copy must have succeeded or player would have exited early)
return err
}
}
func (o *KokoroOrator) requestSound(text string) (io.ReadCloser, error) {
if o.URL == "" {
return nil, fmt.Errorf("TTS URL is empty")
}
payload := map[string]interface{}{
"input": text,
"voice": o.Voice,
"response_format": o.Format,
"download_format": o.Format,
"stream": o.Stream,
"speed": o.Speed,
// "return_download_link": true,
"lang_code": o.Language,
}
payloadBytes, err := json.Marshal(payload)
if err != nil {
return nil, fmt.Errorf("failed to marshal payload: %w", err)
}
req, err := http.NewRequest("POST", o.URL, bytes.NewBuffer(payloadBytes)) //nolint:noctx
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}
req.Header.Set("accept", "application/json")
req.Header.Set("Content-Type", "application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, fmt.Errorf("request failed: %w", err)
}
if resp.StatusCode != http.StatusOK {
defer resp.Body.Close()
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
return resp.Body, nil
}
func (o *KokoroOrator) stoproutine() {
for {
<-TTSDoneChan
o.logger.Debug("orator got done signal")
// 1. Stop any ongoing playback (kills external player, closes stopCh)
o.Stop()
// 2. Drain any pending text chunks
for len(TTSTextChan) > 0 {
<-TTSTextChan
}
// 3. Reset internal state
o.mu.Lock()
o.textBuffer.Reset()
o.interrupt = true
o.mu.Unlock()
}
}
func (o *KokoroOrator) Stop() {
o.cmdMu.Lock()
defer o.cmdMu.Unlock()
// Signal any running Speak to stop
if o.stopCh != nil {
select {
case <-o.stopCh: // already closed
default:
close(o.stopCh)
}
o.stopCh = nil
}
// Kill the external player process if it's still running
if o.cmd != nil && o.cmd.Process != nil {
o.cmd.Process.Kill()
o.cmd.Wait() // clean up zombie process
o.cmd = nil
}
// Also reset text buffer and interrupt flag (with o.mu)
o.mu.Lock()
o.textBuffer.Reset()
o.interrupt = true
o.mu.Unlock()
}
func (o *KokoroOrator) readroutine() {
tokenizer, _ := english.NewSentenceTokenizer(nil)
for {
select {
case chunk := <-TTSTextChan:
o.mu.Lock()
o.interrupt = false
_, err := o.textBuffer.WriteString(chunk)
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
o.mu.Unlock()
continue
}
text := o.textBuffer.String()
sentences := tokenizer.Tokenize(text)
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
if len(sentences) <= 1 {
o.mu.Unlock()
continue
}
completeSentences := sentences[:len(sentences)-1]
remaining := sentences[len(sentences)-1].Text
o.textBuffer.Reset()
o.textBuffer.WriteString(remaining)
o.mu.Unlock()
for _, sentence := range completeSentences {
o.mu.Lock()
interrupted := o.interrupt
o.mu.Unlock()
if interrupted {
return
}
cleanedText := models.CleanText(sentence.Text)
if cleanedText == "" {
continue
}
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
if err := o.Speak(cleanedText); err != nil {
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
}
}
case <-TTSFlushChan:
o.logger.Debug("got flushchan signal start")
// llm is done; get the whole message out
if len(TTSTextChan) > 0 { // otherwise might get stuck
for chunk := range TTSTextChan {
o.mu.Lock()
_, err := o.textBuffer.WriteString(chunk)
o.mu.Unlock()
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
continue
}
if len(TTSTextChan) == 0 {
break
}
}
}
// flush remaining text
o.mu.Lock()
remaining := o.textBuffer.String()
remaining = models.CleanText(remaining)
o.textBuffer.Reset()
o.mu.Unlock()
if remaining == "" {
continue
}
o.logger.Debug("calling Speak with remainder", "rem", remaining)
sentencesRem := tokenizer.Tokenize(remaining)
for _, rs := range sentencesRem { // to avoid dumping large volume of text
o.mu.Lock()
interrupt := o.interrupt
o.mu.Unlock()
if interrupt {
break
}
if err := o.Speak(rs.Text); err != nil {
o.logger.Error("tts failed", "sentence", rs, "error", err)
}
}
}
}
}


@@ -6,18 +6,10 @@ package extra
import ( import (
"bytes" "bytes"
"encoding/binary" "encoding/binary"
"errors"
"fmt"
"gf-lt/config" "gf-lt/config"
"io" "io"
"log/slog" "log/slog"
"mime/multipart"
"net/http"
"regexp" "regexp"
"strings"
"syscall"
"github.com/gordonklaus/portaudio"
) )
var specialRE = regexp.MustCompile(`\[.*?\]`) var specialRE = regexp.MustCompile(`\[.*?\]`)
@@ -44,14 +36,6 @@ func NewSTT(logger *slog.Logger, cfg *config.Config) STT {
return NewWhisperServer(logger, cfg) return NewWhisperServer(logger, cfg)
} }
type WhisperServer struct {
logger *slog.Logger
ServerURL string
SampleRate int
AudioBuffer *bytes.Buffer
recording bool
}
func NewWhisperServer(logger *slog.Logger, cfg *config.Config) *WhisperServer { func NewWhisperServer(logger *slog.Logger, cfg *config.Config) *WhisperServer {
return &WhisperServer{ return &WhisperServer{
logger: logger, logger: logger,
@@ -61,69 +45,6 @@ func NewWhisperServer(logger *slog.Logger, cfg *config.Config) *WhisperServer {
} }
} }
func (stt *WhisperServer) StartRecording() error {
if err := stt.microphoneStream(stt.SampleRate); err != nil {
return fmt.Errorf("failed to init microphone: %w", err)
}
stt.recording = true
return nil
}
func (stt *WhisperServer) StopRecording() (string, error) {
stt.recording = false
// wait loop to finish?
if stt.AudioBuffer == nil {
err := errors.New("unexpected nil AudioBuffer")
stt.logger.Error(err.Error())
return "", err
}
// Create WAV header first
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
// Add audio file part
part, err := writer.CreateFormFile("file", "recording.wav")
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
// Stream directly to multipart writer: header + raw data
dataSize := stt.AudioBuffer.Len()
stt.writeWavHeader(part, dataSize)
if _, err := io.Copy(part, stt.AudioBuffer); err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
// Reset buffer for next recording
stt.AudioBuffer.Reset()
// Add response format field
err = writer.WriteField("response_format", "text")
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
if writer.Close() != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
// Send request
resp, err := http.Post(stt.ServerURL, writer.FormDataContentType(), body) //nolint:noctx
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
defer resp.Body.Close()
// Read and print response
responseTextBytes, err := io.ReadAll(resp.Body)
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
resptext := strings.TrimRight(string(responseTextBytes), "\n")
// in case there are special tokens like [_BEG_]
resptext = specialRE.ReplaceAllString(resptext, "")
return strings.TrimSpace(strings.ReplaceAll(resptext, "\n ", "\n")), nil
}
func (stt *WhisperServer) writeWavHeader(w io.Writer, dataSize int) { func (stt *WhisperServer) writeWavHeader(w io.Writer, dataSize int) {
header := make([]byte, 44) header := make([]byte, 44)
copy(header[0:4], "RIFF") copy(header[0:4], "RIFF")
@@ -147,56 +68,3 @@ func (stt *WhisperServer) writeWavHeader(w io.Writer, dataSize int) {
func (stt *WhisperServer) IsRecording() bool { func (stt *WhisperServer) IsRecording() bool {
return stt.recording return stt.recording
} }
func (stt *WhisperServer) microphoneStream(sampleRate int) error {
// Temporarily redirect stderr to suppress ALSA warnings during PortAudio init
origStderr, errDup := syscall.Dup(syscall.Stderr)
if errDup != nil {
return fmt.Errorf("failed to dup stderr: %w", errDup)
}
nullFD, err := syscall.Open("/dev/null", syscall.O_WRONLY, 0)
if err != nil {
_ = syscall.Close(origStderr) // Close the dup'd fd if open fails
return fmt.Errorf("failed to open /dev/null: %w", err)
}
// redirect stderr
_ = syscall.Dup2(nullFD, syscall.Stderr)
// Initialize PortAudio (this is where ALSA warnings occur)
defer func() {
// Restore stderr
_ = syscall.Dup2(origStderr, syscall.Stderr)
_ = syscall.Close(origStderr)
_ = syscall.Close(nullFD)
}()
if err := portaudio.Initialize(); err != nil {
return fmt.Errorf("portaudio init failed: %w", err)
}
in := make([]int16, 64)
stream, err := portaudio.OpenDefaultStream(1, 0, float64(sampleRate), len(in), in)
if err != nil {
if paErr := portaudio.Terminate(); paErr != nil {
return fmt.Errorf("failed to open microphone: %w; terminate error: %w", err, paErr)
}
return fmt.Errorf("failed to open microphone: %w", err)
}
go func(stream *portaudio.Stream) {
if err := stream.Start(); err != nil {
stt.logger.Error("microphoneStream", "error", err)
return
}
for {
if !stt.IsRecording() {
return
}
if err := stream.Read(); err != nil {
stt.logger.Error("reading stream", "error", err)
return
}
if err := binary.Write(stt.AudioBuffer, binary.LittleEndian, in); err != nil {
stt.logger.Error("writing to buffer", "error", err)
return
}
}
}(stream)
return nil
}
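All of the portaudio-based capture above is deleted; per the "use ffmpeg instead of portaudio" commit, recording now goes through ffmpeg, but the replacement code is outside this diff. Purely as a sketch of what an ffmpeg capture path can look like (assumes Linux/ALSA and a default input device; not the repo's implementation):

```go
package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"time"
)

// recordWAV captures roughly dur of microphone audio with ffmpeg and returns
// a 16 kHz mono WAV, the format whisper.cpp expects. The device name and
// flags are assumptions for a Linux/ALSA setup.
func recordWAV(dur time.Duration) ([]byte, error) {
	cmd := exec.Command("ffmpeg",
		"-f", "alsa", "-i", "default", // default ALSA capture device
		"-t", fmt.Sprintf("%.1f", dur.Seconds()),
		"-ar", "16000", "-ac", "1", // 16 kHz, mono
		"-f", "wav", "pipe:1", // write the WAV to stdout
	)
	var out, stderr bytes.Buffer
	cmd.Stdout = &out
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		return nil, fmt.Errorf("ffmpeg failed: %w: %s", err, stderr.String())
	}
	return out.Bytes(), nil
}

func main() {
	wav, err := recordWAV(3 * time.Second)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("captured %d bytes of WAV audio\n", len(wav))
}
```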


@@ -4,25 +4,13 @@
package extra package extra
import ( import (
"bytes"
"encoding/json"
"fmt"
"gf-lt/config" "gf-lt/config"
"gf-lt/models" "gf-lt/models"
"io"
"log/slog" "log/slog"
"net/http"
"os" "os"
"strings" "strings"
"sync"
"time"
google_translate_tts "github.com/GrailFinder/google-translate-tts" google_translate_tts "github.com/GrailFinder/google-translate-tts"
"github.com/GrailFinder/google-translate-tts/handlers"
"github.com/gopxl/beep/v2"
"github.com/gopxl/beep/v2/mp3"
"github.com/gopxl/beep/v2/speaker"
"github.com/neurosnap/sentences/english"
) )
var ( var (
@@ -39,142 +27,6 @@ type Orator interface {
GetLogger() *slog.Logger GetLogger() *slog.Logger
} }
// impl https://github.com/remsky/Kokoro-FastAPI
type KokoroOrator struct {
logger *slog.Logger
mu sync.Mutex
URL string
Format models.AudioFormat
Stream bool
Speed float32
Language string
Voice string
currentStream *beep.Ctrl // Added for playback control
currentDone chan bool
textBuffer strings.Builder
interrupt bool
// textBuffer bytes.Buffer
}
// Google Translate TTS implementation
type GoogleTranslateOrator struct {
logger *slog.Logger
mu sync.Mutex
speech *google_translate_tts.Speech
currentStream *beep.Ctrl
currentDone chan bool
textBuffer strings.Builder
interrupt bool
}
func (o *KokoroOrator) stoproutine() {
for {
<-TTSDoneChan
o.logger.Debug("orator got done signal")
o.Stop()
// drain the channel
for len(TTSTextChan) > 0 {
<-TTSTextChan
}
o.mu.Lock()
o.textBuffer.Reset()
if o.currentDone != nil {
select {
case o.currentDone <- true:
default:
// Channel might be closed, ignore
}
}
o.interrupt = true
o.mu.Unlock()
}
}
func (o *KokoroOrator) readroutine() {
tokenizer, _ := english.NewSentenceTokenizer(nil)
for {
select {
case chunk := <-TTSTextChan:
o.mu.Lock()
o.interrupt = false
_, err := o.textBuffer.WriteString(chunk)
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
o.mu.Unlock()
continue
}
text := o.textBuffer.String()
sentences := tokenizer.Tokenize(text)
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
if len(sentences) <= 1 {
o.mu.Unlock()
continue
}
completeSentences := sentences[:len(sentences)-1]
remaining := sentences[len(sentences)-1].Text
o.textBuffer.Reset()
o.textBuffer.WriteString(remaining)
o.mu.Unlock()
for _, sentence := range completeSentences {
o.mu.Lock()
interrupted := o.interrupt
o.mu.Unlock()
if interrupted {
return
}
cleanedText := models.CleanText(sentence.Text)
if cleanedText == "" {
continue
}
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
if err := o.Speak(cleanedText); err != nil {
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
}
}
case <-TTSFlushChan:
o.logger.Debug("got flushchan signal start")
// lln is done get the whole message out
if len(TTSTextChan) > 0 { // otherwise might get stuck
for chunk := range TTSTextChan {
o.mu.Lock()
_, err := o.textBuffer.WriteString(chunk)
o.mu.Unlock()
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
continue
}
if len(TTSTextChan) == 0 {
break
}
}
}
// flush remaining text
o.mu.Lock()
remaining := o.textBuffer.String()
remaining = models.CleanText(remaining)
o.textBuffer.Reset()
o.mu.Unlock()
if remaining == "" {
continue
}
o.logger.Debug("calling Speak with remainder", "rem", remaining)
sentencesRem := tokenizer.Tokenize(remaining)
for _, rs := range sentencesRem { // to avoid dumping large volume of text
o.mu.Lock()
interrupt := o.interrupt
o.mu.Unlock()
if interrupt {
break
}
if err := o.Speak(rs.Text); err != nil {
o.logger.Error("tts failed", "sentence", rs, "error", err)
}
}
}
}
}
func NewOrator(log *slog.Logger, cfg *config.Config) Orator { func NewOrator(log *slog.Logger, cfg *config.Config) Orator {
provider := cfg.TTS_PROVIDER provider := cfg.TTS_PROVIDER
if provider == "" { if provider == "" {
@@ -204,270 +56,14 @@ func NewOrator(log *slog.Logger, cfg *config.Config) Orator {
Language: language, Language: language,
Proxy: "", // Proxy not supported Proxy: "", // Proxy not supported
Speed: cfg.TTS_SPEED, Speed: cfg.TTS_SPEED,
Handler: &handlers.Beep{},
} }
orator := &GoogleTranslateOrator{ orator := &GoogleTranslateOrator{
logger: log, logger: log,
speech: speech, speech: speech,
Speed: cfg.TTS_SPEED,
} }
go orator.readroutine() go orator.readroutine()
go orator.stoproutine() go orator.stoproutine()
return orator return orator
} }
} }
func (o *KokoroOrator) GetLogger() *slog.Logger {
return o.logger
}
func (o *KokoroOrator) requestSound(text string) (io.ReadCloser, error) {
if o.URL == "" {
return nil, fmt.Errorf("TTS URL is empty")
}
payload := map[string]interface{}{
"input": text,
"voice": o.Voice,
"response_format": o.Format,
"download_format": o.Format,
"stream": o.Stream,
"speed": o.Speed,
// "return_download_link": true,
"lang_code": o.Language,
}
payloadBytes, err := json.Marshal(payload)
if err != nil {
return nil, fmt.Errorf("failed to marshal payload: %w", err)
}
req, err := http.NewRequest("POST", o.URL, bytes.NewBuffer(payloadBytes)) //nolint:noctx
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}
req.Header.Set("accept", "application/json")
req.Header.Set("Content-Type", "application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, fmt.Errorf("request failed: %w", err)
}
if resp.StatusCode != http.StatusOK {
defer resp.Body.Close()
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
return resp.Body, nil
}
func (o *KokoroOrator) Speak(text string) error {
o.logger.Debug("fn: Speak is called", "text-len", len(text))
body, err := o.requestSound(text)
if err != nil {
o.logger.Error("request failed", "error", err)
return fmt.Errorf("request failed: %w", err)
}
defer body.Close()
// Decode the mp3 audio from response body
streamer, format, err := mp3.Decode(body)
if err != nil {
o.logger.Error("mp3 decode failed", "error", err)
return fmt.Errorf("mp3 decode failed: %w", err)
}
defer streamer.Close()
// here it spams with errors that speaker cannot be initialized more than once, but how would we deal with many audio records then?
if err := speaker.Init(format.SampleRate, format.SampleRate.N(time.Second/10)); err != nil {
o.logger.Debug("failed to init speaker", "error", err)
}
done := make(chan bool)
o.mu.Lock()
o.currentDone = done
o.currentStream = &beep.Ctrl{Streamer: beep.Seq(streamer, beep.Callback(func() {
o.mu.Lock()
close(done)
o.currentStream = nil
o.currentDone = nil
o.mu.Unlock()
})), Paused: false}
o.mu.Unlock()
speaker.Play(o.currentStream)
<-done
return nil
}
func (o *KokoroOrator) Stop() {
// speaker.Clear()
o.logger.Debug("attempted to stop orator", "orator", o)
speaker.Lock()
defer speaker.Unlock()
o.mu.Lock()
defer o.mu.Unlock()
if o.currentStream != nil {
// o.currentStream.Paused = true
o.currentStream.Streamer = nil
}
}
func (o *GoogleTranslateOrator) stoproutine() {
for {
<-TTSDoneChan
o.logger.Debug("orator got done signal")
o.Stop()
// drain the channel
for len(TTSTextChan) > 0 {
<-TTSTextChan
}
o.mu.Lock()
o.textBuffer.Reset()
if o.currentDone != nil {
select {
case o.currentDone <- true:
default:
// Channel might be closed, ignore
}
}
o.interrupt = true
o.mu.Unlock()
}
}
func (o *GoogleTranslateOrator) readroutine() {
tokenizer, _ := english.NewSentenceTokenizer(nil)
for {
select {
case chunk := <-TTSTextChan:
o.mu.Lock()
o.interrupt = false
_, err := o.textBuffer.WriteString(chunk)
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
o.mu.Unlock()
continue
}
text := o.textBuffer.String()
sentences := tokenizer.Tokenize(text)
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
if len(sentences) <= 1 {
o.mu.Unlock()
continue
}
completeSentences := sentences[:len(sentences)-1]
remaining := sentences[len(sentences)-1].Text
o.textBuffer.Reset()
o.textBuffer.WriteString(remaining)
o.mu.Unlock()
for _, sentence := range completeSentences {
o.mu.Lock()
interrupted := o.interrupt
o.mu.Unlock()
if interrupted {
return
}
cleanedText := models.CleanText(sentence.Text)
if cleanedText == "" {
continue
}
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
if err := o.Speak(cleanedText); err != nil {
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
}
}
case <-TTSFlushChan:
o.logger.Debug("got flushchan signal start")
// llm is done, get the whole message out
if len(TTSTextChan) > 0 { // otherwise might get stuck
for chunk := range TTSTextChan {
o.mu.Lock()
_, err := o.textBuffer.WriteString(chunk)
o.mu.Unlock()
if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err)
continue
}
if len(TTSTextChan) == 0 {
break
}
}
}
o.mu.Lock()
remaining := o.textBuffer.String()
remaining = models.CleanText(remaining)
o.textBuffer.Reset()
o.mu.Unlock()
if remaining == "" {
continue
}
o.logger.Debug("calling Speak with remainder", "rem", remaining)
sentencesRem := tokenizer.Tokenize(remaining)
for _, rs := range sentencesRem { // to avoid dumping large volume of text
o.mu.Lock()
interrupt := o.interrupt
o.mu.Unlock()
if interrupt {
break
}
if err := o.Speak(rs.Text); err != nil {
o.logger.Error("tts failed", "sentence", rs.Text, "error", err)
}
}
}
}
}
func (o *GoogleTranslateOrator) GetLogger() *slog.Logger {
return o.logger
}
func (o *GoogleTranslateOrator) Speak(text string) error {
o.logger.Debug("fn: Speak is called", "text-len", len(text))
// Generate MP3 data using google-translate-tts
reader, err := o.speech.GenerateSpeech(text)
if err != nil {
o.logger.Error("generate speech failed", "error", err)
return fmt.Errorf("generate speech failed: %w", err)
}
// Decode the mp3 audio from reader (wrap with NopCloser for io.ReadCloser)
streamer, format, err := mp3.Decode(io.NopCloser(reader))
if err != nil {
o.logger.Error("mp3 decode failed", "error", err)
return fmt.Errorf("mp3 decode failed: %w", err)
}
defer streamer.Close()
playbackStreamer := beep.Streamer(streamer)
speed := o.speech.Speed
if speed <= 0 {
speed = 1.0
}
if speed != 1.0 {
playbackStreamer = beep.ResampleRatio(3, float64(speed), streamer)
}
// Initialize speaker with the format's sample rate
if err := speaker.Init(format.SampleRate, format.SampleRate.N(time.Second/10)); err != nil {
o.logger.Debug("failed to init speaker", "error", err)
}
done := make(chan bool)
o.mu.Lock()
o.currentDone = done
o.currentStream = &beep.Ctrl{Streamer: beep.Seq(playbackStreamer, beep.Callback(func() {
o.mu.Lock()
close(done)
o.currentStream = nil
o.currentDone = nil
o.mu.Unlock()
})), Paused: false}
o.mu.Unlock()
speaker.Play(o.currentStream)
<-done // wait for playback to complete
return nil
}
func (o *GoogleTranslateOrator) Stop() {
o.logger.Debug("attempted to stop google translate orator")
speaker.Lock()
defer speaker.Unlock()
o.mu.Lock()
defer o.mu.Unlock()
if o.currentStream != nil {
o.currentStream.Streamer = nil
}
// Also stop the speech handler if possible
if o.speech != nil {
_ = o.speech.Stop()
}
}
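
The readroutine above implements sentence-buffered streaming TTS: incoming chunks accumulate in a string builder, only complete sentences are spoken, and the trailing fragment is carried over until the flush signal arrives. A minimal standalone sketch of that buffering idea, using the same neurosnap/sentences tokenizer but with a hypothetical speakSentence stub in place of Orator.Speak and no channels or locking:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/neurosnap/sentences/english"
)

// speakSentence stands in for Orator.Speak; it is a hypothetical stub.
func speakSentence(s string) { fmt.Println("speak:", s) }

func main() {
	tokenizer, _ := english.NewSentenceTokenizer(nil)
	var buf strings.Builder
	// simulated streamed chunks, as they would arrive on TTSTextChan
	chunks := []string{"Hello the", "re. This is a stre", "amed reply. Tail frag"}
	for _, chunk := range chunks {
		buf.WriteString(chunk)
		sentences := tokenizer.Tokenize(buf.String())
		if len(sentences) <= 1 {
			continue // nothing complete yet, keep buffering
		}
		// speak every sentence except the (possibly incomplete) last one
		for _, s := range sentences[:len(sentences)-1] {
			speakSentence(s.Text)
		}
		// carry the unfinished tail over to the next chunk
		buf.Reset()
		buf.WriteString(sentences[len(sentences)-1].Text)
	}
	fmt.Println("remainder spoken on flush:", buf.String())
}
```
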

View File

@@ -9,15 +9,13 @@ import (
"errors" "errors"
"fmt" "fmt"
"gf-lt/config" "gf-lt/config"
"io"
"log/slog" "log/slog"
"os" "os"
"os/exec" "os/exec"
"strings" "strings"
"sync" "sync"
"syscall" "syscall"
"time"
"github.com/gordonklaus/portaudio"
) )
type WhisperBinary struct { type WhisperBinary struct {
@@ -25,11 +23,143 @@ type WhisperBinary struct {
whisperPath string whisperPath string
modelPath string modelPath string
lang string lang string
ctx context.Context // Per-recording fields (protected by mu)
cancel context.CancelFunc
mu sync.Mutex mu sync.Mutex
recording bool recording bool
audioBuffer []int16 tempFile string
ctx context.Context
cancel context.CancelFunc
cmd *exec.Cmd
cmdMu sync.Mutex
}
func (w *WhisperBinary) StartRecording() error {
w.mu.Lock()
defer w.mu.Unlock()
if w.recording {
return errors.New("recording is already in progress")
}
// Fresh context for this recording
ctx, cancel := context.WithCancel(context.Background())
w.ctx = ctx
w.cancel = cancel
// Create temporary file
tempFile, err := os.CreateTemp("", "recording_*.wav")
if err != nil {
cancel()
return fmt.Errorf("failed to create temp file: %w", err)
}
tempFile.Close()
w.tempFile = tempFile.Name()
// ffmpeg command: capture from default microphone, write WAV
args := []string{
"-f", "alsa", // or "pulse" if preferred
"-i", "default",
"-acodec", "pcm_s16le",
"-ar", "16000",
"-ac", "1",
"-y", // overwrite output file
w.tempFile,
}
cmd := exec.CommandContext(w.ctx, "ffmpeg", args...)
// Capture stderr for debugging (optional, but useful for diagnosing)
stderr, err := cmd.StderrPipe()
if err != nil {
cancel()
os.Remove(w.tempFile)
return fmt.Errorf("failed to create stderr pipe: %w", err)
}
go func() {
buf := make([]byte, 1024)
for {
n, err := stderr.Read(buf)
if n > 0 {
w.logger.Debug("ffmpeg stderr", "output", string(buf[:n]))
}
if err != nil {
break
}
}
}()
w.cmdMu.Lock()
w.cmd = cmd
w.cmdMu.Unlock()
if err := cmd.Start(); err != nil {
cancel()
os.Remove(w.tempFile)
return fmt.Errorf("failed to start ffmpeg: %w", err)
}
w.recording = true
w.logger.Debug("Recording started", "file", w.tempFile)
return nil
}
func (w *WhisperBinary) StopRecording() (string, error) {
w.mu.Lock()
defer w.mu.Unlock()
if !w.recording {
return "", errors.New("not currently recording")
}
w.recording = false
// Gracefully stop ffmpeg
w.cmdMu.Lock()
if w.cmd != nil && w.cmd.Process != nil {
w.logger.Debug("Sending SIGTERM to ffmpeg")
w.cmd.Process.Signal(syscall.SIGTERM)
// Wait for process to exit (up to 2 seconds)
done := make(chan error, 1)
go func() {
done <- w.cmd.Wait()
}()
select {
case <-done:
w.logger.Debug("ffmpeg exited after SIGTERM")
case <-time.After(2 * time.Second):
w.logger.Warn("ffmpeg did not exit, sending SIGKILL")
w.cmd.Process.Kill()
<-done
}
}
w.cmdMu.Unlock()
// Cancel context (already done, but for cleanliness)
if w.cancel != nil {
w.cancel()
}
// Validate temp file
if w.tempFile == "" {
return "", errors.New("no recording file")
}
defer os.Remove(w.tempFile)
info, err := os.Stat(w.tempFile)
if err != nil {
return "", fmt.Errorf("failed to stat temp file: %w", err)
}
if info.Size() < 44 { // WAV header is 44 bytes
// Log ffmpeg stderr? Already captured in debug logs.
return "", fmt.Errorf("recording file too small (%d bytes), possibly no audio captured", info.Size())
}
// Run whisper.cpp binary
cmd := exec.Command(w.whisperPath, "-m", w.modelPath, "-l", w.lang, w.tempFile)
var outBuf, errBuf bytes.Buffer
cmd.Stdout = &outBuf
cmd.Stderr = &errBuf
if err := cmd.Run(); err != nil {
w.logger.Error("whisper binary failed",
"error", err,
"stderr", errBuf.String(),
"file_size", info.Size())
return "", fmt.Errorf("whisper binary failed: %w (stderr: %s)", err, errBuf.String())
}
result := strings.TrimRight(outBuf.String(), "\n")
result = specialRE.ReplaceAllString(result, "")
return strings.TrimSpace(strings.ReplaceAll(result, "\n ", "\n")), nil
}
// IsRecording returns true if a recording is in progress.
func (w *WhisperBinary) IsRecording() bool {
w.mu.Lock()
defer w.mu.Unlock()
return w.recording
} }
func NewWhisperBinary(logger *slog.Logger, cfg *config.Config) *WhisperBinary { func NewWhisperBinary(logger *slog.Logger, cfg *config.Config) *WhisperBinary {
@@ -44,283 +174,3 @@ func NewWhisperBinary(logger *slog.Logger, cfg *config.Config) *WhisperBinary {
cancel: cancel, cancel: cancel,
} }
} }
func (w *WhisperBinary) StartRecording() error {
w.mu.Lock()
defer w.mu.Unlock()
if w.recording {
return errors.New("recording is already in progress")
}
// If context is cancelled, create a new one for the next recording session
if w.ctx.Err() != nil {
w.logger.Debug("Context cancelled, creating new context")
w.ctx, w.cancel = context.WithCancel(context.Background())
}
// Temporarily redirect stderr to suppress ALSA warnings during PortAudio init
origStderr, errDup := syscall.Dup(syscall.Stderr)
if errDup != nil {
return fmt.Errorf("failed to dup stderr: %w", errDup)
}
nullFD, err := syscall.Open("/dev/null", syscall.O_WRONLY, 0)
if err != nil {
_ = syscall.Close(origStderr) // Close the dup'd fd if open fails
return fmt.Errorf("failed to open /dev/null: %w", err)
}
// redirect stderr
_ = syscall.Dup2(nullFD, syscall.Stderr)
// Initialize PortAudio (this is where ALSA warnings occur)
portaudioErr := portaudio.Initialize()
defer func() {
// Restore stderr
_ = syscall.Dup2(origStderr, syscall.Stderr)
_ = syscall.Close(origStderr)
_ = syscall.Close(nullFD)
}()
if portaudioErr != nil {
return fmt.Errorf("portaudio init failed: %w", portaudioErr)
}
// Initialize audio buffer
w.audioBuffer = make([]int16, 0)
in := make([]int16, 1024) // buffer size
stream, err := portaudio.OpenDefaultStream(1, 0, 16000.0, len(in), in)
if err != nil {
if paErr := portaudio.Terminate(); paErr != nil {
return fmt.Errorf("failed to open microphone: %w; terminate error: %w", err, paErr)
}
return fmt.Errorf("failed to open microphone: %w", err)
}
go w.recordAudio(stream, in)
w.recording = true
w.logger.Debug("Recording started")
return nil
}
func (w *WhisperBinary) recordAudio(stream *portaudio.Stream, in []int16) {
defer func() {
w.logger.Debug("recordAudio defer function called")
_ = stream.Stop() // Stop the stream
_ = portaudio.Terminate() // ignoring error as we're shutting down
w.logger.Debug("recordAudio terminated")
}()
w.logger.Debug("Starting audio stream")
if err := stream.Start(); err != nil {
w.logger.Error("Failed to start audio stream", "error", err)
return
}
w.logger.Debug("Audio stream started, entering recording loop")
for {
select {
case <-w.ctx.Done():
w.logger.Debug("Context done, exiting recording loop")
return
default:
// Check recording status with minimal lock time
w.mu.Lock()
recording := w.recording
w.mu.Unlock()
if !recording {
w.logger.Debug("Recording flag is false, exiting recording loop")
return
}
if err := stream.Read(); err != nil {
w.logger.Error("Error reading from stream", "error", err)
return
}
// Append samples to buffer - only acquire lock when necessary
w.mu.Lock()
if w.audioBuffer == nil {
w.audioBuffer = make([]int16, 0)
}
// Make a copy of the input buffer to avoid overwriting
tempBuffer := make([]int16, len(in))
copy(tempBuffer, in)
w.audioBuffer = append(w.audioBuffer, tempBuffer...)
w.mu.Unlock()
}
}
}
func (w *WhisperBinary) StopRecording() (string, error) {
w.logger.Debug("StopRecording called")
w.mu.Lock()
if !w.recording {
w.mu.Unlock()
return "", errors.New("not currently recording")
}
w.logger.Debug("Setting recording to false and cancelling context")
w.recording = false
w.cancel() // This will stop the recording goroutine
w.mu.Unlock()
// // Small delay to allow the recording goroutine to react to context cancellation
// time.Sleep(20 * time.Millisecond)
// Save the recorded audio to a temporary file
tempFile, err := w.saveAudioToTempFile()
if err != nil {
w.logger.Error("Error saving audio to temp file", "error", err)
return "", fmt.Errorf("failed to save audio to temp file: %w", err)
}
w.logger.Debug("Saved audio to temp file", "file", tempFile)
// Run the whisper binary with a separate context to avoid cancellation during transcription
cmd := exec.Command(w.whisperPath, "-m", w.modelPath, "-l", w.lang, tempFile, "2>/dev/null")
var outBuf bytes.Buffer
cmd.Stdout = &outBuf
// Redirect stderr to suppress ALSA warnings and other stderr output
cmd.Stderr = io.Discard // Suppress stderr output from whisper binary
w.logger.Debug("Running whisper binary command")
if err := cmd.Run(); err != nil {
// Clean up audio buffer
w.mu.Lock()
w.audioBuffer = nil
w.mu.Unlock()
// Since we're suppressing stderr, we'll just log that the command failed
w.logger.Error("Error running whisper binary", "error", err)
return "", fmt.Errorf("whisper binary failed: %w", err)
}
result := outBuf.String()
w.logger.Debug("Whisper binary completed", "result", result)
// Clean up audio buffer
w.mu.Lock()
w.audioBuffer = nil
w.mu.Unlock()
// Clean up the temporary file after transcription
w.logger.Debug("StopRecording completed")
os.Remove(tempFile)
result = strings.TrimRight(result, "\n")
// in case there are special tokens like [_BEG_]
result = specialRE.ReplaceAllString(result, "")
return strings.TrimSpace(strings.ReplaceAll(result, "\n ", "\n")), nil
}
// saveAudioToTempFile saves the recorded audio data to a temporary WAV file
func (w *WhisperBinary) saveAudioToTempFile() (string, error) {
w.logger.Debug("saveAudioToTempFile called")
// Create temporary WAV file
tempFile, err := os.CreateTemp("", "recording_*.wav")
if err != nil {
w.logger.Error("Failed to create temp file", "error", err)
return "", fmt.Errorf("failed to create temp file: %w", err)
}
w.logger.Debug("Created temp file", "file", tempFile.Name())
defer tempFile.Close()
// Write WAV header and data
w.logger.Debug("About to write WAV file", "file", tempFile.Name())
err = w.writeWAVFile(tempFile.Name())
if err != nil {
w.logger.Error("Error writing WAV file", "error", err)
return "", fmt.Errorf("failed to write WAV file: %w", err)
}
w.logger.Debug("WAV file written successfully", "file", tempFile.Name())
return tempFile.Name(), nil
}
// writeWAVFile creates a WAV file from the recorded audio data
func (w *WhisperBinary) writeWAVFile(filename string) error {
w.logger.Debug("writeWAVFile called", "filename", filename)
// Open file for writing
file, err := os.Create(filename)
if err != nil {
w.logger.Error("Error creating file", "error", err)
return err
}
defer file.Close()
w.logger.Debug("About to acquire mutex in writeWAVFile")
w.mu.Lock()
w.logger.Debug("Locked mutex, copying audio buffer")
audioData := make([]int16, len(w.audioBuffer))
copy(audioData, w.audioBuffer)
w.mu.Unlock()
w.logger.Debug("Unlocked mutex", "audio_data_length", len(audioData))
if len(audioData) == 0 {
w.logger.Warn("No audio data to write")
return errors.New("no audio data to write")
}
// Calculate data size (number of samples * size of int16)
dataSize := len(audioData) * 2 // 2 bytes per int16 sample
w.logger.Debug("Calculated data size", "size", dataSize)
// Write WAV header with the correct data size
header := w.createWAVHeader(16000, 1, 16, dataSize)
_, err = file.Write(header)
if err != nil {
w.logger.Error("Error writing WAV header", "error", err)
return err
}
w.logger.Debug("WAV header written successfully")
// Write audio data
w.logger.Debug("About to write audio data samples")
for i, sample := range audioData {
// Write little-endian 16-bit sample
_, err := file.Write([]byte{byte(sample), byte(sample >> 8)})
if err != nil {
w.logger.Error("Error writing sample", "index", i, "error", err)
return err
}
// Log progress every 10000 samples to avoid too much output
if i%10000 == 0 {
w.logger.Debug("Written samples", "count", i)
}
}
w.logger.Debug("All audio data written successfully")
return nil
}
// createWAVHeader creates a WAV file header
func (w *WhisperBinary) createWAVHeader(sampleRate, channels, bitsPerSample int, dataSize int) []byte {
header := make([]byte, 44)
copy(header[0:4], "RIFF")
// Total file size will be updated later
copy(header[8:12], "WAVE")
copy(header[12:16], "fmt ")
// fmt chunk size (16 for PCM)
header[16] = 16
header[17] = 0
header[18] = 0
header[19] = 0
// Audio format (1 = PCM)
header[20] = 1
header[21] = 0
// Number of channels
header[22] = byte(channels)
header[23] = 0
// Sample rate
header[24] = byte(sampleRate)
header[25] = byte(sampleRate >> 8)
header[26] = byte(sampleRate >> 16)
header[27] = byte(sampleRate >> 24)
// Byte rate
byteRate := sampleRate * channels * bitsPerSample / 8
header[28] = byte(byteRate)
header[29] = byte(byteRate >> 8)
header[30] = byte(byteRate >> 16)
header[31] = byte(byteRate >> 24)
// Block align
blockAlign := channels * bitsPerSample / 8
header[32] = byte(blockAlign)
header[33] = 0
// Bits per sample
header[34] = byte(bitsPerSample)
header[35] = 0
// "data" subchunk
copy(header[36:40], "data")
// Data size
header[40] = byte(dataSize)
header[41] = byte(dataSize >> 8)
header[42] = byte(dataSize >> 16)
header[43] = byte(dataSize >> 24)
return header
}
func (w *WhisperBinary) IsRecording() bool {
w.mu.Lock()
defer w.mu.Unlock()
return w.recording
}
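
Taken together, the new WhisperBinary records by spawning ffmpeg into a temporary WAV and transcribes by shelling out to the whisper.cpp binary, so neither CGo nor PortAudio is involved anymore. A rough caller-side sketch of the flow, assuming the same `extra` build tag; the zero-value config is an assumption, and the real whisper binary/model paths would come from gf-lt/config:

```go
//go:build extra

package main

import (
	"fmt"
	"log/slog"
	"os"
	"time"

	"gf-lt/config"
	"gf-lt/extra"
)

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
	cfg := &config.Config{} // assumption: whisper binary/model paths are normally set here
	stt := extra.NewWhisperBinary(logger, cfg)
	// spawns: ffmpeg -f alsa -i default ... recording_*.wav
	if err := stt.StartRecording(); err != nil {
		logger.Error("start failed", "error", err)
		return
	}
	time.Sleep(3 * time.Second) // speak into the microphone
	// SIGTERM ffmpeg, then run the whisper.cpp binary on the temp WAV
	text, err := stt.StopRecording()
	if err != nil {
		logger.Error("stop failed", "error", err)
		return
	}
	fmt.Println(text)
}
```
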

156
extra/whisper_server.go Normal file
View File

@@ -0,0 +1,156 @@
//go:build extra
// +build extra
package extra
import (
"bytes"
"errors"
"fmt"
"io"
"log/slog"
"mime/multipart"
"net/http"
"os/exec"
"strings"
"sync"
)
type WhisperServer struct {
logger *slog.Logger
ServerURL string
SampleRate int
AudioBuffer *bytes.Buffer
recording bool // protected by mu
mu sync.Mutex // protects recording & AudioBuffer
cmd *exec.Cmd // protected by cmdMu
stopCh chan struct{} // protected by cmdMu
cmdMu sync.Mutex // protects cmd and stopCh
}
func (stt *WhisperServer) StartRecording() error {
stt.mu.Lock()
defer stt.mu.Unlock()
if stt.recording {
return nil
}
// Build ffmpeg command for microphone capture
args := []string{
"-f", "alsa",
"-i", "default",
"-acodec", "pcm_s16le",
"-ar", fmt.Sprint(stt.SampleRate),
"-ac", "1",
"-f", "s16le",
"-",
}
cmd := exec.Command("ffmpeg", args...)
stdout, err := cmd.StdoutPipe()
if err != nil {
return fmt.Errorf("failed to get stdout pipe: %w", err)
}
stt.cmdMu.Lock()
stt.cmd = cmd
stt.stopCh = make(chan struct{})
stt.cmdMu.Unlock()
if err := cmd.Start(); err != nil {
return fmt.Errorf("failed to start ffmpeg: %w", err)
}
stt.recording = true
stt.AudioBuffer.Reset()
// Read PCM data in goroutine
go func() {
buf := make([]byte, 4096)
for {
select {
case <-stt.stopCh:
return
default:
n, err := stdout.Read(buf)
if n > 0 {
stt.mu.Lock()
stt.AudioBuffer.Write(buf[:n])
stt.mu.Unlock()
}
if err != nil {
if err != io.EOF {
stt.logger.Error("recording read error", "error", err)
}
return
}
}
}
}()
return nil
}
func (stt *WhisperServer) StopRecording() (string, error) {
stt.mu.Lock()
defer stt.mu.Unlock()
if !stt.recording {
return "", errors.New("not recording")
}
stt.recording = false
// Stop ffmpeg
stt.cmdMu.Lock()
if stt.cmd != nil && stt.cmd.Process != nil {
stt.cmd.Process.Kill()
stt.cmd.Wait()
}
close(stt.stopCh)
stt.cmdMu.Unlock()
// note: the stdout reader goroutine may still be draining; ideally wait for it before building the request
if stt.AudioBuffer == nil {
err := errors.New("unexpected nil AudioBuffer")
stt.logger.Error(err.Error())
return "", err
}
// Create WAV header first
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
// Add audio file part
part, err := writer.CreateFormFile("file", "recording.wav")
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
// Stream directly to multipart writer: header + raw data
dataSize := stt.AudioBuffer.Len()
stt.writeWavHeader(part, dataSize)
if _, err := io.Copy(part, stt.AudioBuffer); err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
// Reset buffer for next recording
stt.AudioBuffer.Reset()
// Add response format field
err = writer.WriteField("response_format", "text")
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
if err := writer.Close(); err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
// Send request
resp, err := http.Post(stt.ServerURL, writer.FormDataContentType(), body) //nolint:noctx
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
defer resp.Body.Close()
// Read and print response
responseTextBytes, err := io.ReadAll(resp.Body)
if err != nil {
stt.logger.Error("fn: StopRecording", "error", err)
return "", err
}
resptext := strings.TrimRight(string(responseTextBytes), "\n")
// in case there are special tokens like [_BEG_]
resptext = specialRE.ReplaceAllString(resptext, "")
return strings.TrimSpace(strings.ReplaceAll(resptext, "\n ", "\n")), nil
}
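
StopRecording streams writeWavHeader's output plus the raw PCM buffer into the multipart part, but writeWavHeader itself falls outside the hunk shown here. A sketch consistent with the 44-byte PCM header laid out by the removed createWAVHeader (16-bit mono at stt.SampleRate), offered as an assumption rather than the file's actual code:

```go
// writeWavHeader writes a minimal 44-byte RIFF/WAVE header for 16-bit mono PCM.
// Sketch only: the real method in extra/whisper_server.go is outside this hunk.
// Needs "bytes", "encoding/binary" and "io" in the file's import block.
func (stt *WhisperServer) writeWavHeader(w io.Writer, dataSize int) {
	var h bytes.Buffer
	h.WriteString("RIFF")
	_ = binary.Write(&h, binary.LittleEndian, uint32(36+dataSize)) // overall chunk size
	h.WriteString("WAVE")
	h.WriteString("fmt ")
	_ = binary.Write(&h, binary.LittleEndian, uint32(16))               // fmt chunk size (PCM)
	_ = binary.Write(&h, binary.LittleEndian, uint16(1))                // audio format: PCM
	_ = binary.Write(&h, binary.LittleEndian, uint16(1))                // channels: mono
	_ = binary.Write(&h, binary.LittleEndian, uint32(stt.SampleRate))   // sample rate
	_ = binary.Write(&h, binary.LittleEndian, uint32(stt.SampleRate*2)) // byte rate = rate*channels*16/8
	_ = binary.Write(&h, binary.LittleEndian, uint16(2))                // block align = channels*16/8
	_ = binary.Write(&h, binary.LittleEndian, uint16(16))               // bits per sample
	h.WriteString("data")
	_ = binary.Write(&h, binary.LittleEndian, uint32(dataSize)) // PCM data size
	_, _ = w.Write(h.Bytes())
}
```
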

8
go.mod
View File

@@ -4,13 +4,11 @@ go 1.25.1
require ( require (
github.com/BurntSushi/toml v1.5.0 github.com/BurntSushi/toml v1.5.0
github.com/GrailFinder/google-translate-tts v0.1.3 github.com/GrailFinder/google-translate-tts v0.1.4
github.com/GrailFinder/searchagent v0.2.0 github.com/GrailFinder/searchagent v0.2.0
github.com/PuerkitoBio/goquery v1.11.0 github.com/PuerkitoBio/goquery v1.11.0
github.com/gdamore/tcell/v2 v2.13.2 github.com/gdamore/tcell/v2 v2.13.2
github.com/glebarez/go-sqlite v1.22.0 github.com/glebarez/go-sqlite v1.22.0
github.com/gopxl/beep/v2 v2.1.1
github.com/gordonklaus/portaudio v0.0.0-20250206071425-98a94950218b
github.com/jmoiron/sqlx v1.4.0 github.com/jmoiron/sqlx v1.4.0
github.com/ledongthuc/pdf v0.0.0-20250511090121-5959a4027728 github.com/ledongthuc/pdf v0.0.0-20250511090121-5959a4027728
github.com/neurosnap/sentences v1.1.2 github.com/neurosnap/sentences v1.1.2
@@ -25,21 +23,17 @@ require (
github.com/andybalholm/cascadia v1.3.3 // indirect github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/deckarep/golang-set/v2 v2.8.0 // indirect github.com/deckarep/golang-set/v2 v2.8.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect
github.com/ebitengine/oto/v3 v3.4.0 // indirect
github.com/ebitengine/purego v0.9.1 // indirect
github.com/emirpasic/gods v1.18.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect
github.com/gdamore/encoding v1.0.1 // indirect github.com/gdamore/encoding v1.0.1 // indirect
github.com/go-jose/go-jose/v3 v3.0.4 // indirect github.com/go-jose/go-jose/v3 v3.0.4 // indirect
github.com/go-stack/stack v1.8.1 // indirect github.com/go-stack/stack v1.8.1 // indirect
github.com/google/uuid v1.6.0 // indirect github.com/google/uuid v1.6.0 // indirect
github.com/hajimehoshi/go-mp3 v0.3.4 // indirect github.com/hajimehoshi/go-mp3 v0.3.4 // indirect
github.com/hajimehoshi/oto/v2 v2.3.1 // indirect
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/ncruces/go-strftime v1.0.0 // indirect github.com/ncruces/go-strftime v1.0.0 // indirect
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rivo/uniseg v0.4.7 // indirect github.com/rivo/uniseg v0.4.7 // indirect
github.com/schollz/progressbar/v2 v2.15.0 // indirect github.com/schollz/progressbar/v2 v2.15.0 // indirect

15
go.sum
View File

@@ -2,8 +2,8 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg=
github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/GrailFinder/google-translate-tts v0.1.3 h1:Mww9tNzTWjjSh+OCbTPl/+21oMPKcUecXZfU7nTB/lA= github.com/GrailFinder/google-translate-tts v0.1.4 h1:NJoPZUGfBrmouQMN19MUcNPNUx4tmf4a8OZRME4E4Mg=
github.com/GrailFinder/google-translate-tts v0.1.3/go.mod h1:YIOLKR7sObazdUCrSex3u9OVBovU55eYgWa25vsQJ18= github.com/GrailFinder/google-translate-tts v0.1.4/go.mod h1:YIOLKR7sObazdUCrSex3u9OVBovU55eYgWa25vsQJ18=
github.com/GrailFinder/searchagent v0.2.0 h1:U2GVjLh/9xZt0xX9OcYk9Q2fMkyzyTiADPUmUisRdtQ= github.com/GrailFinder/searchagent v0.2.0 h1:U2GVjLh/9xZt0xX9OcYk9Q2fMkyzyTiADPUmUisRdtQ=
github.com/GrailFinder/searchagent v0.2.0/go.mod h1:d66tn5+22LI8IGJREUsRBT60P0sFdgQgvQRqyvgItrs= github.com/GrailFinder/searchagent v0.2.0/go.mod h1:d66tn5+22LI8IGJREUsRBT60P0sFdgQgvQRqyvgItrs=
github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw= github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw=
@@ -17,10 +17,6 @@ github.com/deckarep/golang-set/v2 v2.8.0 h1:swm0rlPCmdWn9mESxKOjWk8hXSqoxOp+Zlfu
github.com/deckarep/golang-set/v2 v2.8.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4= github.com/deckarep/golang-set/v2 v2.8.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/ebitengine/oto/v3 v3.4.0 h1:br0PgASsEWaoWn38b2Goe7m1GKFYfNgnsjSd5Gg+/bQ=
github.com/ebitengine/oto/v3 v3.4.0/go.mod h1:IOleLVD0m+CMak3mRVwsYY8vTctQgOM0iiL6S7Ar7eI=
github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A=
github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw= github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
@@ -41,13 +37,8 @@ github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17k
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA= github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopxl/beep/v2 v2.1.1 h1:6FYIYMm2qPAdWkjX+7xwKrViS1x0Po5kDMdRkq8NVbU=
github.com/gopxl/beep/v2 v2.1.1/go.mod h1:ZAm9TGQ9lvpoiFLd4zf5B1IuyxZhgRACMId1XJbaW0E=
github.com/gordonklaus/portaudio v0.0.0-20250206071425-98a94950218b h1:WEuQWBxelOGHA6z9lABqaMLMrfwVyMdN3UgRLT+YUPo=
github.com/gordonklaus/portaudio v0.0.0-20250206071425-98a94950218b/go.mod h1:esZFQEUwqC+l76f2R8bIWSwXMaPbp79PppwZ1eJhFco=
github.com/hajimehoshi/go-mp3 v0.3.4 h1:NUP7pBYH8OguP4diaTZ9wJbUbk3tC0KlfzsEpWmYj68= github.com/hajimehoshi/go-mp3 v0.3.4 h1:NUP7pBYH8OguP4diaTZ9wJbUbk3tC0KlfzsEpWmYj68=
github.com/hajimehoshi/go-mp3 v0.3.4/go.mod h1:fRtZraRFcWb0pu7ok0LqyFhCUrPeMsGRSVop0eemFmo= github.com/hajimehoshi/go-mp3 v0.3.4/go.mod h1:fRtZraRFcWb0pu7ok0LqyFhCUrPeMsGRSVop0eemFmo=
github.com/hajimehoshi/oto/v2 v2.3.1 h1:qrLKpNus2UfD674oxckKjNJmesp9hMh7u7QCrStB3Rc=
github.com/hajimehoshi/oto/v2 v2.3.1/go.mod h1:seWLbgHH7AyUMYKfKYT9pg7PhUu9/SisyJvNTT+ASQo= github.com/hajimehoshi/oto/v2 v2.3.1/go.mod h1:seWLbgHH7AyUMYKfKYT9pg7PhUu9/SisyJvNTT+ASQo=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
@@ -71,8 +62,6 @@ github.com/neurosnap/sentences v1.1.2 h1:iphYOzx/XckXeBiLIUBkPu2EKMJ+6jDbz/sLJZ7
github.com/neurosnap/sentences v1.1.2/go.mod h1:/pwU4E9XNL21ygMIkOIllv/SMy2ujHwpf8GQPu1YPbQ= github.com/neurosnap/sentences v1.1.2/go.mod h1:/pwU4E9XNL21ygMIkOIllv/SMy2ujHwpf8GQPu1YPbQ=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/playwright-community/playwright-go v0.5700.1 h1:PNFb1byWqrTT720rEO0JL88C6Ju0EmUnR5deFLvtP/U= github.com/playwright-community/playwright-go v0.5700.1 h1:PNFb1byWqrTT720rEO0JL88C6Ju0EmUnR5deFLvtP/U=
github.com/playwright-community/playwright-go v0.5700.1/go.mod h1:MlSn1dZrx8rszbCxY6x3qK89ZesJUYVx21B2JnkoNF0= github.com/playwright-community/playwright-go v0.5700.1/go.mod h1:MlSn1dZrx8rszbCxY6x3qK89ZesJUYVx21B2JnkoNF0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=

View File

@@ -5,7 +5,6 @@ import (
"gf-lt/models" "gf-lt/models"
"gf-lt/pngmeta" "gf-lt/pngmeta"
"image" "image"
"net/url"
"os" "os"
"os/exec" "os/exec"
"path" "path"
@@ -13,20 +12,16 @@ import (
"slices" "slices"
"strconv" "strconv"
"strings" "strings"
"sync/atomic"
"time" "time"
"unicode" "unicode"
"sync/atomic"
"github.com/rivo/tview" "github.com/rivo/tview"
) )
// Cached model color - updated by background goroutine // Cached model color - updated by background goroutine
var cachedModelColor atomic.Value // stores string // var cachedModelColor string = "orange"
var cachedModelColor atomic.Value
func init() {
cachedModelColor.Store("orange")
}
// startModelColorUpdater starts a background goroutine that periodically updates // startModelColorUpdater starts a background goroutine that periodically updates
// the cached model color. Only runs HTTP requests for local llama.cpp APIs. // the cached model color. Only runs HTTP requests for local llama.cpp APIs.
@@ -49,7 +44,7 @@ func updateCachedModelColor() {
return return
} }
// Check if model is loaded // Check if model is loaded
loaded, err := isModelLoaded(chatBody.GetModel()) loaded, err := isModelLoaded(chatBody.Model)
if err != nil { if err != nil {
// On error, assume not loaded (red) // On error, assume not loaded (red)
cachedModelColor.Store("red") cachedModelColor.Store("red")
@@ -109,12 +104,12 @@ func refreshChatDisplay() {
viewingAs = cfg.WriteNextMsgAs viewingAs = cfg.WriteNextMsgAs
} }
// Filter messages for this character // Filter messages for this character
filteredMessages := filterMessagesForCharacter(chatBody.GetMessages(), viewingAs) filteredMessages := filterMessagesForCharacter(chatBody.Messages, viewingAs)
displayText := chatToText(filteredMessages, cfg.ShowSys) displayText := chatToText(filteredMessages, cfg.ShowSys)
textView.SetText(displayText) textView.SetText(displayText)
colorText() colorText()
updateStatusLine() updateStatusLine()
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
} }
@@ -223,8 +218,8 @@ func startNewChat(keepSysP bool) {
logger.Warn("no such sys msg", "name", cfg.AssistantRole) logger.Warn("no such sys msg", "name", cfg.AssistantRole)
} }
// set chat body // set chat body
chatBody.TruncateMessages(2) chatBody.Messages = chatBody.Messages[:2]
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
newChat := &models.Chat{ newChat := &models.Chat{
ID: id + 1, ID: id + 1,
Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole), Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole),
@@ -329,12 +324,10 @@ func strInSlice(s string, sl []string) bool {
// isLocalLlamacpp checks if the current API is a local llama.cpp instance. // isLocalLlamacpp checks if the current API is a local llama.cpp instance.
func isLocalLlamacpp() bool { func isLocalLlamacpp() bool {
u, err := url.Parse(cfg.CurrentAPI) if strings.Contains(cfg.CurrentAPI, "openrouter") || strings.Contains(cfg.CurrentAPI, "deepseek") {
if err != nil {
return false return false
} }
host := u.Hostname() return true
return host == "localhost" || host == "127.0.0.1" || host == "::1"
} }
// getModelColor returns the cached color tag for the model name. // getModelColor returns the cached color tag for the model name.
@@ -376,7 +369,7 @@ func makeStatusLine() string {
// Get model color based on load status for local llama.cpp models // Get model color based on load status for local llama.cpp models
modelColor := getModelColor() modelColor := getModelColor()
statusLine := fmt.Sprintf(statusLineTempl, activeChatName, statusLine := fmt.Sprintf(statusLineTempl, activeChatName,
boolColors[cfg.ToolUse], modelColor, chatBody.GetModel(), boolColors[cfg.SkipLLMResp], boolColors[cfg.ToolUse], modelColor, chatBody.Model, boolColors[cfg.SkipLLMResp],
cfg.CurrentAPI, persona, botPersona) cfg.CurrentAPI, persona, botPersona)
if cfg.STT_ENABLED { if cfg.STT_ENABLED {
recordingS := fmt.Sprintf(" | [%s:-:b]voice recording[-:-:-] (ctrl+r)", recordingS := fmt.Sprintf(" | [%s:-:b]voice recording[-:-:-] (ctrl+r)",
@@ -402,11 +395,11 @@ func makeStatusLine() string {
} }
func getContextTokens() int { func getContextTokens() int {
if chatBody == nil { if chatBody == nil || chatBody.Messages == nil {
return 0 return 0
} }
total := 0 total := 0
messages := chatBody.GetMessages() messages := chatBody.Messages
for i := range messages { for i := range messages {
msg := &messages[i] msg := &messages[i]
if msg.Stats != nil && msg.Stats.Tokens > 0 { if msg.Stats != nil && msg.Stats.Tokens > 0 {
@@ -421,33 +414,26 @@ func getContextTokens() int {
const deepseekContext = 128000 const deepseekContext = 128000
func getMaxContextTokens() int { func getMaxContextTokens() int {
if chatBody == nil || chatBody.GetModel() == "" { if chatBody == nil || chatBody.Model == "" {
return 0 return 0
} }
modelName := chatBody.GetModel() modelName := chatBody.Model
switch { switch {
case strings.Contains(cfg.CurrentAPI, "openrouter"): case strings.Contains(cfg.CurrentAPI, "openrouter"):
ord := orModelsData.Load() if orModelsData != nil {
if ord != nil { for i := range orModelsData.Data {
data := ord.(*models.ORModels) m := &orModelsData.Data[i]
if data != nil {
for i := range data.Data {
m := &data.Data[i]
if m.ID == modelName { if m.ID == modelName {
return m.ContextLength return m.ContextLength
} }
} }
} }
}
case strings.Contains(cfg.CurrentAPI, "deepseek"): case strings.Contains(cfg.CurrentAPI, "deepseek"):
return deepseekContext return deepseekContext
default: default:
lmd := localModelsData.Load() if localModelsData != nil {
if lmd != nil { for i := range localModelsData.Data {
data := lmd.(*models.LCPModels) m := &localModelsData.Data[i]
if data != nil {
for i := range data.Data {
m := &data.Data[i]
if m.ID == modelName { if m.ID == modelName {
for _, arg := range m.Status.Args { for _, arg := range m.Status.Args {
if strings.HasPrefix(arg, "--ctx-size") { if strings.HasPrefix(arg, "--ctx-size") {
@@ -476,7 +462,6 @@ func getMaxContextTokens() int {
} }
} }
} }
}
return 0 return 0
} }
@@ -504,7 +489,7 @@ func listChatRoles() []string {
func deepseekModelValidator() error { func deepseekModelValidator() error {
if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI { if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI {
if chatBody.GetModel() != "deepseek-chat" && chatBody.GetModel() != "deepseek-reasoner" { if chatBody.Model != "deepseek-chat" && chatBody.Model != "deepseek-reasoner" {
showToast("bad request", "wrong deepseek model name") showToast("bad request", "wrong deepseek model name")
return nil return nil
} }
@@ -556,7 +541,7 @@ func executeCommandAndDisplay(cmdText string) {
cmdText = strings.TrimSpace(cmdText) cmdText = strings.TrimSpace(cmdText)
if cmdText == "" { if cmdText == "" {
fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n") fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n")
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
colorText() colorText()
@@ -581,14 +566,14 @@ func executeCommandAndDisplay(cmdText string) {
outputContent := workingDir outputContent := workingDir
// Add the command being executed to the chat // Add the command being executed to the chat
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n", fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
chatBody.GetMessageCount(), cfg.ToolRole, cmdText) len(chatBody.Messages), cfg.ToolRole, cmdText)
fmt.Fprintf(textView, "%s\n", outputContent) fmt.Fprintf(textView, "%s\n", outputContent)
combinedMsg := models.RoleMsg{ combinedMsg := models.RoleMsg{
Role: cfg.ToolRole, Role: cfg.ToolRole,
Content: "$ " + cmdText + "\n\n" + outputContent, Content: "$ " + cmdText + "\n\n" + outputContent,
} }
chatBody.AppendMessage(combinedMsg) chatBody.Messages = append(chatBody.Messages, combinedMsg)
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
colorText() colorText()
@@ -596,14 +581,14 @@ func executeCommandAndDisplay(cmdText string) {
} else { } else {
outputContent := "cd: " + newDir + ": No such file or directory" outputContent := "cd: " + newDir + ": No such file or directory"
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n", fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
chatBody.GetMessageCount(), cfg.ToolRole, cmdText) len(chatBody.Messages), cfg.ToolRole, cmdText)
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputContent) fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputContent)
combinedMsg := models.RoleMsg{ combinedMsg := models.RoleMsg{
Role: cfg.ToolRole, Role: cfg.ToolRole,
Content: "$ " + cmdText + "\n\n" + outputContent, Content: "$ " + cmdText + "\n\n" + outputContent,
} }
chatBody.AppendMessage(combinedMsg) chatBody.Messages = append(chatBody.Messages, combinedMsg)
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
colorText() colorText()
@@ -618,7 +603,7 @@ func executeCommandAndDisplay(cmdText string) {
output, err := cmd.CombinedOutput() output, err := cmd.CombinedOutput()
// Add the command being executed to the chat // Add the command being executed to the chat
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n", fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
chatBody.GetMessageCount(), cfg.ToolRole, cmdText) len(chatBody.Messages), cfg.ToolRole, cmdText)
var outputContent string var outputContent string
if err != nil { if err != nil {
// Include both output and error // Include both output and error
@@ -649,9 +634,9 @@ func executeCommandAndDisplay(cmdText string) {
Role: cfg.ToolRole, Role: cfg.ToolRole,
Content: combinedContent, Content: combinedContent,
} }
chatBody.AppendMessage(combinedMsg) chatBody.Messages = append(chatBody.Messages, combinedMsg)
// Scroll to end and update colors // Scroll to end and update colors
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
colorText() colorText()
@@ -679,7 +664,7 @@ func performSearch(term string) {
searchResultLengths = nil searchResultLengths = nil
originalTextForSearch = "" originalTextForSearch = ""
// Re-render text without highlights // Re-render text without highlights
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
return return
} }
@@ -981,3 +966,52 @@ func extractDisplayPath(p, bp string) string {
} }
return p return p
} }
func getValidKnowToRecipient(msg *models.RoleMsg) (string, bool) {
if cfg == nil || !cfg.CharSpecificContextEnabled {
return "", false
}
// case where all roles are in the tag => public message
cr := listChatRoles()
slices.Sort(cr)
slices.Sort(msg.KnownTo)
if slices.Equal(cr, msg.KnownTo) {
logger.Info("got msg with tag mentioning every role")
return "", false
}
// Check each character in the KnownTo list
for _, recipient := range msg.KnownTo {
if recipient == msg.Role || recipient == cfg.ToolRole {
// weird cases, skip
continue
}
// Skip if this is the user character (user handles their own turn)
// If user is in KnownTo, stop processing - it's the user's turn
if recipient == cfg.UserRole || recipient == cfg.WriteNextMsgAs {
return "", false
}
return recipient, true
}
return "", false
}
// triggerPrivateMessageResponses checks if a message was sent privately to specific characters
// and triggers those non-user characters to respond
func triggerPrivateMessageResponses(msg *models.RoleMsg) {
recipient, ok := getValidKnowToRecipient(msg)
if !ok || recipient == "" {
return
}
// Trigger the recipient character to respond
triggerMsg := recipient + ":\n"
// Send empty message so LLM continues naturally from the conversation
crr := &models.ChatRoundReq{
UserMsg: triggerMsg,
Role: recipient,
Resume: true,
}
fmt.Fprintf(textView, "\n[-:-:b](%d) ", len(chatBody.Messages))
fmt.Fprint(textView, roleToIcon(recipient))
fmt.Fprint(textView, "[-:-:-]\n")
chatRoundChan <- crr
}

64
llm.go
View File

@@ -13,9 +13,8 @@ var lastImg string // for ctrl+j
// containsToolSysMsg checks if the toolSysMsg already exists in the chat body // containsToolSysMsg checks if the toolSysMsg already exists in the chat body
func containsToolSysMsg() bool { func containsToolSysMsg() bool {
messages := chatBody.GetMessages() for i := range chatBody.Messages {
for i := range messages { if chatBody.Messages[i].Role == cfg.ToolRole && chatBody.Messages[i].Content == toolSysMsg {
if messages[i].Role == cfg.ToolRole && messages[i].Content == toolSysMsg {
return true return true
} }
} }
@@ -63,11 +62,11 @@ type ChunkParser interface {
func choseChunkParser() { func choseChunkParser() {
chunkParser = LCPCompletion{} chunkParser = LCPCompletion{}
switch cfg.CurrentAPI { switch cfg.CurrentAPI {
case "http://localhost:8080/completion": case "http://localhost:8080/completion", "http://127.0.0.1:8080/completion":
chunkParser = LCPCompletion{} chunkParser = LCPCompletion{}
logger.Debug("chosen lcpcompletion", "link", cfg.CurrentAPI) logger.Debug("chosen lcpcompletion", "link", cfg.CurrentAPI)
return return
case "http://localhost:8080/v1/chat/completions": case "http://localhost:8080/v1/chat/completions", "http://127.0.0.1:8080/v1/chat/completions":
chunkParser = LCPChat{} chunkParser = LCPChat{}
logger.Debug("chosen lcpchat", "link", cfg.CurrentAPI) logger.Debug("chosen lcpchat", "link", cfg.CurrentAPI)
return return
@@ -88,6 +87,11 @@ func choseChunkParser() {
logger.Debug("chosen openrouterchat", "link", cfg.CurrentAPI) logger.Debug("chosen openrouterchat", "link", cfg.CurrentAPI)
return return
default: default:
logger.Warn("unexpected case, assuming llama.cpp on non default address", "link", cfg.CurrentAPI)
if strings.Contains(cfg.CurrentAPI, "chat") {
chunkParser = LCPChat{}
return
}
chunkParser = LCPCompletion{} chunkParser = LCPCompletion{}
} }
} }
@@ -136,13 +140,13 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
newMsg = models.RoleMsg{Role: role, Content: msg} newMsg = models.RoleMsg{Role: role, Content: msg}
} }
newMsg = *processMessageTag(&newMsg) newMsg = *processMessageTag(&newMsg)
chatBody.AppendMessage(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
// sending description of the tools and how to use them // sending description of the tools and how to use them
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
} }
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages()) filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
// Build prompt and extract images inline as we process each message // Build prompt and extract images inline as we process each message
messages := make([]string, len(filteredMessages)) messages := make([]string, len(filteredMessages))
for i := range filteredMessages { for i := range filteredMessages {
@@ -184,7 +188,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
} }
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData)) "msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
payload := models.NewLCPReq(prompt, chatBody.GetModel(), multimodalData, payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles())) defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles()))
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
@@ -290,17 +294,17 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
newMsg = models.NewRoleMsg(role, msg) newMsg = models.NewRoleMsg(role, msg)
} }
newMsg = *processMessageTag(&newMsg) newMsg = *processMessageTag(&newMsg)
chatBody.AppendMessage(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
"content_len", len(newMsg.Content), "message_count_after_add", chatBody.GetMessageCount()) "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
} }
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages()) filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// openai /v1/chat does not support custom roles; needs to be user, assistant, system // openai /v1/chat does not support custom roles; needs to be user, assistant, system
// Add persona suffix to the last user message to indicate who the assistant should reply as // Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)), Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.GetModel(), Model: chatBody.Model,
Stream: chatBody.GetStream(), Stream: chatBody.Stream,
} }
for i := range filteredMessages { for i := range filteredMessages {
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i]) strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
@@ -376,13 +380,13 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
if msg != "" { // otherwise let the bot to continue if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
newMsg = *processMessageTag(&newMsg) newMsg = *processMessageTag(&newMsg)
chatBody.AppendMessage(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
// sending description of the tools and how to use them // sending description of the tools and how to use them
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
} }
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages()) filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
messages := make([]string, len(filteredMessages)) messages := make([]string, len(filteredMessages))
for i := range filteredMessages { for i := range filteredMessages {
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt() messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
@@ -395,7 +399,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
} }
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt) "msg", msg, "resume", resume, "prompt", prompt)
payload := models.NewDSCompletionReq(prompt, chatBody.GetModel(), payload := models.NewDSCompletionReq(prompt, chatBody.Model,
defaultLCPProps["temp"], defaultLCPProps["temp"],
chatBody.MakeStopSliceExcluding("", listChatRoles())) chatBody.MakeStopSliceExcluding("", listChatRoles()))
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
@@ -449,15 +453,15 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
if msg != "" { // otherwise let the bot continue if msg != "" { // otherwise let the bot continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
newMsg = *processMessageTag(&newMsg) newMsg = *processMessageTag(&newMsg)
chatBody.AppendMessage(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
// Create copy of chat body with standardized user role // Create copy of chat body with standardized user role
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages()) filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Add persona suffix to the last user message to indicate who the assistant should reply as // Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)), Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.GetModel(), Model: chatBody.Model,
Stream: chatBody.GetStream(), Stream: chatBody.Stream,
} }
for i := range filteredMessages { for i := range filteredMessages {
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i]) strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
@@ -528,13 +532,13 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
if msg != "" { // otherwise let the bot to continue if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
newMsg = *processMessageTag(&newMsg) newMsg = *processMessageTag(&newMsg)
chatBody.AppendMessage(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
// sending description of the tools and how to use them // sending description of the tools and how to use them
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
} }
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages()) filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
messages := make([]string, len(filteredMessages)) messages := make([]string, len(filteredMessages))
for i := range filteredMessages { for i := range filteredMessages {
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt() messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
@@ -548,7 +552,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles()) stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice) "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
payload := models.NewOpenRouterCompletionReq(chatBody.GetModel(), prompt, payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt,
defaultLCPProps, stopSlice) defaultLCPProps, stopSlice)
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
@@ -634,15 +638,15 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
newMsg = models.NewRoleMsg(role, msg) newMsg = models.NewRoleMsg(role, msg)
} }
newMsg = *processMessageTag(&newMsg) newMsg = *processMessageTag(&newMsg)
chatBody.AppendMessage(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
// Create copy of chat body with standardized user role // Create copy of chat body with standardized user role
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages()) filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Add persona suffix to the last user message to indicate who the assistant should reply as // Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)), Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.GetModel(), Model: chatBody.Model,
Stream: chatBody.GetStream(), Stream: chatBody.Stream,
} }
for i := range filteredMessages { for i := range filteredMessages {
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i]) strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
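
The new default case in choseChunkParser assumes any unrecognized URL is a llama.cpp server on a non-default address and picks the chat or completion parser from the path. A small illustration of how that routing behaves; pickParser is a hypothetical distillation of the default branch above, not a function in the codebase:

```go
package main

import (
	"fmt"
	"strings"
)

// pickParser mirrors choseChunkParser's fallback: chat-style endpoints get the
// chat parser, everything else the completion parser.
func pickParser(api string) string {
	if strings.Contains(api, "chat") {
		return "LCPChat"
	}
	return "LCPCompletion"
}

func main() {
	for _, api := range []string{
		"http://192.168.1.10:8080/v1/chat/completions", // llama.cpp on another host
		"http://192.168.1.10:8080/completion",
	} {
		fmt.Println(api, "->", pickParser(api))
	}
}
```
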

View File

@@ -1,13 +1,15 @@
package main package main
import ( import (
"sync/atomic"
"github.com/rivo/tview" "github.com/rivo/tview"
) )
var ( var (
boolColors = map[bool]string{true: "green", false: "red"} boolColors = map[bool]string{true: "green", false: "red"}
botRespMode = false botRespMode atomic.Bool
toolRunningMode = false toolRunningMode atomic.Bool
editMode = false editMode = false
roleEditMode = false roleEditMode = false
injectRole = true injectRole = true
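
Switching botRespMode and toolRunningMode from plain bools to atomic.Bool lets the UI goroutine and the streaming goroutines flip and read the flags without sharing a mutex. A minimal sketch of the access pattern; the variable name follows the diff, the surrounding logic is assumed:

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

var botRespMode atomic.Bool // zero value is false and safe for concurrent use

func main() {
	go func() {
		botRespMode.Store(true) // streaming goroutine marks a response in flight
		time.Sleep(10 * time.Millisecond)
		botRespMode.Store(false)
	}()
	time.Sleep(5 * time.Millisecond)
	fmt.Println("bot responding:", botRespMode.Load()) // UI goroutine reads without locking
	time.Sleep(20 * time.Millisecond)
	fmt.Println("bot responding:", botRespMode.Load())
}
```
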

View File

@@ -6,7 +6,6 @@ import (
"fmt" "fmt"
"os" "os"
"strings" "strings"
"sync"
) )
type FuncCall struct { type FuncCall struct {
@@ -640,253 +639,3 @@ type MultimodalToolResp struct {
Type string `json:"type"` Type string `json:"type"`
Parts []map[string]string `json:"parts"` Parts []map[string]string `json:"parts"`
} }
// SafeChatBody is a thread-safe wrapper around ChatBody using RWMutex.
// This allows safe concurrent access to chat state from multiple goroutines.
type SafeChatBody struct {
mu sync.RWMutex
ChatBody
}
// NewSafeChatBody creates a new SafeChatBody from an existing ChatBody.
// If cb is nil, creates an empty ChatBody.
func NewSafeChatBody(cb *ChatBody) *SafeChatBody {
if cb == nil {
return &SafeChatBody{
ChatBody: ChatBody{
Messages: []RoleMsg{},
},
}
}
return &SafeChatBody{
ChatBody: *cb,
}
}
// GetModel returns the model name (thread-safe read).
func (s *SafeChatBody) GetModel() string {
s.mu.RLock()
defer s.mu.RUnlock()
return s.Model
}
// SetModel sets the model name (thread-safe write).
func (s *SafeChatBody) SetModel(model string) {
s.mu.Lock()
defer s.mu.Unlock()
s.Model = model
}
// GetStream returns the stream flag (thread-safe read).
func (s *SafeChatBody) GetStream() bool {
s.mu.RLock()
defer s.mu.RUnlock()
return s.Stream
}
// SetStream sets the stream flag (thread-safe write).
func (s *SafeChatBody) SetStream(stream bool) {
s.mu.Lock()
defer s.mu.Unlock()
s.Stream = stream
}
// GetMessages returns a copy of all messages (thread-safe read).
// Returns a copy to prevent race conditions after the lock is released.
func (s *SafeChatBody) GetMessages() []RoleMsg {
s.mu.RLock()
defer s.mu.RUnlock()
// Return a copy to prevent external modification
messagesCopy := make([]RoleMsg, len(s.Messages))
copy(messagesCopy, s.Messages)
return messagesCopy
}
// SetMessages replaces all messages (thread-safe write).
func (s *SafeChatBody) SetMessages(messages []RoleMsg) {
s.mu.Lock()
defer s.mu.Unlock()
s.Messages = messages
}
// AppendMessage adds a message to the end (thread-safe write).
func (s *SafeChatBody) AppendMessage(msg RoleMsg) {
s.mu.Lock()
defer s.mu.Unlock()
s.Messages = append(s.Messages, msg)
}
// GetMessageAt returns a message at a specific index (thread-safe read).
// Returns the message and a boolean indicating if the index was valid.
func (s *SafeChatBody) GetMessageAt(index int) (RoleMsg, bool) {
s.mu.RLock()
defer s.mu.RUnlock()
if index < 0 || index >= len(s.Messages) {
return RoleMsg{}, false
}
return s.Messages[index], true
}
// SetMessageAt updates a message at a specific index (thread-safe write).
// Returns false if index is out of bounds.
func (s *SafeChatBody) SetMessageAt(index int, msg RoleMsg) bool {
s.mu.Lock()
defer s.mu.Unlock()
if index < 0 || index >= len(s.Messages) {
return false
}
s.Messages[index] = msg
return true
}
// GetLastMessage returns the last message (thread-safe read).
// Returns the message and a boolean indicating if the chat has messages.
func (s *SafeChatBody) GetLastMessage() (RoleMsg, bool) {
s.mu.RLock()
defer s.mu.RUnlock()
if len(s.Messages) == 0 {
return RoleMsg{}, false
}
return s.Messages[len(s.Messages)-1], true
}
// GetMessageCount returns the number of messages (thread-safe read).
func (s *SafeChatBody) GetMessageCount() int {
s.mu.RLock()
defer s.mu.RUnlock()
return len(s.Messages)
}
// RemoveLastMessage removes the last message (thread-safe write).
// Returns false if there are no messages.
func (s *SafeChatBody) RemoveLastMessage() bool {
s.mu.Lock()
defer s.mu.Unlock()
if len(s.Messages) == 0 {
return false
}
s.Messages = s.Messages[:len(s.Messages)-1]
return true
}
// TruncateMessages keeps only the first n messages (thread-safe write).
func (s *SafeChatBody) TruncateMessages(n int) {
s.mu.Lock()
defer s.mu.Unlock()
if n < len(s.Messages) {
s.Messages = s.Messages[:n]
}
}
// ClearMessages removes all messages (thread-safe write).
func (s *SafeChatBody) ClearMessages() {
s.mu.Lock()
defer s.mu.Unlock()
s.Messages = []RoleMsg{}
}
// Rename renames all occurrences of oldname to newname in messages (thread-safe read-modify-write).
func (s *SafeChatBody) Rename(oldname, newname string) {
s.mu.Lock()
defer s.mu.Unlock()
for i := range s.Messages {
s.Messages[i].Content = strings.ReplaceAll(s.Messages[i].Content, oldname, newname)
s.Messages[i].Role = strings.ReplaceAll(s.Messages[i].Role, oldname, newname)
}
}
// ListRoles returns all unique roles in messages (thread-safe read).
func (s *SafeChatBody) ListRoles() []string {
s.mu.RLock()
defer s.mu.RUnlock()
namesMap := make(map[string]struct{})
for i := range s.Messages {
namesMap[s.Messages[i].Role] = struct{}{}
}
resp := make([]string, len(namesMap))
i := 0
for k := range namesMap {
resp[i] = k
i++
}
return resp
}
// MakeStopSlice returns stop strings for all roles (thread-safe read).
func (s *SafeChatBody) MakeStopSlice() []string {
return s.MakeStopSliceExcluding("", s.ListRoles())
}
// MakeStopSliceExcluding returns stop strings excluding a specific role (thread-safe read).
func (s *SafeChatBody) MakeStopSliceExcluding(excludeRole string, roleList []string) []string {
s.mu.RLock()
defer s.mu.RUnlock()
ss := []string{}
for _, role := range roleList {
if role == excludeRole {
continue
}
ss = append(ss,
role+":\n",
role+":",
role+": ",
role+": ",
role+": \n",
role+": ",
)
}
return ss
}
// UpdateMessageFunc updates a message at index using a provided function.
// The function receives the current message and returns the updated message.
// This is atomic and thread-safe (read-modify-write under single lock).
// Returns false if index is out of bounds.
func (s *SafeChatBody) UpdateMessageFunc(index int, updater func(RoleMsg) RoleMsg) bool {
s.mu.Lock()
defer s.mu.Unlock()
if index < 0 || index >= len(s.Messages) {
return false
}
s.Messages[index] = updater(s.Messages[index])
return true
}
// AppendMessageFunc appends a new message created by a provided function.
// The function receives the current message count and returns the new message.
// This is atomic and thread-safe.
func (s *SafeChatBody) AppendMessageFunc(creator func(count int) RoleMsg) {
s.mu.Lock()
defer s.mu.Unlock()
msg := creator(len(s.Messages))
s.Messages = append(s.Messages, msg)
}
// GetMessagesForLLM returns a filtered copy of messages for sending to LLM.
// This is thread-safe and returns a copy safe for external modification.
func (s *SafeChatBody) GetMessagesForLLM(filterFunc func([]RoleMsg) []RoleMsg) []RoleMsg {
s.mu.RLock()
defer s.mu.RUnlock()
if filterFunc == nil {
messagesCopy := make([]RoleMsg, len(s.Messages))
copy(messagesCopy, s.Messages)
return messagesCopy
}
return filterFunc(s.Messages)
}
// WithLock executes a function while holding the write lock.
// Use this for complex operations that need to be atomic.
func (s *SafeChatBody) WithLock(fn func(*ChatBody)) {
s.mu.Lock()
defer s.mu.Unlock()
fn(&s.ChatBody)
}
// WithRLock executes a function while holding the read lock.
// Use this for complex read-only operations.
func (s *SafeChatBody) WithRLock(fn func(*ChatBody)) {
s.mu.RLock()
defer s.mu.RUnlock()
fn(&s.ChatBody)
}

View File

@@ -22,7 +22,7 @@ func showModelSelectionPopup() {
models, err := fetchLCPModelsWithLoadStatus() models, err := fetchLCPModelsWithLoadStatus()
if err != nil { if err != nil {
logger.Error("failed to fetch models with load status", "error", err) logger.Error("failed to fetch models with load status", "error", err)
return LocalModels.Load().([]string) return LocalModels
} }
return models return models
} }
@@ -30,8 +30,7 @@ func showModelSelectionPopup() {
modelList := getModelListForAPI(cfg.CurrentAPI) modelList := getModelListForAPI(cfg.CurrentAPI)
// Check for empty options list // Check for empty options list
if len(modelList) == 0 { if len(modelList) == 0 {
localModels := LocalModels.Load().([]string) logger.Warn("empty model list for", "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
logger.Warn("empty model list for", "api", cfg.CurrentAPI, "localModelsLen", len(localModels), "orModelsLen", len(ORFreeModels))
var message string var message string
switch { switch {
case strings.Contains(cfg.CurrentAPI, "openrouter.ai"): case strings.Contains(cfg.CurrentAPI, "openrouter.ai"):
@@ -51,7 +50,7 @@ func showModelSelectionPopup() {
// Find the current model index to set as selected // Find the current model index to set as selected
currentModelIndex := -1 currentModelIndex := -1
for i, model := range modelList { for i, model := range modelList {
if strings.TrimPrefix(model, models.LoadedMark) == chatBody.GetModel() { if strings.TrimPrefix(model, models.LoadedMark) == chatBody.Model {
currentModelIndex = i currentModelIndex = i
} }
modelListWidget.AddItem(model, "", 0, nil) modelListWidget.AddItem(model, "", 0, nil)
@@ -62,8 +61,8 @@ func showModelSelectionPopup() {
} }
modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) { modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
modelName := strings.TrimPrefix(mainText, models.LoadedMark) modelName := strings.TrimPrefix(mainText, models.LoadedMark)
chatBody.SetModel(modelName) chatBody.Model = modelName
cfg.CurrentModel = chatBody.GetModel() cfg.CurrentModel = chatBody.Model
pages.RemovePage("modelSelectionPopup") pages.RemovePage("modelSelectionPopup")
app.SetFocus(textArea) app.SetFocus(textArea)
updateCachedModelColor() updateCachedModelColor()
@@ -151,13 +150,15 @@ func showAPILinkSelectionPopup() {
} }
// Assume local llama.cpp // Assume local llama.cpp
refreshLocalModelsIfEmpty() refreshLocalModelsIfEmpty()
return LocalModels.Load().([]string) localModelsMu.RLock()
defer localModelsMu.RUnlock()
return LocalModels
} }
newModelList := getModelListForAPI(cfg.CurrentAPI) newModelList := getModelListForAPI(cfg.CurrentAPI)
// Ensure chatBody.Model is in the new list; if not, set to first available model // Ensure chatBody.Model is in the new list; if not, set to first available model
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.GetModel()) { if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) {
chatBody.SetModel(strings.TrimPrefix(newModelList[0], models.LoadedMark)) chatBody.Model = strings.TrimPrefix(newModelList[0], models.LoadedMark)
cfg.CurrentModel = chatBody.GetModel() cfg.CurrentModel = chatBody.Model
updateToolCapabilities() updateToolCapabilities()
} }
pages.RemovePage("apiLinkSelectionPopup") pages.RemovePage("apiLinkSelectionPopup")
@@ -228,7 +229,7 @@ func showUserRoleSelectionPopup() {
// Update the user role in config // Update the user role in config
cfg.WriteNextMsgAs = mainText cfg.WriteNextMsgAs = mainText
// role got switch, update textview with character specific context for user // role got switch, update textview with character specific context for user
filtered := filterMessagesForCharacter(chatBody.GetMessages(), mainText) filtered := filterMessagesForCharacter(chatBody.Messages, mainText)
textView.SetText(chatToText(filtered, cfg.ShowSys)) textView.SetText(chatToText(filtered, cfg.ShowSys))
// Remove the popup page // Remove the popup page
pages.RemovePage("userRoleSelectionPopup") pages.RemovePage("userRoleSelectionPopup")

View File

@@ -4,11 +4,14 @@ import (
"fmt" "fmt"
"strconv" "strconv"
"strings" "strings"
"sync"
"github.com/gdamore/tcell/v2" "github.com/gdamore/tcell/v2"
"github.com/rivo/tview" "github.com/rivo/tview"
) )
var _ = sync.RWMutex{}
// Define constants for cell types // Define constants for cell types
const ( const (
CellTypeCheckbox = "checkbox" CellTypeCheckbox = "checkbox"
@@ -118,9 +121,6 @@ func makePropsTable(props map[string]float32) *tview.Table {
addCheckboxRow("TTS Enabled", cfg.TTS_ENABLED, func(checked bool) { addCheckboxRow("TTS Enabled", cfg.TTS_ENABLED, func(checked bool) {
cfg.TTS_ENABLED = checked cfg.TTS_ENABLED = checked
}) })
addCheckboxRow("Auto clean tool calls from context", cfg.AutoCleanToolCallsFromCtx, func(checked bool) {
cfg.AutoCleanToolCallsFromCtx = checked
})
addCheckboxRow("Enable Mouse", cfg.EnableMouse, func(checked bool) { addCheckboxRow("Enable Mouse", cfg.EnableMouse, func(checked bool) {
cfg.EnableMouse = checked cfg.EnableMouse = checked
// Reconfigure the app's mouse setting // Reconfigure the app's mouse setting
@@ -154,7 +154,9 @@ func makePropsTable(props map[string]float32) *tview.Table {
} }
// Assume local llama.cpp // Assume local llama.cpp
refreshLocalModelsIfEmpty() refreshLocalModelsIfEmpty()
return LocalModels.Load().([]string) localModelsMu.RLock()
defer localModelsMu.RUnlock()
return LocalModels
} }
// Add input fields // Add input fields
addInputRow("New char to write msg as", "", func(text string) { addInputRow("New char to write msg as", "", func(text string) {
@@ -257,8 +259,7 @@ func makePropsTable(props map[string]float32) *tview.Table {
// Check for empty options list // Check for empty options list
if len(data.Options) == 0 { if len(data.Options) == 0 {
localModels := LocalModels.Load().([]string) logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(localModels), "orModelsLen", len(ORFreeModels))
message := "No options available for " + label message := "No options available for " + label
if label == "Select a model" { if label == "Select a model" {
switch { switch {

View File

@@ -12,6 +12,7 @@ import (
"regexp" "regexp"
"runtime" "runtime"
"sort" "sort"
"strconv"
"strings" "strings"
"sync" "sync"
"time" "time"
@@ -24,11 +25,123 @@ const ()
var ( var (
// Status messages for TUI integration // Status messages for TUI integration
LongJobStatusCh = make(chan string, 100) // Increased buffer size for parallel batch updates LongJobStatusCh = make(chan string, 100) // Increased buffer size for parallel batch updates
FinishedRAGStatus = "finished loading RAG file; press Enter" FinishedRAGStatus = "finished loading RAG file; press x to exit"
LoadedFileRAGStatus = "loaded file" LoadedFileRAGStatus = "loaded file"
ErrRAGStatus = "some error occurred; failed to transfer data to vector db" ErrRAGStatus = "some error occurred; failed to transfer data to vector db"
// stopWords are common words that can be removed from queries when not part of phrases
stopWords = []string{"the", "a", "an", "and", "or", "but", "in", "on", "at", "to", "for", "of", "with", "by", "from", "up", "down", "left", "right", "about", "like", "such", "than", "then", "also", "too"}
) )
// isStopWord checks if a word is in the stop words list
func isStopWord(word string) bool {
for _, stop := range stopWords {
if strings.EqualFold(word, stop) {
return true
}
}
return false
}
// detectPhrases returns multi-word phrases from a query that should be treated as units
func detectPhrases(query string) []string {
words := strings.Fields(strings.ToLower(query))
var phrases []string
for i := 0; i < len(words)-1; i++ {
word1 := strings.Trim(words[i], ".,!?;:'\"()[]{}")
word2 := strings.Trim(words[i+1], ".,!?;:'\"()[]{}")
// Skip if either word is a stop word or too short
if isStopWord(word1) || isStopWord(word2) || len(word1) < 2 || len(word2) < 2 {
continue
}
// Check if this pair appears to be a meaningful phrase
// Simple heuristic: consecutive non-stop words of reasonable length
phrase := word1 + " " + word2
phrases = append(phrases, phrase)
// Optionally check for 3-word phrases
if i < len(words)-2 {
word3 := strings.Trim(words[i+2], ".,!?;:'\"()[]{}")
if !isStopWord(word3) && len(word3) >= 2 {
phrases = append(phrases, word1+" "+word2+" "+word3)
}
}
}
return phrases
}
// countPhraseMatches returns the number of query phrases found in text
func countPhraseMatches(text, query string) int {
phrases := detectPhrases(query)
if len(phrases) == 0 {
return 0
}
textLower := strings.ToLower(text)
count := 0
for _, phrase := range phrases {
if strings.Contains(textLower, phrase) {
count++
}
}
return count
}
// parseSlugIndices extracts batch and chunk indices from a slug
// slug format: filename_batch_chunk (e.g., "kjv_bible.epub_1786_0")
func parseSlugIndices(slug string) (batch, chunk int, ok bool) {
// Find the last two numbers separated by underscores
re := regexp.MustCompile(`_(\d+)_(\d+)$`)
matches := re.FindStringSubmatch(slug)
if matches == nil || len(matches) != 3 {
return 0, 0, false
}
batch, err1 := strconv.Atoi(matches[1])
chunk, err2 := strconv.Atoi(matches[2])
if err1 != nil || err2 != nil {
return 0, 0, false
}
return batch, chunk, true
}
// areSlugsAdjacent returns true if two slugs are from the same file and have sequential indices
func areSlugsAdjacent(slug1, slug2 string) bool {
// Extract filename prefix (everything before the last underscore sequence)
parts1 := strings.Split(slug1, "_")
parts2 := strings.Split(slug2, "_")
if len(parts1) < 3 || len(parts2) < 3 {
return false
}
// Compare filename prefixes (all parts except last two)
prefix1 := strings.Join(parts1[:len(parts1)-2], "_")
prefix2 := strings.Join(parts2[:len(parts2)-2], "_")
if prefix1 != prefix2 {
return false
}
batch1, chunk1, ok1 := parseSlugIndices(slug1)
batch2, chunk2, ok2 := parseSlugIndices(slug2)
if !ok1 || !ok2 {
return false
}
// Check if they're in same batch and chunks are sequential
if batch1 == batch2 && (chunk1 == chunk2+1 || chunk2 == chunk1+1) {
return true
}
// Check if they're in sequential batches and chunk indices suggest continuity
// This is heuristic but useful for cross-batch adjacency
if (batch1 == batch2+1 && chunk1 == 0) || (batch2 == batch1+1 && chunk2 == 0) {
return true
}
return false
}
type RAG struct { type RAG struct {
logger *slog.Logger logger *slog.Logger
store storage.FullRepo store storage.FullRepo
@@ -155,8 +268,8 @@ func createChunks(sentences []string, wordLimit, overlapWords uint32) []string {
} }
func sanitizeFTSQuery(query string) string { func sanitizeFTSQuery(query string) string {
// Remove double quotes and other problematic characters for FTS5 // Keep double quotes for FTS5 phrase matching
query = strings.ReplaceAll(query, "\"", " ") // Remove other problematic characters
query = strings.ReplaceAll(query, "'", " ") query = strings.ReplaceAll(query, "'", " ")
query = strings.ReplaceAll(query, ";", " ") query = strings.ReplaceAll(query, ";", " ")
query = strings.ReplaceAll(query, "\\", " ") query = strings.ReplaceAll(query, "\\", " ")
@@ -549,7 +662,6 @@ func (r *RAG) RemoveFile(filename string) error {
var ( var (
queryRefinementPattern = regexp.MustCompile(`(?i)(based on my (vector db|vector db|vector database|rags?|past (conversations?|chat|messages?))|from my (files?|documents?|data|information|memory)|search (in|my) (vector db|database|rags?)|rag search for)`) queryRefinementPattern = regexp.MustCompile(`(?i)(based on my (vector db|vector db|vector database|rags?|past (conversations?|chat|messages?))|from my (files?|documents?|data|information|memory)|search (in|my) (vector db|database|rags?)|rag search for)`)
importantKeywords = []string{"project", "architecture", "code", "file", "chat", "conversation", "topic", "summary", "details", "history", "previous", "my", "user", "me"} importantKeywords = []string{"project", "architecture", "code", "file", "chat", "conversation", "topic", "summary", "details", "history", "previous", "my", "user", "me"}
stopWords = []string{"the", "a", "an", "and", "or", "but", "in", "on", "at", "to", "for", "of", "with", "by", "from", "up", "down", "left", "right"}
) )
func (r *RAG) RefineQuery(query string) string { func (r *RAG) RefineQuery(query string) string {
@@ -561,10 +673,27 @@ func (r *RAG) RefineQuery(query string) string {
if len(query) <= 3 { if len(query) <= 3 {
return original return original
} }
// If query already contains double quotes, assume it's a phrase query and skip refinement
if strings.Contains(query, "\"") {
return original
}
query = strings.ToLower(query) query = strings.ToLower(query)
words := strings.Fields(query) words := strings.Fields(query)
if len(words) >= 3 { if len(words) >= 3 {
// Detect phrases and protect words that are part of phrases
phrases := detectPhrases(query)
protectedWords := make(map[string]bool)
for _, phrase := range phrases {
for _, word := range strings.Fields(phrase) {
protectedWords[word] = true
}
}
// Remove stop words that are not protected
for _, stopWord := range stopWords { for _, stopWord := range stopWords {
if protectedWords[stopWord] {
continue
}
wordPattern := `\b` + stopWord + `\b` wordPattern := `\b` + stopWord + `\b`
re := regexp.MustCompile(wordPattern) re := regexp.MustCompile(wordPattern)
query = re.ReplaceAllString(query, "") query = re.ReplaceAllString(query, "")
@@ -673,13 +802,57 @@ func (r *RAG) GenerateQueryVariations(query string) []string {
if !strings.HasSuffix(query, " summary") { if !strings.HasSuffix(query, " summary") {
variations = append(variations, query+" summary") variations = append(variations, query+" summary")
} }
// Add phrase-quoted variations for better FTS5 matching
phrases := detectPhrases(query)
if len(phrases) > 0 {
// Sort phrases by length descending to prioritize longer phrases
sort.Slice(phrases, func(i, j int) bool {
return len(phrases[i]) > len(phrases[j])
})
// Create a version with all phrases quoted
quotedQuery := query
for _, phrase := range phrases {
// Only quote if not already quoted
quotedPhrase := "\"" + phrase + "\""
if !strings.Contains(strings.ToLower(quotedQuery), strings.ToLower(quotedPhrase)) {
// Case-insensitive replacement of phrase with quoted version
re := regexp.MustCompile(`(?i)\b` + regexp.QuoteMeta(phrase) + `\b`)
quotedQuery = re.ReplaceAllString(quotedQuery, quotedPhrase)
}
}
// Disabled malformed quoted query for now
// if quotedQuery != query {
// variations = append(variations, quotedQuery)
// }
// Also add individual phrase variations for short queries
if len(phrases) <= 5 {
for _, phrase := range phrases {
// Create a focused query with just this phrase quoted
// Keep original context but emphasize this phrase
quotedPhrase := "\"" + phrase + "\""
re := regexp.MustCompile(`(?i)\b` + regexp.QuoteMeta(phrase) + `\b`)
focusedQuery := re.ReplaceAllString(query, quotedPhrase)
if focusedQuery != query && focusedQuery != quotedQuery {
variations = append(variations, focusedQuery)
}
// Add the phrase alone (quoted) as a separate variation
variations = append(variations, quotedPhrase)
}
}
}
return variations return variations
} }
func (r *RAG) RerankResults(results []models.VectorRow, query string) []models.VectorRow { func (r *RAG) RerankResults(results []models.VectorRow, query string) []models.VectorRow {
phraseCount := len(detectPhrases(query))
type scoredResult struct { type scoredResult struct {
row models.VectorRow row models.VectorRow
distance float32 distance float32
phraseMatches int
} }
scored := make([]scoredResult, 0, len(results)) scored := make([]scoredResult, 0, len(results))
for i := range results { for i := range results {
@@ -704,18 +877,52 @@ func (r *RAG) RerankResults(results []models.VectorRow, query string) []models.V
if row.FileName == "chat" || strings.Contains(strings.ToLower(row.FileName), "conversation") { if row.FileName == "chat" || strings.Contains(strings.ToLower(row.FileName), "conversation") {
score += 3 score += 3
} }
// Phrase match bonus: extra points for containing detected phrases
phraseMatches := countPhraseMatches(row.RawText, query)
if phraseMatches > 0 {
// Significant bonus per phrase to prioritize exact phrase matches
r.logger.Debug("phrase match bonus", "slug", row.Slug, "phraseMatches", phraseMatches, "score", score)
score += float32(phraseMatches) * 100
}
// Cross-chunk adjacency bonus: if this chunk has adjacent siblings in results,
// boost score to promote narrative continuity
adjacentCount := 0
for _, other := range results {
if other.Slug == row.Slug {
continue
}
if areSlugsAdjacent(row.Slug, other.Slug) {
adjacentCount++
}
}
if adjacentCount > 0 {
// Flat bonus per adjacent chunk
score += float32(adjacentCount) * 4
}
distance := row.Distance - score/100 distance := row.Distance - score/100
scored = append(scored, scoredResult{row: row, distance: distance}) scored = append(scored, scoredResult{row: row, distance: distance, phraseMatches: phraseMatches})
} }
sort.Slice(scored, func(i, j int) bool { sort.Slice(scored, func(i, j int) bool {
return scored[i].distance < scored[j].distance return scored[i].distance < scored[j].distance
}) })
unique := make([]models.VectorRow, 0) unique := make([]models.VectorRow, 0)
seen := make(map[string]bool) seen := make(map[string]bool)
maxPerFile := 2
if phraseCount > 0 {
maxPerFile = 10
}
fileCounts := make(map[string]int) fileCounts := make(map[string]int)
for i := range scored { for i := range scored {
if !seen[scored[i].row.Slug] { if !seen[scored[i].row.Slug] {
if fileCounts[scored[i].row.FileName] >= 2 { // Allow phrase-matching chunks to bypass per-file limit (up to +5 extra)
allowed := fileCounts[scored[i].row.FileName] < maxPerFile
if !allowed && scored[i].phraseMatches > 0 {
// If chunk has phrase matches, allow extra slots (up to maxPerFile + 5)
allowed = fileCounts[scored[i].row.FileName] < maxPerFile+5
}
if !allowed {
continue continue
} }
seen[scored[i].row.Slug] = true seen[scored[i].row.Slug] = true
@@ -723,8 +930,8 @@ func (r *RAG) RerankResults(results []models.VectorRow, query string) []models.V
unique = append(unique, scored[i].row) unique = append(unique, scored[i].row)
} }
} }
if len(unique) > 10 { if len(unique) > 30 {
unique = unique[:10] unique = unique[:30]
} }
return unique return unique
} }
@@ -793,6 +1000,7 @@ func (r *RAG) Search(query string, limit int) ([]models.VectorRow, error) {
r.resetIdleTimer() r.resetIdleTimer()
refined := r.RefineQuery(query) refined := r.RefineQuery(query)
variations := r.GenerateQueryVariations(refined) variations := r.GenerateQueryVariations(refined)
r.logger.Debug("query variations", "original", query, "refined", refined, "variations", variations)
// Collect embedding search results from all variations // Collect embedding search results from all variations
var embResults []models.VectorRow var embResults []models.VectorRow
@@ -824,17 +1032,35 @@ func (r *RAG) Search(query string, limit int) ([]models.VectorRow, error) {
return embResults[i].Distance < embResults[j].Distance return embResults[i].Distance < embResults[j].Distance
}) })
// Perform keyword search // Perform keyword search on all variations
kwResults, err := r.searchKeyword(refined, limit*2) var kwResults []models.VectorRow
seenKw := make(map[string]bool)
for _, q := range variations {
results, err := r.searchKeyword(q, limit)
if err != nil { if err != nil {
r.logger.Warn("keyword search failed, using only embeddings", "error", err) r.logger.Debug("keyword search failed for variation", "error", err, "query", q)
kwResults = nil continue
} }
// Sort keyword results by distance (already sorted by BM25 score) for _, row := range results {
// kwResults already sorted by distance (lower is better) if !seenKw[row.Slug] {
seenKw[row.Slug] = true
kwResults = append(kwResults, row)
}
}
}
// Sort keyword results by distance (lower is better)
sort.Slice(kwResults, func(i, j int) bool {
return kwResults[i].Distance < kwResults[j].Distance
})
// Combine using Reciprocal Rank Fusion (RRF) // Combine using Reciprocal Rank Fusion (RRF)
const rrfK = 60 // Use smaller K for phrase-heavy queries to give more weight to top ranks
phraseCount := len(detectPhrases(query))
rrfK := 60.0
if phraseCount > 0 {
rrfK = 30.0
}
r.logger.Debug("RRF parameters", "phraseCount", phraseCount, "rrfK", rrfK, "query", query)
type scoredRow struct { type scoredRow struct {
row models.VectorRow row models.VectorRow
score float64 score float64
@@ -844,11 +1070,22 @@ func (r *RAG) Search(query string, limit int) ([]models.VectorRow, error) {
for rank, row := range embResults { for rank, row := range embResults {
score := 1.0 / (float64(rank) + rrfK) score := 1.0 / (float64(rank) + rrfK)
scoreMap[row.Slug] += score scoreMap[row.Slug] += score
if row.Slug == "kjv_bible.epub_1786_0" {
r.logger.Debug("target chunk embedding rank", "rank", rank, "score", score)
} }
// Add keyword results }
// Add keyword results with weight boost when phrases are present
kwWeight := 1.0
if phraseCount > 0 {
kwWeight = 100.0
}
r.logger.Debug("keyword weight", "kwWeight", kwWeight, "phraseCount", phraseCount)
for rank, row := range kwResults { for rank, row := range kwResults {
score := 1.0 / (float64(rank) + rrfK) score := kwWeight * (1.0 / (float64(rank) + rrfK))
scoreMap[row.Slug] += score scoreMap[row.Slug] += score
if row.Slug == "kjv_bible.epub_1786_0" {
r.logger.Debug("target chunk keyword rank", "rank", rank, "score", score, "kwWeight", kwWeight, "rrfK", rrfK)
}
// Ensure row exists in combined results // Ensure row exists in combined results
if _, exists := seen[row.Slug]; !exists { if _, exists := seen[row.Slug]; !exists {
embResults = append(embResults, row) embResults = append(embResults, row)
@@ -860,6 +1097,18 @@ func (r *RAG) Search(query string, limit int) ([]models.VectorRow, error) {
score := scoreMap[row.Slug] score := scoreMap[row.Slug]
scoredRows = append(scoredRows, scoredRow{row: row, score: score}) scoredRows = append(scoredRows, scoredRow{row: row, score: score})
} }
// Debug: log scores for target chunk and top chunks
if strings.Contains(strings.ToLower(query), "bald") || strings.Contains(strings.ToLower(query), "she bears") {
for _, sr := range scoredRows {
if sr.row.Slug == "kjv_bible.epub_1786_0" {
r.logger.Debug("target chunk score", "slug", sr.row.Slug, "score", sr.score, "distance", sr.row.Distance)
}
}
// Log top 5 scores
for i := 0; i < len(scoredRows) && i < 5; i++ {
r.logger.Debug("top scored row", "rank", i+1, "slug", scoredRows[i].row.Slug, "score", scoredRows[i].score, "distance", scoredRows[i].row.Distance)
}
}
// Sort by descending RRF score // Sort by descending RRF score
sort.Slice(scoredRows, func(i, j int) bool { sort.Slice(scoredRows, func(i, j int) bool {
return scoredRows[i].score > scoredRows[j].score return scoredRows[i].score > scoredRows[j].score
@@ -938,3 +1187,11 @@ func (r *RAG) Destroy() {
} }
} }
} }
// SetEmbedderForTesting replaces the internal embedder with a mock.
// It is intended for use in tests only.
func (r *RAG) SetEmbedderForTesting(e Embedder) {
r.mu.Lock()
defer r.mu.Unlock()
r.embedder = e
}
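For intuition on the reciprocal rank fusion used by Search above, a small self-contained sketch with made-up slugs and rank lists; the rrfK of 30 and the keyword weight of 100 mirror the phrase-query constants introduced in this diff:

package main

import (
	"fmt"
	"sort"
)

func main() {
	embRank := []string{"chunkA", "chunkB", "chunkC"} // embedding results, best first
	kwRank := []string{"chunkC", "chunkA"}            // keyword results, best first

	rrfK := 30.0      // smaller K gives top ranks more weight (phrase-heavy queries)
	kwWeight := 100.0 // keyword boost applied when phrases are detected

	scores := map[string]float64{}
	for rank, slug := range embRank {
		scores[slug] += 1.0 / (float64(rank) + rrfK)
	}
	for rank, slug := range kwRank {
		scores[slug] += kwWeight * (1.0 / (float64(rank) + rrfK))
	}

	slugs := make([]string, 0, len(scores))
	for s := range scores {
		slugs = append(slugs, s)
	}
	sort.Slice(slugs, func(i, j int) bool { return scores[slugs[i]] > scores[slugs[j]] })
	for _, s := range slugs {
		fmt.Printf("%s %.4f\n", s, scores[s])
	}
	// chunkC wins: its keyword rank 0 contributes 100/30 ≈ 3.33, which dwarfs
	// the ~0.03 contributions from the embedding list alone.
}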

409
rag/rag_integration_test.go Normal file
View File

@@ -0,0 +1,409 @@
package rag
import (
"fmt"
"gf-lt/config"
"gf-lt/models"
"gf-lt/storage"
"log/slog"
"testing"
_ "github.com/glebarez/go-sqlite"
"github.com/jmoiron/sqlx"
)
// mockEmbedder returns zero vectors of a fixed dimension.
type mockEmbedder struct {
dim int
}
func (m *mockEmbedder) Embed(text string) ([]float32, error) {
vec := make([]float32, m.dim)
return vec, nil
}
func (m *mockEmbedder) EmbedSlice(texts []string) ([][]float32, error) {
vecs := make([][]float32, len(texts))
for i := range vecs {
vecs[i] = make([]float32, m.dim)
}
return vecs, nil
}
// dummyStore implements storage.FullRepo with a minimal set of methods.
// Only DB() is used by VectorStorage; other methods return empty values.
type dummyStore struct {
db *sqlx.DB
}
func (d dummyStore) DB() *sqlx.DB { return d.db }
// ChatHistory methods
func (d dummyStore) ListChats() ([]models.Chat, error) { return nil, nil }
func (d dummyStore) GetChatByID(id uint32) (*models.Chat, error) { return nil, nil }
func (d dummyStore) GetChatByChar(char string) ([]models.Chat, error) { return nil, nil }
func (d dummyStore) GetLastChat() (*models.Chat, error) { return nil, nil }
func (d dummyStore) GetLastChatByAgent(agent string) (*models.Chat, error) { return nil, nil }
func (d dummyStore) UpsertChat(chat *models.Chat) (*models.Chat, error) { return chat, nil }
func (d dummyStore) RemoveChat(id uint32) error { return nil }
func (d dummyStore) ChatGetMaxID() (uint32, error) { return 0, nil }
// Memories methods
func (d dummyStore) Memorise(m *models.Memory) (*models.Memory, error) { return m, nil }
func (d dummyStore) Recall(agent, topic string) (string, error) { return "", nil }
func (d dummyStore) RecallTopics(agent string) ([]string, error) { return nil, nil }
// VectorRepo methods (not used but required by interface)
func (d dummyStore) WriteVector(row *models.VectorRow) error { return nil }
func (d dummyStore) SearchClosest(q []float32, limit int) ([]models.VectorRow, error) {
return nil, nil
}
func (d dummyStore) ListFiles() ([]string, error) { return nil, nil }
func (d dummyStore) RemoveEmbByFileName(filename string) error { return nil }
var _ storage.FullRepo = dummyStore{}
// setupTestRAG creates an in-memory SQLite database, creates the necessary tables,
// inserts the provided chunks, and returns a RAG instance with a mock embedder.
func setupTestRAG(t *testing.T, chunks []*models.VectorRow) (*RAG, error) {
t.Helper()
db, err := sqlx.Open("sqlite", ":memory:")
if err != nil {
return nil, fmt.Errorf("open inmemory db: %w", err)
}
// Create the required tables (embeddings_768 and fts_embeddings).
// Use the same schema as production.
_, err = db.Exec(`
CREATE TABLE embeddings_768 (
id INTEGER PRIMARY KEY AUTOINCREMENT,
embeddings BLOB NOT NULL,
slug TEXT NOT NULL,
raw_text TEXT NOT NULL,
filename TEXT NOT NULL DEFAULT ''
);
`)
if err != nil {
return nil, fmt.Errorf("create embeddings table: %w", err)
}
_, err = db.Exec(`
CREATE VIRTUAL TABLE fts_embeddings USING fts5(
slug UNINDEXED,
raw_text,
filename UNINDEXED,
embedding_size UNINDEXED,
tokenize='porter unicode61'
);
`)
if err != nil {
return nil, fmt.Errorf("create FTS table: %w", err)
}
// Create a logger that discards output.
logger := slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{Level: slog.LevelError}))
store := dummyStore{db: db}
// Create config with embedding dimension 768.
cfg := &config.Config{
EmbedDims: 768,
RAGWordLimit: 250,
RAGOverlapWords: 25,
RAGBatchSize: 1,
}
// Create a RAG instance using New, which will create an embedder based on config.
// We'll override the embedder afterwards via SetEmbedderForTesting.
rag, err := New(logger, store, cfg)
if err != nil {
return nil, fmt.Errorf("create RAG: %w", err)
}
// Replace the embedder with our mock.
rag.SetEmbedderForTesting(&mockEmbedder{dim: cfg.EmbedDims})
// Insert the provided chunks using the storage directly.
if len(chunks) > 0 {
// Ensure each chunk has embeddings of correct dimension (zero vector).
for _, chunk := range chunks {
if len(chunk.Embeddings) != cfg.EmbedDims {
chunk.Embeddings = make([]float32, cfg.EmbedDims)
}
}
err = rag.storage.WriteVectors(chunks)
if err != nil {
return nil, fmt.Errorf("write test chunks: %w", err)
}
}
return rag, nil
}
// createTestChunks returns a slice of VectorRow representing the target chunk
// (kjv_bible.epub_1786_0), several bald-related noise chunks, and unrelated chunks.
func createTestChunks() []*models.VectorRow {
// Target chunk: 2 Kings 2:23-24 containing "bald head" and "two she bears".
targetRaw := `And he said, Ye shall not send.
2:17 And when they urged him till he was ashamed, he said, Send. They sent
therefore fifty men; and they sought three days, but found him not.
2:18 And when they came again to him, (for he tarried at Jericho,) he said unto
them, Did I not say unto you, Go not? 2:19 And the men of the city said unto
Elisha, Behold, I pray thee, the situation of this city is pleasant, as my lord
seeth: but the water is naught, and the ground barren.
2:20 And he said, Bring me a new cruse, and put salt therein. And they brought
it to him.
2:21 And he went forth unto the spring of the waters, and cast the salt in
there, and said, Thus saith the LORD, I have healed these waters; there shall
not be from thence any more death or barren land.
2:22 So the waters were healed unto this day, according to the saying of Elisha
which he spake.
2:23 And he went up from thence unto Bethel: and as he was going up by the way,
there came forth little children out of the city, and mocked him, and said unto
him, Go up, thou bald head; go up, thou bald head.
2:24 And he turned back, and looked on them, and cursed them in the name of the
LORD. And there came forth two she bears out of the wood, and tare forty and
two children of them.`
// Noise chunk 1: Leviticus containing "bald locust"
noise1Raw := `11:12 Whatsoever hath no fins nor scales in the waters, that shall be an
abomination unto you.
11:13 And these are they which ye shall have in abomination among the fowls;
they shall not be eaten, they are an abomination: the eagle, and the ossifrage,
and the ospray, 11:14 And the vulture, and the kite after his kind; 11:15 Every
raven after his kind; 11:16 And the owl, and the night hawk, and the cuckow,
and the hawk after his kind, 11:17 And the little owl, and the cormorant, and
the great owl, 11:18 And the swan, and the pelican, and the gier eagle, 11:19
And the stork, the heron after her kind, and the lapwing, and the bat.
11:20 All fowls that creep, going upon all four, shall be an abomination unto
you.
11:21 Yet these may ye eat of every flying creeping thing that goeth upon all
four, which have legs above their feet, to leap withal upon the earth; 11:22
Even these of them ye may eat; the locust after his kind, and the bald locust
after his kind, and the beetle after his kind, and the grasshopper after his
kind.
11:23 But all other flying creeping things, which have four feet, shall be an
abomination unto you.
11:24 And for these ye shall be unclean: whosoever toucheth the carcase of them
shall be unclean until the even.`
// Noise chunk 2: Leviticus containing "bald"
noise2Raw := `11:13 And these are they which ye shall have in abomination among the fowls;
they shall not be eaten, they are an abomination: the eagle, and the ossifrage,
and the ospray, 11:14 And the vulture, and the kite after his kind; 11:15 Every
raven after his kind; 11:16 And the owl, and the night hawk, and the cuckow,
and the hawk after his kind, 11:17 And the little owl, and the cormorant, and
the great owl, 11:18 And the swan, and the pelican, and the gier eagle, 11:19
And the stork, the heron after her kind, and the lapwing, and the bat.
11:20 All fowls that creep, going upon all four, shall be an abomination unto
you.
11:21 Yet these may ye eat of every flying creeping thing that goeth upon all
four, which have legs above their feet, to leap withal upon the earth; 11:22
Even these of them ye may eat; the locust after his kind, and the bald locust
after his kind, and the beetle after his kind, and the grasshopper after his
kind.
11:23 But all other flying creeping things, which have four feet, shall be an
abomination unto you.
11:24 And for these ye shall be unclean: whosoever toucheth the carcase of them
shall be unclean until the even.`
// Additional Leviticus noise chunks (simulating 28 bald-related chunks)
// Using variations of the same text with different slugs
leviticusSlugs := []string{
"kjv_bible.epub_564_0",
"kjv_bible.epub_565_0",
"kjv_bible.epub_579_0",
"kjv_bible.epub_580_0",
"kjv_bible.epub_581_0",
"kjv_bible.epub_582_0",
"kjv_bible.epub_583_0",
"kjv_bible.epub_584_0",
"kjv_bible.epub_585_0",
"kjv_bible.epub_586_0",
"kjv_bible.epub_587_0",
"kjv_bible.epub_588_0",
"kjv_bible.epub_589_0",
"kjv_bible.epub_590_0",
}
leviticusTexts := []string{
noise1Raw,
noise2Raw,
`13:40 And the man whose hair is fallen off his head, he is bald; yet is he
clean.
13:41 And he that hath his hair fallen off from the part of his head toward his
face, he is forehead bald; yet is he clean.`,
`13:42 And if there be in the bald head, or bald forehead, a white reddish sore;
it is a leprosy sprung up in his bald head, or his bald forehead.`,
`13:43 Then the priest shall look upon it: and, behold, if the rising of the
sore be white reddish in his bald head, or in his bald forehead, as the leprosy
appeareth in the skin of the flesh;`,
`13:44 He is a leprous man, he is unclean: the priest shall pronounce him utterly
unclean; his plague is in his head.`,
`13:45 And the leper in whom the plague is, his clothes shall be rent, and his
head bare, and he shall put a covering upon his upper lip, and shall cry,
Unclean, unclean.`,
`13:46 All the days wherein the plague shall be in him he shall be defiled; he
is unclean: he shall dwell alone; without the camp shall his habitation be.`,
`13:47 The garment also that the plague of leprosy is in, whether it be a woollen
garment, or a linen garment;`,
`13:48 Whether it be in the warp, or woof; of linen, or of woollen; whether in a
skin, or in any thing made of skin;`,
`13:49 And if the plague be greenish or reddish in the garment, or in the skin,
either in the warp, or in the woof, or in any thing of skin; it is a plague of
leprosy, and shall be shewed unto the priest:`,
`13:50 And the priest shall look upon the plague, and shut up it that hath the
plague seven days:`,
`13:51 And he shall look on the plague on the seventh day: if the plague be spread
in the garment, either in the warp, or in the woof, or in a skin, or in any work
that is made of skin; the plague is a fretting leprosy; it is unclean.`,
`13:52 He shall therefore burn that garment, whether warp or woof, in woollen or
in linen, or any thing of skin, wherein the plague is: for it is a fretting
leprosy; it shall be burnt in the fire.`,
}
// Unrelated chunk 1: ghost_7.txt_777_0
unrelated1Raw := `Doesn’t he have any pride as a hunter?!
I didn’t see what other choice I had. I would just have to grovel and be ready to flee at any given moment.
The Hidden Curse clan house was in the central region of the imperial capital. It was a high-class area with extraordinary property values that hosted the residences of people like Lord Gladis. This district was near the Imperial Castle, though “near” was a
relative term as it was still a few kilometers away.
The clan house was made of brick and conformed to an older style of architecture.`
// Unrelated chunk 2: ghost_7.txt_778_0
unrelated2Raw := `I would just have to grovel and be ready to flee at any given moment.
The Hidden Curse clan house was in the central region of the imperial capital. It was a high-class area with extraordinary property values that hosted the residences of people like Lord Gladis. This district was near the Imperial Castle, though “near” was a
relative term as it was still a few kilometers away.
The clan house was made of brick and conformed to an older style of architecture. Nearly everyone knew about this mansion and its clock tower. It stood tall over the neighboring mansions and rumor had it that you could see the whole capital from the top. It
spoke to this clan’s renown and history that they were able to get away with building something that dwarfed the mansions of the nobility.`
chunks := []*models.VectorRow{
{
Slug: "kjv_bible.epub_1786_0",
RawText: targetRaw,
FileName: "kjv_bible.epub",
Embeddings: nil, // will be filled with zero vector later
},
}
// Add Leviticus noise chunks
for i, slug := range leviticusSlugs {
text := leviticusTexts[i%len(leviticusTexts)]
chunks = append(chunks, &models.VectorRow{
Slug: slug,
RawText: text,
FileName: "kjv_bible.epub",
Embeddings: nil,
})
}
// Add unrelated chunks
chunks = append(chunks,
&models.VectorRow{
Slug: "ghost_7.txt_777_0",
RawText: unrelated1Raw,
FileName: "ghost_7.txt",
Embeddings: nil,
},
&models.VectorRow{
Slug: "ghost_7.txt_778_0",
RawText: unrelated2Raw,
FileName: "ghost_7.txt",
Embeddings: nil,
},
)
return chunks
}
func assertTargetInTopN(t *testing.T, results []models.VectorRow, topN int) bool {
t.Helper()
for i, row := range results {
if i >= topN {
break
}
if row.Slug == "kjv_bible.epub_1786_0" {
return true
}
}
return false
}
func TestBiblicalQuery(t *testing.T) {
chunks := createTestChunks()
rag, err := setupTestRAG(t, chunks)
if err != nil {
t.Fatalf("setup failed: %v", err)
}
query := "bald prophet and two she bears"
results, err := rag.Search(query, 10)
if err != nil {
t.Fatalf("search failed: %v", err)
}
// The target chunk should be in the top results.
if !assertTargetInTopN(t, results, 5) {
t.Errorf("target chunk not found in top 5 results for query %q", query)
t.Logf("results slugs: %v", func() []string {
slugs := make([]string, len(results))
for i, r := range results {
slugs[i] = r.Slug
}
return slugs
}())
}
}
func TestQueryVariations(t *testing.T) {
chunks := createTestChunks()
rag, err := setupTestRAG(t, chunks)
if err != nil {
t.Fatalf("setup failed: %v", err)
}
tests := []struct {
name string
query string
topN int
}{
{"she bears", "she bears", 5},
{"bald head", "bald head", 5},
{"two she bears out of the wood", "two she bears out of the wood", 5},
{"bald prophet", "bald prophet", 10},
{"go up thou bald head", "\"go up thou bald head\"", 5},
{"two she bears", "\"two she bears\"", 5},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results, err := rag.Search(tt.query, 10)
if err != nil {
t.Fatalf("search failed: %v", err)
}
if !assertTargetInTopN(t, results, tt.topN) {
t.Errorf("target chunk not found in top %d results for query %q", tt.topN, tt.query)
t.Logf("results slugs: %v", func() []string {
slugs := make([]string, len(results))
for i, r := range results {
slugs[i] = r.Slug
}
return slugs
}())
}
})
}
}

131
rag/rag_real_test.go Normal file
View File

@@ -0,0 +1,131 @@
package rag
import (
"gf-lt/config"
"gf-lt/storage"
"log/slog"
"os"
"path/filepath"
"testing"
)
func TestRealBiblicalQuery(t *testing.T) {
if testing.Short() {
t.Skip("skipping real embedder test in short mode")
}
// Check if the embedder model exists
modelPath := filepath.Join("..", "onnx", "embedgemma", "model_q4.onnx")
if _, err := os.Stat(modelPath); os.IsNotExist(err) {
t.Skipf("embedder model not found at %s; skipping real embedder test", modelPath)
}
tokenizerPath := filepath.Join("..", "onnx", "embedgemma", "tokenizer.json")
dbPath := filepath.Join("..", "gflt.db")
if _, err := os.Stat(dbPath); os.IsNotExist(err) {
t.Skipf("database not found at %s; skipping real embedder test", dbPath)
}
cfg := &config.Config{
EmbedModelPath: modelPath,
EmbedTokenizerPath: tokenizerPath,
EmbedDims: 768,
RAGWordLimit: 250,
RAGOverlapWords: 25,
RAGBatchSize: 1,
}
logger := slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{Level: slog.LevelError}))
store := storage.NewProviderSQL(dbPath, logger)
if store == nil {
t.Fatal("failed to create storage provider")
}
rag, err := New(logger, store, cfg)
if err != nil {
t.Fatalf("failed to create RAG instance: %v", err)
}
t.Cleanup(func() { rag.Destroy() })
query := "bald prophet and two she bears"
results, err := rag.Search(query, 30)
if err != nil {
t.Fatalf("search failed: %v", err)
}
found := false
for i, row := range results {
if row.Slug == "kjv_bible.epub_1786_0" {
found = true
t.Logf("target chunk found at rank %d", i+1)
break
}
}
if !found {
t.Errorf("target chunk not found in search results for query %q", query)
t.Logf("results slugs:")
for i, r := range results {
t.Logf("%d: %s", i+1, r.Slug)
}
}
}
func TestRealQueryVariations(t *testing.T) {
if testing.Short() {
t.Skip("skipping real embedder test in short mode")
}
modelPath := filepath.Join("..", "onnx", "embedgemma", "model_q4.onnx")
if _, err := os.Stat(modelPath); os.IsNotExist(err) {
t.Skipf("embedder model not found at %s; skipping real embedder test", modelPath)
}
tokenizerPath := filepath.Join("..", "onnx", "embedgemma", "tokenizer.json")
dbPath := filepath.Join("..", "gflt.db")
if _, err := os.Stat(dbPath); os.IsNotExist(err) {
t.Skipf("database not found at %s; skipping real embedder test", dbPath)
}
cfg := &config.Config{
EmbedModelPath: modelPath,
EmbedTokenizerPath: tokenizerPath,
EmbedDims: 768,
RAGWordLimit: 250,
RAGOverlapWords: 25,
RAGBatchSize: 1,
}
logger := slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{Level: slog.LevelError}))
store := storage.NewProviderSQL(dbPath, logger)
if store == nil {
t.Fatal("failed to create storage provider")
}
rag, err := New(logger, store, cfg)
if err != nil {
t.Fatalf("failed to create RAG instance: %v", err)
}
t.Cleanup(func() { rag.Destroy() })
tests := []struct {
name string
query string
}{
{"she bears", "she bears"},
{"bald head", "bald head"},
{"two she bears out of the wood", "two she bears out of the wood"},
{"bald prophet", "bald prophet"},
{"go up thou bald head", "\"go up thou bald head\""},
{"two she bears", "\"two she bears\""},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
results, err := rag.Search(tt.query, 10)
if err != nil {
t.Fatalf("search failed: %v", err)
}
found := false
for _, row := range results {
if row.Slug == "kjv_bible.epub_1786_0" {
found = true
break
}
}
if !found {
t.Errorf("target chunk not found for query %q", tt.query)
for i, r := range results {
t.Logf("%d: %s", i+1, r.Slug)
}
}
})
}
}

155
rag/rag_test.go Normal file
View File

@@ -0,0 +1,155 @@
package rag
import (
"testing"
)
func TestDetectPhrases(t *testing.T) {
tests := []struct {
query string
expect []string
}{
{
query: "bald prophet and two she bears",
expect: []string{"bald prophet", "two she", "two she bears", "she bears"},
},
{
query: "she bears",
expect: []string{"she bears"},
},
{
query: "the quick brown fox",
expect: []string{"quick brown", "quick brown fox", "brown fox"},
},
{
query: "in the house", // stop words
expect: []string{}, // "in" and "the" are stop words
},
{
query: "a", // short
expect: []string{},
},
}
for _, tt := range tests {
got := detectPhrases(tt.query)
if len(got) != len(tt.expect) {
t.Errorf("detectPhrases(%q) = %v, want %v", tt.query, got, tt.expect)
continue
}
for i := range got {
if got[i] != tt.expect[i] {
t.Errorf("detectPhrases(%q) = %v, want %v", tt.query, got, tt.expect)
break
}
}
}
}
func TestCountPhraseMatches(t *testing.T) {
tests := []struct {
text string
query string
expect int
}{
{
text: "two she bears came out of the wood",
query: "she bears",
expect: 1,
},
{
text: "bald head and she bears",
query: "bald prophet and two she bears",
expect: 1, // only "she bears" matches
},
{
text: "no match here",
query: "she bears",
expect: 0,
},
{
text: "she bears and bald prophet",
query: "bald prophet she bears",
expect: 2, // "she bears" and "bald prophet"
},
}
for _, tt := range tests {
got := countPhraseMatches(tt.text, tt.query)
if got != tt.expect {
t.Errorf("countPhraseMatches(%q, %q) = %d, want %d", tt.text, tt.query, got, tt.expect)
}
}
}
func TestAreSlugsAdjacent(t *testing.T) {
tests := []struct {
slug1 string
slug2 string
expect bool
}{
{
slug1: "kjv_bible.epub_1786_0",
slug2: "kjv_bible.epub_1787_0",
expect: true,
},
{
slug1: "kjv_bible.epub_1787_0",
slug2: "kjv_bible.epub_1786_0",
expect: true,
},
{
slug1: "kjv_bible.epub_1786_0",
slug2: "kjv_bible.epub_1788_0",
expect: false,
},
{
slug1: "otherfile.txt_1_0",
slug2: "kjv_bible.epub_1786_0",
expect: false,
},
{
slug1: "file_1_0",
slug2: "file_1_1",
expect: true,
},
{
slug1: "file_1_0",
slug2: "file_2_0", // different batch
expect: true, // sequential batches with same chunk index are adjacent
},
}
for _, tt := range tests {
got := areSlugsAdjacent(tt.slug1, tt.slug2)
if got != tt.expect {
t.Errorf("areSlugsAdjacent(%q, %q) = %v, want %v", tt.slug1, tt.slug2, got, tt.expect)
}
}
}
func TestParseSlugIndices(t *testing.T) {
tests := []struct {
slug string
wantBatch int
wantChunk int
wantOk bool
}{
{"kjv_bible.epub_1786_0", 1786, 0, true},
{"file_1_5", 1, 5, true},
{"no_underscore", 0, 0, false},
{"file_abc_def", 0, 0, false},
{"file_123_456_extra", 456, 0, false}, // regex matches last two numbers
}
for _, tt := range tests {
batch, chunk, ok := parseSlugIndices(tt.slug)
if ok != tt.wantOk {
t.Errorf("parseSlugIndices(%q) ok = %v, want %v", tt.slug, ok, tt.wantOk)
continue
}
if ok && (batch != tt.wantBatch || chunk != tt.wantChunk) {
t.Errorf("parseSlugIndices(%q) = (%d, %d), want (%d, %d)", tt.slug, batch, chunk, tt.wantBatch, tt.wantChunk)
}
}
}

View File

@@ -340,11 +340,9 @@ func (vs *VectorStorage) scanRows(rows *sql.Rows) ([]models.VectorRow, error) {
continue continue
} }
// Convert BM25 score to distance-like metric (lower is better) // Convert BM25 score to distance-like metric (lower is better)
// BM25 is negative, more negative is better. We'll normalize to positive distance. // BM25 is negative, more negative is better. Keep as negative.
distance := float32(-score) // Make positive (since score is negative) distance := float32(score) // Keep negative, more negative is better
if distance < 0 { // No clamping needed; negative distances are fine
distance = 0
}
results = append(results, models.VectorRow{ results = append(results, models.VectorRow{
Slug: slug, Slug: slug,
RawText: rawText, RawText: rawText,

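A tiny sketch of why keeping the raw (negative) BM25 score as the distance still sorts correctly under the existing ascending order; the values are made up, not FTS5 output:

package main

import (
	"fmt"
	"sort"
)

func main() {
	// Hypothetical BM25 scores: more negative means a better match.
	distances := []float64{-1.3, -5.2, -0.4}
	// An ascending sort now puts the best match first, consistent with the
	// embedding side where a lower distance is better.
	sort.Float64s(distances)
	fmt.Println(distances) // [-5.2 -1.3 -0.4]
}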
View File

@@ -29,7 +29,7 @@ func historyToSJSON(msgs []models.RoleMsg) (string, error) {
} }
func exportChat() error { func exportChat() error {
data, err := json.MarshalIndent(chatBody.GetMessages(), "", " ") data, err := json.MarshalIndent(chatBody.Messages, "", " ")
if err != nil { if err != nil {
return err return err
} }
@@ -54,7 +54,7 @@ func importChat(filename string) error {
if _, ok := chatMap[activeChatName]; !ok { if _, ok := chatMap[activeChatName]; !ok {
addNewChat(activeChatName) addNewChat(activeChatName)
} }
chatBody.SetMessages(messages) chatBody.Messages = messages
cfg.AssistantRole = messages[1].Role cfg.AssistantRole = messages[1].Role
if cfg.AssistantRole == cfg.UserRole { if cfg.AssistantRole == cfg.UserRole {
cfg.AssistantRole = messages[2].Role cfg.AssistantRole = messages[2].Role

View File

@@ -128,8 +128,8 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
} }
chatBody.SetMessages(history) chatBody.Messages = history
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
activeChatName = selectedChat activeChatName = selectedChat
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
@@ -149,8 +149,8 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
} }
showToast("chat deleted", selectedChat+" was deleted") showToast("chat deleted", selectedChat+" was deleted")
// load last chat // load last chat
chatBody.SetMessages(loadOldChatOrGetNew()) chatBody.Messages = loadOldChatOrGetNew()
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
case "update card": case "update card":
@@ -163,24 +163,16 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
showToast("error", "no such card: "+agentName) showToast("error", "no such card: "+agentName)
return return
} }
if msg0, ok := chatBody.GetMessageAt(0); ok { cc.SysPrompt = chatBody.Messages[0].Content
cc.SysPrompt = msg0.Content cc.FirstMsg = chatBody.Messages[1].Content
}
if msg1, ok := chatBody.GetMessageAt(1); ok {
cc.FirstMsg = msg1.Content
}
if err := pngmeta.WriteToPng(cc.ToSpec(cfg.UserRole), cc.FilePath, cc.FilePath); err != nil { if err := pngmeta.WriteToPng(cc.ToSpec(cfg.UserRole), cc.FilePath, cc.FilePath); err != nil {
logger.Error("failed to write charcard", "error", err) logger.Error("failed to write charcard", "error", err)
} }
return return
case "move sysprompt onto 1st msg": case "move sysprompt onto 1st msg":
chatBody.WithLock(func(cb *models.ChatBody) { chatBody.Messages[1].Content = chatBody.Messages[0].Content + chatBody.Messages[1].Content
if len(cb.Messages) >= 2 { chatBody.Messages[0].Content = rpDefenitionSysMsg
cb.Messages[1].Content = cb.Messages[0].Content + cb.Messages[1].Content textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
cb.Messages[0].Content = rpDefenitionSysMsg
}
})
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
activeChatName = selectedChat activeChatName = selectedChat
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
@@ -251,11 +243,9 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
for _, f := range loadedFiles { for _, f := range loadedFiles {
loadedSet[f] = true loadedSet[f] = true
} }
// Build merged list: files from ragdir + orphaned files from DB // Build merged list: files from ragdir + orphaned files from DB
ragFiles := make([]ragFileInfo, 0, len(fileList)+len(loadedFiles)) ragFiles := make([]ragFileInfo, 0, len(fileList)+len(loadedFiles))
seen := make(map[string]bool) seen := make(map[string]bool)
// Add files from ragdir // Add files from ragdir
for _, f := range fileList { for _, f := range fileList {
ragFiles = append(ragFiles, ragFileInfo{ ragFiles = append(ragFiles, ragFileInfo{
@@ -266,7 +256,6 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
}) })
seen[f] = true seen[f] = true
} }
// Add orphaned files (in DB but not in ragdir) // Add orphaned files (in DB but not in ragdir)
for _, f := range loadedFiles { for _, f := range loadedFiles {
if !seen[f] { if !seen[f] {
@@ -283,7 +272,7 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
fileTable := tview.NewTable(). fileTable := tview.NewTable().
SetBorders(true) SetBorders(true)
longStatusView := tview.NewTextView() longStatusView := tview.NewTextView()
longStatusView.SetText("status text") longStatusView.SetText("press x to exit")
longStatusView.SetBorder(true).SetTitle("status") longStatusView.SetBorder(true).SetTitle("status")
longStatusView.SetChangedFunc(func() { longStatusView.SetChangedFunc(func() {
app.Draw() app.Draw()
@@ -384,7 +373,6 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
} }
errCh := make(chan error, 1) // why? errCh := make(chan error, 1) // why?
go func() { go func() {
defer pages.RemovePage(RAGPage)
for { for {
select { select {
case err := <-errCh: case err := <-errCh:
@@ -425,7 +413,6 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
} }
return return
} }
// defer pages.RemovePage(RAGPage)
tc := fileTable.GetCell(row, column) tc := fileTable.GetCell(row, column)
tc.SetTextColor(tcell.ColorRed) tc.SetTextColor(tcell.ColorRed)
fileTable.SetSelectable(false, false) fileTable.SetSelectable(false, false)
@@ -438,7 +425,6 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
f := ragFiles[row-1] f := ragFiles[row-1]
// Handle "-" case (orphaned file with no delete option) // Handle "-" case (orphaned file with no delete option)
if tc.Text == "-" { if tc.Text == "-" {
pages.RemovePage(RAGPage)
return return
} }
switch tc.Text { switch tc.Text {
@@ -449,14 +435,14 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
if err := ragger.LoadRAG(fpath); err != nil { if err := ragger.LoadRAG(fpath); err != nil {
logger.Error("failed to embed file", "chat", fpath, "error", err) logger.Error("failed to embed file", "chat", fpath, "error", err)
showToast("RAG", "failed to embed file; error: "+err.Error()) showToast("RAG", "failed to embed file; error: "+err.Error())
app.QueueUpdate(func() {
pages.RemovePage(RAGPage)
})
return return
} }
showToast("RAG", "file loaded successfully") showToast("RAG", "file loaded successfully")
app.QueueUpdate(func() { app.QueueUpdate(func() {
pages.RemovePage(RAGPage) pages.RemovePage(RAGPage)
loadedFiles, _ := ragger.ListLoaded()
chatRAGTable := makeRAGTable(fileList, loadedFiles)
pages.AddPage(RAGPage, chatRAGTable, true, true)
}) })
}() }()
return return
@@ -466,14 +452,14 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
if err := ragger.RemoveFile(f.name); err != nil { if err := ragger.RemoveFile(f.name); err != nil {
logger.Error("failed to unload file from RAG", "filename", f.name, "error", err) logger.Error("failed to unload file from RAG", "filename", f.name, "error", err)
showToast("RAG", "failed to unload file; error: "+err.Error()) showToast("RAG", "failed to unload file; error: "+err.Error())
app.QueueUpdate(func() {
pages.RemovePage(RAGPage)
})
return return
} }
showToast("RAG", "file unloaded successfully") showToast("RAG", "file unloaded successfully")
app.QueueUpdate(func() { app.QueueUpdate(func() {
pages.RemovePage(RAGPage) pages.RemovePage(RAGPage)
loadedFiles, _ := ragger.ListLoaded()
chatRAGTable := makeRAGTable(fileList, loadedFiles)
pages.AddPage(RAGPage, chatRAGTable, true, true)
}) })
}() }()
return return
@@ -484,6 +470,21 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
return return
} }
showToast("chat deleted", fpath+" was deleted") showToast("chat deleted", fpath+" was deleted")
go func() {
app.QueueUpdate(func() {
pages.RemovePage(RAGPage)
newFileList, _ := os.ReadDir(cfg.RAGDir)
loadedFiles, _ := ragger.ListLoaded()
var newFiles []string
for _, f := range newFileList {
if !f.IsDir() {
newFiles = append(newFiles, f.Name())
}
}
chatRAGTable := makeRAGTable(newFiles, loadedFiles)
pages.AddPage(RAGPage, chatRAGTable, true, true)
})
}()
return return
default: default:
pages.RemovePage(RAGPage) pages.RemovePage(RAGPage)
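The load, unload, and delete branches above now rebuild the table in place instead of closing the page, repeating nearly the same rebuild code each time. A single helper could be factored out; this is only a sketch and assumes the package-level app, pages, ragger, cfg, logger, and RAGPage used elsewhere in this file.

func refreshRAGPage() {
	app.QueueUpdate(func() {
		pages.RemovePage(RAGPage)
		entries, err := os.ReadDir(cfg.RAGDir)
		if err != nil {
			logger.Error("failed to list rag dir", "dir", cfg.RAGDir, "error", err)
			return
		}
		files := make([]string, 0, len(entries))
		for _, e := range entries {
			if !e.IsDir() {
				files = append(files, e.Name())
			}
		}
		loaded, _ := ragger.ListLoaded()
		pages.AddPage(RAGPage, makeRAGTable(files, loaded), true, true)
	})
}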
@@ -571,7 +572,7 @@ func makeAgentTable(agentList []string) *tview.Table {
return return
} }
// replace textview // replace textview
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
updateStatusLine() updateStatusLine()
// sysModal.ClearButtons() // sysModal.ClearButtons()
@@ -740,7 +741,7 @@ func makeImportChatTable(filenames []string) *tview.Table {
colorText() colorText()
updateStatusLine() updateStatusLine()
// redraw the text in text area // redraw the text in text area
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
app.SetFocus(textArea) app.SetFocus(textArea)
return return

View File

@@ -207,7 +207,7 @@ var (
modelHasVision bool modelHasVision bool
) )
func init() { func initTools() {
sysMap[basicCard.ID] = basicCard sysMap[basicCard.ID] = basicCard
roleToID["assistant"] = basicCard.ID roleToID["assistant"] = basicCard.ID
sa, err := searcher.NewWebSurfer(searcher.SearcherTypeScraper, "") sa, err := searcher.NewWebSurfer(searcher.SearcherTypeScraper, "")
@@ -360,13 +360,13 @@ func ragsearch(args map[string]string) []byte {
} }
limitS, ok := args["limit"] limitS, ok := args["limit"]
if !ok || limitS == "" { if !ok || limitS == "" {
limitS = "3" limitS = "10"
} }
limit, err := strconv.Atoi(limitS) limit, err := strconv.Atoi(limitS)
if err != nil || limit == 0 { if err != nil || limit == 0 {
logger.Warn("ragsearch limit; passed bad value; setting to default (3)", logger.Warn("ragsearch limit; passed bad value; setting to default (3)",
"limit_arg", limitS, "error", err) "limit_arg", limitS, "error", err)
limit = 3 limit = 10
} }
ragInstance := rag.GetInstance() ragInstance := rag.GetInstance()
if ragInstance == nil { if ragInstance == nil {
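A small helper sketch for the default-limit handling above, so the fallback value and the log line cannot drift apart; the helper name is hypothetical and not part of this diff.

func parseLimitArg(s string, def int) int {
	if s == "" {
		return def
	}
	n, err := strconv.Atoi(s)
	if err != nil || n <= 0 {
		logger.Warn("ragsearch limit; passed bad value; using default",
			"limit_arg", s, "default", def, "error", err)
		return def
	}
	return n
}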
@@ -1215,11 +1215,11 @@ func isCommandAllowed(command string, args ...string) bool {
} }
func summarizeChat(args map[string]string) []byte { func summarizeChat(args map[string]string) []byte {
if chatBody.GetMessageCount() == 0 { if len(chatBody.Messages) == 0 {
return []byte("No chat history to summarize.") return []byte("No chat history to summarize.")
} }
// Format chat history for the agent // Format chat history for the agent
chatText := chatToText(chatBody.GetMessages(), true) // include system and tool messages chatText := chatToText(chatBody.Messages, true) // include system and tool messages
return []byte(chatText) return []byte(chatText)
} }
@@ -2273,56 +2273,3 @@ var baseTools = []models.Tool{
}, },
}, },
} }
func init() {
if windowToolsAvailable {
baseTools = append(baseTools,
models.Tool{
Type: "function",
Function: models.ToolFunc{
Name: "list_windows",
Description: "List all visible windows with their IDs and names. Returns a map of window ID to window name.",
Parameters: models.ToolFuncParams{
Type: "object",
Required: []string{},
Properties: map[string]models.ToolArgProps{},
},
},
},
models.Tool{
Type: "function",
Function: models.ToolFunc{
Name: "capture_window",
Description: "Capture a screenshot of a specific window and save it to /tmp. Requires window parameter (window ID or name substring).",
Parameters: models.ToolFuncParams{
Type: "object",
Required: []string{"window"},
Properties: map[string]models.ToolArgProps{
"window": models.ToolArgProps{
Type: "string",
Description: "window ID or window name (partial match)",
},
},
},
},
},
models.Tool{
Type: "function",
Function: models.ToolFunc{
Name: "capture_window_and_view",
Description: "Capture a screenshot of a specific window, save it to /tmp, and return the image for viewing. Requires window parameter (window ID or name substring).",
Parameters: models.ToolFuncParams{
Type: "object",
Required: []string{"window"},
Properties: map[string]models.ToolArgProps{
"window": models.ToolArgProps{
Type: "string",
Description: "window ID or window name (partial match)",
},
},
},
},
},
)
}
}
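The init() removed above registered the window-capture tools at package load time. Where that registration lives after this change is not shown in the diff; one sketch consistent with the initTools rename earlier in this file would be a helper invoked from initTools (the helper name is hypothetical):

func registerWindowTools() {
	if !windowToolsAvailable {
		return
	}
	baseTools = append(baseTools, models.Tool{
		Type: "function",
		Function: models.ToolFunc{
			Name:        "list_windows",
			Description: "List all visible windows with their IDs and names. Returns a map of window ID to window name.",
			Parameters: models.ToolFuncParams{
				Type:       "object",
				Required:   []string{},
				Properties: map[string]models.ToolArgProps{},
			},
		},
	})
	// capture_window and capture_window_and_view would be appended the same way,
	// each with the required "window" string parameter shown above.
}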

tui.go
View File

@@ -42,7 +42,6 @@ var (
confirmPageName = "confirm" confirmPageName = "confirm"
fullscreenMode bool fullscreenMode bool
positionVisible bool = true positionVisible bool = true
scrollToEndEnabled bool = true
// pages // pages
historyPage = "historyPage" historyPage = "historyPage"
agentPage = "agentPage" agentPage = "agentPage"
@@ -51,7 +50,6 @@ var (
helpPage = "helpPage" helpPage = "helpPage"
renamePage = "renamePage" renamePage = "renamePage"
RAGPage = "RAGPage" RAGPage = "RAGPage"
RAGLoadedPage = "RAGLoadedPage"
propsPage = "propsPage" propsPage = "propsPage"
codeBlockPage = "codeBlockPage" codeBlockPage = "codeBlockPage"
imgPage = "imgPage" imgPage = "imgPage"
@@ -224,7 +222,7 @@ func showToast(title, message string) {
}) })
} }
func init() { func initTUI() {
// Start background goroutine to update model color cache // Start background goroutine to update model color cache
startModelColorUpdater() startModelColorUpdater()
tview.Styles = colorschemes["default"] tview.Styles = colorschemes["default"]
@@ -355,7 +353,7 @@ func init() {
searchResults = nil // Clear search results searchResults = nil // Clear search results
searchResultLengths = nil // Clear search result lengths searchResultLengths = nil // Clear search result lengths
originalTextForSearch = "" originalTextForSearch = ""
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) // Reset text without search regions textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys)) // Reset text without search regions
colorText() // Apply normal chat coloring colorText() // Apply normal chat coloring
} else { } else {
// Original logic if no search is active // Original logic if no search is active
@@ -436,11 +434,9 @@ func init() {
pages.RemovePage(editMsgPage) pages.RemovePage(editMsgPage)
return nil return nil
} }
chatBody.WithLock(func(cb *models.ChatBody) { chatBody.Messages[selectedIndex].SetText(editedMsg)
cb.Messages[selectedIndex].SetText(editedMsg)
})
// change textarea // change textarea
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
pages.RemovePage(editMsgPage) pages.RemovePage(editMsgPage)
editMode = false editMode = false
return nil return nil
@@ -468,11 +464,9 @@ func init() {
pages.RemovePage(roleEditPage) pages.RemovePage(roleEditPage)
return return
} }
if selectedIndex >= 0 && selectedIndex < chatBody.GetMessageCount() { if selectedIndex >= 0 && selectedIndex < len(chatBody.Messages) {
chatBody.WithLock(func(cb *models.ChatBody) { chatBody.Messages[selectedIndex].Role = newRole
cb.Messages[selectedIndex].Role = newRole textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
})
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
colorText() colorText()
pages.RemovePage(roleEditPage) pages.RemovePage(roleEditPage)
} }
@@ -501,7 +495,7 @@ func init() {
return nil return nil
} }
selectedIndex = siInt selectedIndex = siInt
if chatBody.GetMessageCount()-1 < selectedIndex || selectedIndex < 0 { if len(chatBody.Messages)-1 < selectedIndex || selectedIndex < 0 {
msg := "chosen index is out of bounds, will copy user input" msg := "chosen index is out of bounds, will copy user input"
logger.Warn(msg, "index", selectedIndex) logger.Warn(msg, "index", selectedIndex)
showToast("error", msg) showToast("error", msg)
@@ -511,7 +505,7 @@ func init() {
hideIndexBar() // Hide overlay instead of removing page directly hideIndexBar() // Hide overlay instead of removing page directly
return nil return nil
} }
m := chatBody.GetMessages()[selectedIndex] m := chatBody.Messages[selectedIndex]
switch { switch {
case roleEditMode: case roleEditMode:
hideIndexBar() // Hide overlay first hideIndexBar() // Hide overlay first
@@ -578,7 +572,7 @@ func init() {
searchResults = nil searchResults = nil
searchResultLengths = nil searchResultLengths = nil
originalTextForSearch = "" originalTextForSearch = ""
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
return return
} else { } else {
@@ -636,9 +630,9 @@ func init() {
// //
textArea.SetMovedFunc(updateStatusLine) textArea.SetMovedFunc(updateStatusLine)
updateStatusLine() updateStatusLine()
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
// init sysmap // init sysmap
@@ -650,7 +644,7 @@ func init() {
if event.Key() == tcell.KeyRune && event.Rune() == '5' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == '5' && event.Modifiers()&tcell.ModAlt != 0 {
// switch cfg.ShowSys // switch cfg.ShowSys
cfg.ShowSys = !cfg.ShowSys cfg.ShowSys = !cfg.ShowSys
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
} }
if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 {
@@ -667,9 +661,9 @@ func init() {
} }
if event.Key() == tcell.KeyRune && event.Rune() == '2' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == '2' && event.Modifiers()&tcell.ModAlt != 0 {
// toggle auto-scrolling // toggle auto-scrolling
scrollToEndEnabled = !scrollToEndEnabled cfg.AutoScrollEnabled = !cfg.AutoScrollEnabled
status := "disabled" status := "disabled"
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
status = "enabled" status = "enabled"
} }
showToast("autoscroll", "Auto-scrolling "+status) showToast("autoscroll", "Auto-scrolling "+status)
@@ -683,7 +677,7 @@ func init() {
// Handle Alt+T to toggle thinking block visibility // Handle Alt+T to toggle thinking block visibility
if event.Key() == tcell.KeyRune && event.Rune() == 't' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == 't' && event.Modifiers()&tcell.ModAlt != 0 {
thinkingCollapsed = !thinkingCollapsed thinkingCollapsed = !thinkingCollapsed
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
status := "expanded" status := "expanded"
if thinkingCollapsed { if thinkingCollapsed {
@@ -695,7 +689,7 @@ func init() {
// Handle Ctrl+T to toggle tool call/response visibility // Handle Ctrl+T to toggle tool call/response visibility
if event.Key() == tcell.KeyCtrlT { if event.Key() == tcell.KeyCtrlT {
toolCollapsed = !toolCollapsed toolCollapsed = !toolCollapsed
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
status := "expanded" status := "expanded"
if toolCollapsed { if toolCollapsed {
@@ -736,16 +730,16 @@ func init() {
updateStatusLine() updateStatusLine()
return nil return nil
} }
if event.Key() == tcell.KeyF2 && !botRespMode { if event.Key() == tcell.KeyF2 && !botRespMode.Load() {
// regen last msg // regen last msg
if chatBody.GetMessageCount() == 0 { if len(chatBody.Messages) == 0 {
showToast("info", "no messages to regenerate") showToast("info", "no messages to regenerate")
return nil return nil
} }
chatBody.TruncateMessages(chatBody.GetMessageCount() - 1) chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
// there is no case where user msg is regenerated // there is no case where user msg is regenerated
// lastRole := chatBody.GetMessages()[chatBody.GetMessageCount()-1].Role // lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
// go chatRound("", cfg.UserRole, textView, true, false) // go chatRound("", cfg.UserRole, textView, true, false)
if cfg.TTS_ENABLED { if cfg.TTS_ENABLED {
TTSDoneChan <- true TTSDoneChan <- true
@@ -753,7 +747,7 @@ func init() {
chatRoundChan <- &models.ChatRoundReq{Role: cfg.UserRole, Regen: true} chatRoundChan <- &models.ChatRoundReq{Role: cfg.UserRole, Regen: true}
return nil return nil
} }
if event.Key() == tcell.KeyF3 && !botRespMode { if event.Key() == tcell.KeyF3 && !botRespMode.Load() {
// delete last msg // delete last msg
// check textarea text; if it ends with bot icon delete only icon: // check textarea text; if it ends with bot icon delete only icon:
text := textView.GetText(true) text := textView.GetText(true)
@@ -764,12 +758,12 @@ func init() {
colorText() colorText()
return nil return nil
} }
if chatBody.GetMessageCount() == 0 { if len(chatBody.Messages) == 0 {
showToast("info", "no messages to delete") showToast("info", "no messages to delete")
return nil return nil
} }
chatBody.TruncateMessages(chatBody.GetMessageCount() - 1) chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
if cfg.TTS_ENABLED { if cfg.TTS_ENABLED {
TTSDoneChan <- true TTSDoneChan <- true
} }
@@ -809,15 +803,15 @@ func init() {
return nil return nil
} }
if event.Key() == tcell.KeyF6 { if event.Key() == tcell.KeyF6 {
interruptResp = true interruptResp.Store(true)
botRespMode = false botRespMode.Store(false)
toolRunningMode = false toolRunningMode.Store(false)
return nil return nil
} }
if event.Key() == tcell.KeyF7 { if event.Key() == tcell.KeyF7 {
// copy msg to clipboard // copy msg to clipboard
editMode = false editMode = false
m := chatBody.GetMessages()[chatBody.GetMessageCount()-1] m := chatBody.Messages[len(chatBody.Messages)-1]
msgText := m.GetText() msgText := m.GetText()
if err := copyToClipboard(msgText); err != nil { if err := copyToClipboard(msgText); err != nil {
logger.Error("failed to copy to clipboard", "error", err) logger.Error("failed to copy to clipboard", "error", err)
@@ -1001,10 +995,10 @@ func init() {
TTSDoneChan <- true TTSDoneChan <- true
} }
if event.Key() == tcell.KeyRune && event.Rune() == '0' && event.Modifiers()&tcell.ModAlt != 0 && cfg.TTS_ENABLED { if event.Key() == tcell.KeyRune && event.Rune() == '0' && event.Modifiers()&tcell.ModAlt != 0 && cfg.TTS_ENABLED {
if chatBody.GetMessageCount() > 0 { if len(chatBody.Messages) > 0 {
// Stop any currently playing TTS first // Stop any currently playing TTS first
TTSDoneChan <- true TTSDoneChan <- true
lastMsg := chatBody.GetMessages()[chatBody.GetMessageCount()-1] lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
cleanedText := models.CleanText(lastMsg.GetText()) cleanedText := models.CleanText(lastMsg.GetText())
if cleanedText != "" { if cleanedText != "" {
// nolint: errcheck // nolint: errcheck
@@ -1016,7 +1010,7 @@ func init() {
if event.Key() == tcell.KeyCtrlW { if event.Key() == tcell.KeyCtrlW {
// INFO: continue bot/text message // INFO: continue bot/text message
// without new role // without new role
lastRole := chatBody.GetMessages()[chatBody.GetMessageCount()-1].Role lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
// go chatRound("", lastRole, textView, false, true) // go chatRound("", lastRole, textView, false, true)
chatRoundChan <- &models.ChatRoundReq{Role: lastRole, Resume: true} chatRoundChan <- &models.ChatRoundReq{Role: lastRole, Resume: true}
return nil return nil
@@ -1102,11 +1096,11 @@ func init() {
if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '9' { if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '9' {
// Warm up (load) the currently selected model // Warm up (load) the currently selected model
go warmUpModel() go warmUpModel()
showToast("model warmup", "loading model: "+chatBody.GetModel()) showToast("model warmup", "loading model: "+chatBody.Model)
return nil return nil
} }
// cannot send msg in editMode or botRespMode // cannot send msg in editMode or botRespMode
if event.Key() == tcell.KeyEscape && !editMode && !botRespMode { if event.Key() == tcell.KeyEscape && !editMode && !botRespMode.Load() {
if shellMode { if shellMode {
cmdText := shellInput.GetText() cmdText := shellInput.GetText()
if cmdText != "" { if cmdText != "" {
@@ -1141,9 +1135,9 @@ func init() {
} }
// add user icon before user msg // add user icon before user msg
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n", fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
nl, chatBody.GetMessageCount(), persona, msgText) nl, len(chatBody.Messages), persona, msgText)
textArea.SetText("", true) textArea.SetText("", true)
if scrollToEndEnabled { if cfg.AutoScrollEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
} }
colorText() colorText()
@@ -1172,9 +1166,10 @@ func init() {
app.SetFocus(focusSwitcher[currentF]) app.SetFocus(focusSwitcher[currentF])
return nil return nil
} }
if isASCII(string(event.Rune())) && !botRespMode { if isASCII(string(event.Rune())) && !botRespMode.Load() {
return event return event
} }
return event return event
}) })
go updateModelLists()
} }
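With both package init() functions renamed (initTools in the tools file, initTUI here), the startup order becomes an explicit call sequence instead of relying on init ordering. The actual main function is not part of this diff; a sketch of what the call site could look like, assuming the existing app global:

func main() {
	initTools() // register the assistant card, web search, and optional window tools
	initTUI()   // build pages and key bindings; also starts updateModelLists in the background
	if err := app.Run(); err != nil {
		panic(err)
	}
}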