68 Commits

Author SHA1 Message Date
Grail Finder
eeca909b65 Feat: working tab completion in shell mode 2026-02-19 08:53:09 +03:00
Grail Finder
b18d96ac13 Enha: remade within a popup 2026-02-19 08:35:21 +03:00
Grail Finder
b861b92e5d Chore: move helpers to helpfuncs 2026-02-19 08:13:41 +03:00
Grail Finder
17f0afac80 Feat: scan files to complete [WIP] 2026-02-19 07:18:47 +03:00
Grail Finder
931b646c30 Enha: codingdir for coding assistant 2026-02-18 22:00:52 +03:00
Grail Finder
f560ecf70b Card: coding assistant 2026-02-18 21:22:58 +03:00
Grail Finder
f40f09390b Feat(tts) alt+0 to replay last message in the chat 2026-02-18 13:15:40 +03:00
Grail Finder
5548991f5c Enha: statusline color (loaded:green; unloaded:red) for local models 2026-02-18 12:13:22 +03:00
Grail Finder
c12311da99 Chore: linter complaints 2026-02-18 08:42:05 +03:00
Grail Finder
7d18a9d77e Feat: indicator for a message with an image [image: filename] 2026-02-17 16:19:33 +03:00
Grail Finder
b67ae1be98 Enha: filter out thinking blocks from chat history, removed {role}: 2026-02-17 13:42:49 +03:00
Grail Finder
372e49199b Feat: collapse/expand thinking blocks with alt+t 2026-02-17 13:15:09 +03:00
Grail Finder
d6d4f09f8d Merge branch 'feat/filepicker-search' 2026-02-17 11:17:17 +03:00
Grail Finder
475936fb1b Feat: filepicker search 2026-02-17 11:16:52 +03:00
Grail Finder
fa846225ee Enha: remove updatequeue, since it waits for another main action 2026-02-17 10:29:28 +03:00
Grail Finder
7b2fa04391 Fix (img prompt): botname: after <__media__> for /completion 2026-02-17 08:23:08 +03:00
Grail Finder
c83779b479 Doc: add attempts doc 2026-02-16 19:43:14 +03:00
Grail Finder
43b0fe3739 Feat: image preview for filepicker 2026-02-16 19:08:16 +03:00
Grail Finder
1b36ef938e Fix: parsing of content parts 2026-02-16 16:35:06 +03:00
Grail Finder
987d5842a4 Enha: tts.done on regen or delete 2026-02-12 18:16:53 +03:00
Grail Finder
10b665813e Fix: avoid sending regen while bot responding 2026-02-12 16:49:29 +03:00
Grail Finder
8c3c2b9b23 Chore: server should live in separate branch
until a usecase for it is found
2026-02-12 10:26:30 +03:00
Grail Finder
e42eb96371 Doc: update 2026-02-10 11:27:06 +03:00
Grail Finder
46a33baabb Enha: stop tts if msg not for user 2026-02-10 11:25:05 +03:00
Grail Finder
875de679cf Merge branch 'feat/char-secrets' 2026-02-10 11:05:09 +03:00
Grail Finder
37b98ad36c Doc: update 2026-02-10 10:56:39 +03:00
Grail Finder
2cd3956f6a Chore: make debug; icon fix 2026-02-10 08:54:47 +03:00
Grail Finder
0afb98246b Fix (llama.cpp) model update 2026-02-09 15:42:40 +03:00
Grail Finder
a13a1634f7 Enha: update card 2026-02-09 12:43:52 +03:00
Grail Finder
83aeee2576 Enha: alice_bob_carl card update; system to see all the messages 2026-02-09 12:26:21 +03:00
Grail Finder
3f4d8a9467 Fix (f1): load from the card 2026-02-09 11:29:47 +03:00
Grail Finder
c04e120ddb Chore: interface{} -> any 2026-02-09 10:39:27 +03:00
Grail Finder
67733ad8dd Enha: add bool to apply card 2026-02-09 10:11:56 +03:00
Grail Finder
5e7ddea682 Enha: change __known_by_char tag to @ 2026-02-09 09:44:54 +03:00
Grail Finder
77ad2a7e7e Enha: popups from the main window
no longer user has to go to the props table to get a pleasant popup to
choose an option
2026-02-09 08:52:11 +03:00
Grail Finder
1bf9e6eef7 Enha: extract first valid recipient from knownto 2026-02-08 21:50:03 +03:00
Grail Finder
93284312cf Enha: auto turn role display 2026-02-08 17:11:29 +03:00
Grail Finder
4af866079c Chore: linter complaints 2026-02-06 12:42:06 +03:00
Grail Finder
478a505869 Enha: client stop string for completion only 2026-02-06 11:32:06 +03:00
Grail Finder
d0722c6f98 Fix: add regen param for f2 2026-02-05 08:25:06 +03:00
Grail Finder
685738a5a4 Enha: force stop string on client side 2026-02-04 13:54:54 +03:00
Grail Finder
7187df509f Enha: stricter stop string 2026-02-04 12:47:54 +03:00
Grail Finder
79861e7c2b Enha: privateMessageResp with resume 2026-02-04 11:22:17 +03:00
Grail Finder
e3965db3c7 Enha: use slices methods 2026-02-04 08:26:30 +03:00
Grail Finder
654d6a47ec Fix: trigger auto turn cannot be empty
empty message means to continue merging new reply to the last message
2026-02-03 19:06:09 +03:00
Grail Finder
76f14ce4a3 Enha: detailed error 2026-02-03 16:56:31 +03:00
Grail Finder
0f5bbaa943 Enha: update config 2026-02-03 12:04:20 +03:00
Grail Finder
65b4f01177 Doc: char context doc 2026-02-03 11:00:12 +03:00
Grail Finder
e3be45b023 Fix: openrouter model list 2026-02-02 14:29:31 +03:00
Grail Finder
fcb4b99332 Fix(tts): mutex use 2026-02-02 11:11:07 +03:00
Grail Finder
0e6d2747cd Enha: auto turn config switch 2026-02-02 08:29:38 +03:00
Grail Finder
343e045095 Enha: role suffix for /chat only if AutoTurn is enabled 2026-02-02 08:23:05 +03:00
Grail Finder
e52e8ce2cc Enha: consolidate assistant messages only 2026-02-02 08:18:49 +03:00
Grail Finder
c1b04303ef Enha: persona suffix for /chat endpoints 2026-02-01 12:53:06 +03:00
Grail Finder
6f6a35459e Chore: cleaning 2026-02-01 11:38:51 +03:00
Grail Finder
3a11210f52 Enha: avoid recursion in llm calls 2026-01-31 12:57:53 +03:00
Grail Finder
fa192a2624 Feat: autoturn 2026-01-25 09:59:07 +03:00
Grail Finder
9813872854 Chore: bool colors for statusline 2026-01-22 09:29:56 +03:00
Grail Finder
a28e8ef9e2 Enha: charlist in cards 2026-01-21 21:01:01 +03:00
Grail Finder
4e597e944e Chore: log cleanup 2026-01-17 13:07:14 +03:00
Grail Finder
3e2a1b6f99 Fix: KnowTo is added only if tag present 2026-01-17 13:03:30 +03:00
Grail Finder
0fb5921004 Fix: copy with knownto 2026-01-17 12:44:18 +03:00
Grail Finder
fd84dd5826 Enha: do not remove tag 2026-01-17 12:28:19 +03:00
Grail Finder
ec2d1c05ac Fix: do not skip system msgs 2026-01-17 11:54:52 +03:00
Grail Finder
8b162ef34f Enha: change textview chat history based on current user persona 2026-01-17 11:42:35 +03:00
Grail Finder
12be603690 Chore: add alice-bob-carl card 2026-01-17 10:23:06 +03:00
Grail Finder
eb44b1e4b2 Feat: impl attempt 2026-01-16 16:53:19 +03:00
Grail Finder
f5d76eb605 Doc: feature concept 2026-01-16 10:11:01 +03:00
34 changed files with 3098 additions and 1180 deletions

3
.gitignore vendored
View File

@@ -5,7 +5,8 @@ history/
*.db *.db
config.toml config.toml
sysprompts/* sysprompts/*
!sysprompts/cluedo.json !sysprompts/alice_bob_carl.json
!sysprompts/coding_assistant.json
history_bak/ history_bak/
.aider* .aider*
tags tags

View File

@@ -1,6 +1,7 @@
version: "2" version: "2"
run: run:
concurrency: 2 timeout: 1m
concurrency: 4
tests: false tests: false
linters: linters:
default: none default: none
@@ -14,7 +15,13 @@ linters:
- prealloc - prealloc
- staticcheck - staticcheck
- unused - unused
- gocritic
- unconvert
- wastedassign
settings: settings:
gocritic:
enabled-tags:
- performance
funlen: funlen:
lines: 80 lines: 80
statements: 50 statements: 50

View File

@@ -1,17 +1,17 @@
.PHONY: setconfig run lint setup-whisper build-whisper download-whisper-model docker-up docker-down docker-logs noextra-run noextra-server .PHONY: setconfig run lint setup-whisper build-whisper download-whisper-model docker-up docker-down docker-logs noextra-run
run: setconfig run: setconfig
go build -tags extra -o gf-lt && ./gf-lt go build -tags extra -o gf-lt && ./gf-lt
server: setconfig build-debug:
go build -tags extra -o gf-lt && ./gf-lt -port 3333 go build -gcflags="all=-N -l" -tags extra -o gf-lt
debug: build-debug
dlv exec --headless --accept-multiclient --listen=:2345 ./gf-lt
noextra-run: setconfig noextra-run: setconfig
go build -tags '!extra' -o gf-lt && ./gf-lt go build -tags '!extra' -o gf-lt && ./gf-lt
noextra-server: setconfig
go build -tags '!extra' -o gf-lt && ./gf-lt -port 3333
setconfig: setconfig:
find config.toml &>/dev/null || cp config.example.toml config.toml find config.toml &>/dev/null || cp config.example.toml config.toml

View File

@@ -8,6 +8,7 @@ made with use of [tview](https://github.com/rivo/tview)
- tts/stt (run make commands to get deps); - tts/stt (run make commands to get deps);
- image input; - image input;
- function calls (function calls are implemented natively, to avoid calling outside sources); - function calls (function calls are implemented natively, to avoid calling outside sources);
- [character specific context (unique feature)](char-specific-context.md)
#### how it looks #### how it looks
![how it looks](assets/ex01.png) ![how it looks](assets/ex01.png)

View File

@@ -77,17 +77,18 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
} }
prompt := strings.TrimSpace(sb.String()) prompt := strings.TrimSpace(sb.String())
if isDeepSeek { switch {
case isDeepSeek:
// DeepSeek completion // DeepSeek completion
req := models.NewDSCompletionReq(prompt, model, defaultProps["temperature"], []string{}) req := models.NewDSCompletionReq(prompt, model, defaultProps["temperature"], []string{})
req.Stream = false // Agents don't need streaming req.Stream = false // Agents don't need streaming
return json.Marshal(req) return json.Marshal(req)
} else if isOpenRouter { case isOpenRouter:
// OpenRouter completion // OpenRouter completion
req := models.NewOpenRouterCompletionReq(model, prompt, defaultProps, []string{}) req := models.NewOpenRouterCompletionReq(model, prompt, defaultProps, []string{})
req.Stream = false // Agents don't need streaming req.Stream = false // Agents don't need streaming
return json.Marshal(req) return json.Marshal(req)
} else { default:
// Assume llama.cpp completion // Assume llama.cpp completion
req := models.NewLCPReq(prompt, model, nil, defaultProps, []string{}) req := models.NewLCPReq(prompt, model, nil, defaultProps, []string{})
req.Stream = false // Agents don't need streaming req.Stream = false // Agents don't need streaming
@@ -103,15 +104,16 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
Messages: messages, Messages: messages,
} }
if isDeepSeek { switch {
case isDeepSeek:
// DeepSeek chat // DeepSeek chat
req := models.NewDSChatReq(*chatBody) req := models.NewDSChatReq(*chatBody)
return json.Marshal(req) return json.Marshal(req)
} else if isOpenRouter { case isOpenRouter:
// OpenRouter chat // OpenRouter chat
req := models.NewOpenRouterChatReq(*chatBody, defaultProps) req := models.NewOpenRouterChatReq(*chatBody, defaultProps)
return json.Marshal(req) return json.Marshal(req)
} else { default:
// Assume llama.cpp chat (OpenAI format) // Assume llama.cpp chat (OpenAI format)
req := models.OpenAIReq{ req := models.OpenAIReq{
ChatBody: chatBody, ChatBody: chatBody,

574
bot.go
View File

@@ -17,36 +17,35 @@ import (
"net/http" "net/http"
"net/url" "net/url"
"os" "os"
"path" "regexp"
"slices"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
"time" "time"
"github.com/neurosnap/sentences/english" "github.com/neurosnap/sentences/english"
"github.com/rivo/tview"
) )
var ( var (
httpClient = &http.Client{} httpClient = &http.Client{}
cfg *config.Config cfg *config.Config
logger *slog.Logger logger *slog.Logger
logLevel = new(slog.LevelVar) logLevel = new(slog.LevelVar)
) ctx, cancel = context.WithCancel(context.Background())
var ( activeChatName string
activeChatName string chatRoundChan = make(chan *models.ChatRoundReq, 1)
chunkChan = make(chan string, 10) chunkChan = make(chan string, 10)
openAIToolChan = make(chan string, 10) openAIToolChan = make(chan string, 10)
streamDone = make(chan bool, 1) streamDone = make(chan bool, 1)
chatBody *models.ChatBody chatBody *models.ChatBody
store storage.FullRepo store storage.FullRepo
defaultFirstMsg = "Hello! What can I do for you?" defaultFirstMsg = "Hello! What can I do for you?"
defaultStarter = []models.RoleMsg{} defaultStarter = []models.RoleMsg{}
defaultStarterBytes = []byte{} interruptResp = false
interruptResp = false ragger *rag.RAG
ragger *rag.RAG chunkParser ChunkParser
chunkParser ChunkParser lastToolCall *models.FuncCall
lastToolCall *models.FuncCall
//nolint:unused // TTS_ENABLED conditionally uses this //nolint:unused // TTS_ENABLED conditionally uses this
orator Orator orator Orator
asr STT asr STT
@@ -68,26 +67,102 @@ var (
LocalModels = []string{} LocalModels = []string{}
) )
// cleanNullMessages removes messages with null or empty content to prevent API issues // parseKnownToTag extracts known_to list from content using configured tag.
func cleanNullMessages(messages []models.RoleMsg) []models.RoleMsg { // Returns cleaned content and list of character names.
// // deletes tool calls which we don't want for now func parseKnownToTag(content string) []string {
// cleaned := make([]models.RoleMsg, 0, len(messages)) if cfg == nil || !cfg.CharSpecificContextEnabled {
// for _, msg := range messages { return nil
// // is there a sense for this check at all? }
// if msg.HasContent() || msg.ToolCallID != "" || msg.Role == cfg.AssistantRole || msg.Role == cfg.WriteNextMsgAsCompletionAgent { tag := cfg.CharSpecificContextTag
// cleaned = append(cleaned, msg) if tag == "" {
// } else { tag = "@"
// // Log filtered messages for debugging }
// logger.Warn("filtering out message during cleaning", "role", msg.Role, "content", msg.Content, "tool_call_id", msg.ToolCallID, "has_content", msg.HasContent()) // Pattern: tag + list + "@"
// } pattern := regexp.QuoteMeta(tag) + `(.*?)@`
// } re := regexp.MustCompile(pattern)
return consolidateConsecutiveAssistantMessages(messages) matches := re.FindAllStringSubmatch(content, -1)
if len(matches) == 0 {
return nil
}
// There may be multiple tags; we combine all.
var knownTo []string
for _, match := range matches {
if len(match) < 2 {
continue
}
// Remove the entire matched tag from content
list := strings.TrimSpace(match[1])
if list == "" {
continue
}
strings.SplitSeq(list, ",")
// parts := strings.Split(list, ",")
// for _, p := range parts {
for p := range strings.SplitSeq(list, ",") {
p = strings.TrimSpace(p)
if p != "" {
knownTo = append(knownTo, p)
}
}
}
// Also remove any leftover trailing "__" that might be orphaned? Not needed.
return knownTo
}
// processMessageTag processes a message for known_to tag and sets KnownTo field.
// It also ensures the sender's role is included in KnownTo.
// If KnownTo already set (e.g., from DB), preserves it unless new tag found.
func processMessageTag(msg *models.RoleMsg) *models.RoleMsg {
if cfg == nil || !cfg.CharSpecificContextEnabled {
return msg
}
// If KnownTo already set, assume tag already processed (content cleaned).
// However, we still check for new tags (maybe added later).
knownTo := parseKnownToTag(msg.Content)
// If tag found, replace KnownTo with new list (merge with existing?)
// For simplicity, if knownTo is not nil, replace.
if knownTo == nil {
return msg
}
msg.KnownTo = knownTo
if msg.Role == "" {
return msg
}
if !slices.Contains(msg.KnownTo, msg.Role) {
msg.KnownTo = append(msg.KnownTo, msg.Role)
}
return msg
}
// filterMessagesForCharacter returns messages visible to the specified character.
// If CharSpecificContextEnabled is false, returns all messages.
func filterMessagesForCharacter(messages []models.RoleMsg, character string) []models.RoleMsg {
if cfg == nil || !cfg.CharSpecificContextEnabled || character == "" {
return messages
}
if character == "system" { // system sees every message
return messages
}
filtered := make([]models.RoleMsg, 0, len(messages))
for _, msg := range messages {
// If KnownTo is nil or empty, message is visible to all
// system msg cannot be filtered
if len(msg.KnownTo) == 0 || msg.Role == "system" {
filtered = append(filtered, msg)
continue
}
if slices.Contains(msg.KnownTo, character) {
// Check if character is in KnownTo lis
filtered = append(filtered, msg)
}
}
return filtered
} }
func cleanToolCalls(messages []models.RoleMsg) []models.RoleMsg { func cleanToolCalls(messages []models.RoleMsg) []models.RoleMsg {
// If AutoCleanToolCallsFromCtx is false, keep tool call messages in context // If AutoCleanToolCallsFromCtx is false, keep tool call messages in context
if cfg != nil && !cfg.AutoCleanToolCallsFromCtx { if cfg != nil && !cfg.AutoCleanToolCallsFromCtx {
return consolidateConsecutiveAssistantMessages(messages) return consolidateAssistantMessages(messages)
} }
cleaned := make([]models.RoleMsg, 0, len(messages)) cleaned := make([]models.RoleMsg, 0, len(messages))
for i, msg := range messages { for i, msg := range messages {
@@ -97,11 +172,11 @@ func cleanToolCalls(messages []models.RoleMsg) []models.RoleMsg {
cleaned = append(cleaned, msg) cleaned = append(cleaned, msg)
} }
} }
return consolidateConsecutiveAssistantMessages(cleaned) return consolidateAssistantMessages(cleaned)
} }
// consolidateConsecutiveAssistantMessages merges consecutive assistant messages into a single message // consolidateAssistantMessages merges consecutive assistant messages into a single message
func consolidateConsecutiveAssistantMessages(messages []models.RoleMsg) []models.RoleMsg { func consolidateAssistantMessages(messages []models.RoleMsg) []models.RoleMsg {
if len(messages) == 0 { if len(messages) == 0 {
return messages return messages
} }
@@ -110,7 +185,8 @@ func consolidateConsecutiveAssistantMessages(messages []models.RoleMsg) []models
isBuildingAssistantMsg := false isBuildingAssistantMsg := false
for i := 0; i < len(messages); i++ { for i := 0; i < len(messages); i++ {
msg := messages[i] msg := messages[i]
if msg.Role == cfg.AssistantRole || msg.Role == cfg.WriteNextMsgAsCompletionAgent { // assistant role only
if msg.Role == cfg.AssistantRole {
// If this is an assistant message, start or continue building // If this is an assistant message, start or continue building
if !isBuildingAssistantMsg { if !isBuildingAssistantMsg {
// Start accumulating assistant message // Start accumulating assistant message
@@ -223,7 +299,8 @@ func warmUpModel() {
go func() { go func() {
var data []byte var data []byte
var err error var err error
if strings.HasSuffix(cfg.CurrentAPI, "/completion") { switch {
case strings.HasSuffix(cfg.CurrentAPI, "/completion"):
// Old completion endpoint // Old completion endpoint
req := models.NewLCPReq(".", chatBody.Model, nil, map[string]float32{ req := models.NewLCPReq(".", chatBody.Model, nil, map[string]float32{
"temperature": 0.8, "temperature": 0.8,
@@ -233,7 +310,7 @@ func warmUpModel() {
}, []string{}) }, []string{})
req.Stream = false req.Stream = false
data, err = json.Marshal(req) data, err = json.Marshal(req)
} else if strings.Contains(cfg.CurrentAPI, "/v1/chat/completions") { case strings.Contains(cfg.CurrentAPI, "/v1/chat/completions"):
// OpenAI-compatible chat endpoint // OpenAI-compatible chat endpoint
req := models.OpenAIReq{ req := models.OpenAIReq{
ChatBody: &models.ChatBody{ ChatBody: &models.ChatBody{
@@ -246,7 +323,7 @@ func warmUpModel() {
Tools: nil, Tools: nil,
} }
data, err = json.Marshal(req) data, err = json.Marshal(req)
} else { default:
// Unknown local endpoint, skip // Unknown local endpoint, skip
return return
} }
@@ -265,32 +342,6 @@ func warmUpModel() {
}() }()
} }
func fetchLCPModelName() *models.LCPModels {
//nolint
resp, err := httpClient.Get(cfg.FetchModelNameAPI)
if err != nil {
chatBody.Model = "disconnected"
logger.Warn("failed to get model", "link", cfg.FetchModelNameAPI, "error", err)
if err := notifyUser("error", "request failed "+cfg.FetchModelNameAPI); err != nil {
logger.Debug("failed to notify user", "error", err, "fn", "fetchLCPModelName")
}
return nil
}
defer resp.Body.Close()
llmModel := models.LCPModels{}
if err := json.NewDecoder(resp.Body).Decode(&llmModel); err != nil {
logger.Warn("failed to decode resp", "link", cfg.FetchModelNameAPI, "error", err)
return nil
}
if resp.StatusCode != 200 {
chatBody.Model = "disconnected"
return nil
}
chatBody.Model = path.Base(llmModel.Data[0].ID)
cfg.CurrentModel = chatBody.Model
return &llmModel
}
// nolint // nolint
func fetchDSBalance() *models.DSBalance { func fetchDSBalance() *models.DSBalance {
url := "https://api.deepseek.com/user/balance" url := "https://api.deepseek.com/user/balance"
@@ -412,9 +463,62 @@ func monitorModelLoad(modelID string) {
}() }()
} }
// extractDetailedErrorFromBytes extracts detailed error information from response body bytes
func extractDetailedErrorFromBytes(body []byte, statusCode int) string {
// Try to parse as JSON to extract detailed error information
var errorResponse map[string]interface{}
if err := json.Unmarshal(body, &errorResponse); err == nil {
// Check if it's an error response with detailed information
if errorData, ok := errorResponse["error"]; ok {
if errorMap, ok := errorData.(map[string]interface{}); ok {
var errorMsg string
if msg, ok := errorMap["message"]; ok {
errorMsg = fmt.Sprintf("%v", msg)
}
var details []string
if code, ok := errorMap["code"]; ok {
details = append(details, fmt.Sprintf("Code: %v", code))
}
if metadata, ok := errorMap["metadata"]; ok {
// Handle metadata which might contain raw error details
if metadataMap, ok := metadata.(map[string]interface{}); ok {
if raw, ok := metadataMap["raw"]; ok {
// Parse the raw error string if it's JSON
var rawError map[string]interface{}
if rawStr, ok := raw.(string); ok && json.Unmarshal([]byte(rawStr), &rawError) == nil {
if rawErrorData, ok := rawError["error"]; ok {
if rawErrorMap, ok := rawErrorData.(map[string]interface{}); ok {
if rawMsg, ok := rawErrorMap["message"]; ok {
return fmt.Sprintf("API Error: %s", rawMsg)
}
}
}
}
}
}
details = append(details, fmt.Sprintf("Metadata: %v", metadata))
}
if len(details) > 0 {
return fmt.Sprintf("API Error: %s (%s)", errorMsg, strings.Join(details, ", "))
}
return "API Error: " + errorMsg
}
}
}
// If not a structured error response, return the raw body with status
return fmt.Sprintf("HTTP Status: %d, Response Body: %s", statusCode, string(body))
}
// sendMsgToLLM expects streaming resp // sendMsgToLLM expects streaming resp
func sendMsgToLLM(body io.Reader) { func sendMsgToLLM(body io.Reader) {
choseChunkParser() choseChunkParser()
// openrouter does not respect stop strings, so we have to cut the message ourselves
stopStrings := chatBody.MakeStopSliceExcluding("", listChatRoles())
req, err := http.NewRequest("POST", cfg.CurrentAPI, body) req, err := http.NewRequest("POST", cfg.CurrentAPI, body)
if err != nil { if err != nil {
logger.Error("newreq error", "error", err) logger.Error("newreq error", "error", err)
@@ -438,6 +542,33 @@ func sendMsgToLLM(body io.Reader) {
streamDone <- true streamDone <- true
return return
} }
// Check if the initial response is an error before starting to stream
if resp.StatusCode >= 400 {
// Read the response body to get detailed error information
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
logger.Error("failed to read error response body", "error", err, "status_code", resp.StatusCode)
detailedError := fmt.Sprintf("HTTP Status: %d, Failed to read response body: %v", resp.StatusCode, err)
if err := notifyUser("API Error", detailedError); err != nil {
logger.Error("failed to notify", "error", err)
}
resp.Body.Close()
streamDone <- true
return
}
// Parse the error response for detailed information
detailedError := extractDetailedErrorFromBytes(bodyBytes, resp.StatusCode)
logger.Error("API returned error status", "status_code", resp.StatusCode, "detailed_error", detailedError)
if err := notifyUser("API Error", detailedError); err != nil {
logger.Error("failed to notify", "error", err)
}
resp.Body.Close()
streamDone <- true
return
}
defer resp.Body.Close() defer resp.Body.Close()
reader := bufio.NewReader(resp.Body) reader := bufio.NewReader(resp.Body)
counter := uint32(0) counter := uint32(0)
@@ -455,11 +586,23 @@ func sendMsgToLLM(body io.Reader) {
} }
line, err := reader.ReadBytes('\n') line, err := reader.ReadBytes('\n')
if err != nil { if err != nil {
logger.Error("error reading response body", "error", err, "line", string(line), // Check if this is an EOF error and if the response contains detailed error information
"user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI) if err == io.EOF {
// if err.Error() != "EOF" { // For streaming responses, we may have already consumed the error body
if err := notifyUser("API error", err.Error()); err != nil { // So we'll use the original status code to provide context
logger.Error("failed to notify", "error", err) detailedError := fmt.Sprintf("Streaming connection closed unexpectedly (Status: %d). This may indicate an API error. Check your API provider and model settings.", resp.StatusCode)
logger.Error("error reading response body", "error", err, "detailed_error", detailedError,
"status_code", resp.StatusCode, "user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
if err := notifyUser("API Error", detailedError); err != nil {
logger.Error("failed to notify", "error", err)
}
} else {
logger.Error("error reading response body", "error", err, "line", string(line),
"user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
// if err.Error() != "EOF" {
if err := notifyUser("API error", err.Error()); err != nil {
logger.Error("failed to notify", "error", err)
}
} }
streamDone <- true streamDone <- true
break break
@@ -514,6 +657,14 @@ func sendMsgToLLM(body io.Reader) {
} }
// bot sends way too many \n // bot sends way too many \n
answerText = strings.ReplaceAll(chunk.Chunk, "\n\n", "\n") answerText = strings.ReplaceAll(chunk.Chunk, "\n\n", "\n")
// Accumulate text to check for stop strings that might span across chunks
// check if chunk is in stopstrings => stop
// this check is needed only for openrouter /v1/completion, since it does not respect stop slice
if chunkParser.GetAPIType() == models.APITypeCompletion &&
slices.Contains(stopStrings, answerText) {
logger.Debug("stop string detected on client side for completion endpoint", "stop_string", answerText)
streamDone <- true
}
chunkChan <- answerText chunkChan <- answerText
openAIToolChan <- chunk.ToolChunk openAIToolChan <- chunk.ToolChunk
if chunk.FuncName != "" { if chunk.FuncName != "" {
@@ -597,7 +748,20 @@ func roleToIcon(role string) string {
return "<" + role + ">: " return "<" + role + ">: "
} }
func chatRound(userMsg, role string, tv *tview.TextView, regen, resume bool) { func chatWatcher(ctx context.Context) {
for {
select {
case <-ctx.Done():
return
case chatRoundReq := <-chatRoundChan:
if err := chatRound(chatRoundReq); err != nil {
logger.Error("failed to chatRound", "err", err)
}
}
}
}
func chatRound(r *models.ChatRoundReq) error {
botRespMode = true botRespMode = true
botPersona := cfg.AssistantRole botPersona := cfg.AssistantRole
if cfg.WriteNextMsgAsCompletionAgent != "" { if cfg.WriteNextMsgAsCompletionAgent != "" {
@@ -605,32 +769,23 @@ func chatRound(userMsg, role string, tv *tview.TextView, regen, resume bool) {
} }
defer func() { botRespMode = false }() defer func() { botRespMode = false }()
// check that there is a model set to use if is not local // check that there is a model set to use if is not local
if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI {
if chatBody.Model != "deepseek-chat" && chatBody.Model != "deepseek-reasoner" {
if err := notifyUser("bad request", "wrong deepseek model name"); err != nil {
logger.Warn("failed ot notify user", "error", err)
return
}
return
}
}
choseChunkParser() choseChunkParser()
reader, err := chunkParser.FormMsg(userMsg, role, resume) reader, err := chunkParser.FormMsg(r.UserMsg, r.Role, r.Resume)
if reader == nil || err != nil { if reader == nil || err != nil {
logger.Error("empty reader from msgs", "role", role, "error", err) logger.Error("empty reader from msgs", "role", r.Role, "error", err)
return return err
} }
if cfg.SkipLLMResp { if cfg.SkipLLMResp {
return return nil
} }
go sendMsgToLLM(reader) go sendMsgToLLM(reader)
logger.Debug("looking at vars in chatRound", "msg", userMsg, "regen", regen, "resume", resume) logger.Debug("looking at vars in chatRound", "msg", r.UserMsg, "regen", r.Regen, "resume", r.Resume)
if !resume { if !r.Resume {
fmt.Fprintf(tv, "\n[-:-:b](%d) ", len(chatBody.Messages)) fmt.Fprintf(textView, "\n[-:-:b](%d) ", len(chatBody.Messages))
fmt.Fprint(tv, roleToIcon(botPersona)) fmt.Fprint(textView, roleToIcon(botPersona))
fmt.Fprint(tv, "[-:-:-]\n") fmt.Fprint(textView, "[-:-:-]\n")
if cfg.ThinkUse && !strings.Contains(cfg.CurrentAPI, "v1") { if cfg.ThinkUse && !strings.Contains(cfg.CurrentAPI, "v1") {
// fmt.Fprint(tv, "<think>") // fmt.Fprint(textView, "<think>")
chunkChan <- "<think>" chunkChan <- "<think>"
} }
} }
@@ -640,29 +795,29 @@ out:
for { for {
select { select {
case chunk := <-chunkChan: case chunk := <-chunkChan:
fmt.Fprint(tv, chunk) fmt.Fprint(textView, chunk)
respText.WriteString(chunk) respText.WriteString(chunk)
if scrollToEndEnabled { if scrollToEndEnabled {
tv.ScrollToEnd() textView.ScrollToEnd()
} }
// Send chunk to audio stream handler // Send chunk to audio stream handler
if cfg.TTS_ENABLED { if cfg.TTS_ENABLED {
TTSTextChan <- chunk TTSTextChan <- chunk
} }
case toolChunk := <-openAIToolChan: case toolChunk := <-openAIToolChan:
fmt.Fprint(tv, toolChunk) fmt.Fprint(textView, toolChunk)
toolResp.WriteString(toolChunk) toolResp.WriteString(toolChunk)
if scrollToEndEnabled { if scrollToEndEnabled {
tv.ScrollToEnd() textView.ScrollToEnd()
} }
case <-streamDone: case <-streamDone:
// drain any remaining chunks from chunkChan before exiting // drain any remaining chunks from chunkChan before exiting
for len(chunkChan) > 0 { for len(chunkChan) > 0 {
chunk := <-chunkChan chunk := <-chunkChan
fmt.Fprint(tv, chunk) fmt.Fprint(textView, chunk)
respText.WriteString(chunk) respText.WriteString(chunk)
if scrollToEndEnabled { if scrollToEndEnabled {
tv.ScrollToEnd() textView.ScrollToEnd()
} }
if cfg.TTS_ENABLED { if cfg.TTS_ENABLED {
// Send chunk to audio stream handler // Send chunk to audio stream handler
@@ -678,25 +833,24 @@ out:
} }
botRespMode = false botRespMode = false
// numbers in chatbody and displayed must be the same // numbers in chatbody and displayed must be the same
if resume { if r.Resume {
chatBody.Messages[len(chatBody.Messages)-1].Content += respText.String() chatBody.Messages[len(chatBody.Messages)-1].Content += respText.String()
// lastM.Content = lastM.Content + respText.String() // lastM.Content = lastM.Content + respText.String()
// Process the updated message to check for known_to tags in resumed response
updatedMsg := chatBody.Messages[len(chatBody.Messages)-1]
processedMsg := processMessageTag(&updatedMsg)
chatBody.Messages[len(chatBody.Messages)-1] = *processedMsg
} else { } else {
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{ newMsg := models.RoleMsg{
Role: botPersona, Content: respText.String(), Role: botPersona, Content: respText.String(),
}) }
// Process the new message to check for known_to tags in LLM response
newMsg = *processMessageTag(&newMsg)
chatBody.Messages = append(chatBody.Messages, newMsg)
stopTTSIfNotForUser(&newMsg)
} }
logger.Debug("chatRound: before cleanChatBody", "messages_before_clean", len(chatBody.Messages))
for i, msg := range chatBody.Messages {
logger.Debug("chatRound: before cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
}
// // Clean null/empty messages to prevent API issues with endpoints like llama.cpp jinja template
cleanChatBody() cleanChatBody()
logger.Debug("chatRound: after cleanChatBody", "messages_after_clean", len(chatBody.Messages)) refreshChatDisplay()
for i, msg := range chatBody.Messages {
logger.Debug("chatRound: after cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
}
colorText()
updateStatusLine() updateStatusLine()
// bot msg is done; // bot msg is done;
// now check it for func call // now check it for func call
@@ -704,7 +858,19 @@ out:
if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil { if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
logger.Warn("failed to update storage", "error", err, "name", activeChatName) logger.Warn("failed to update storage", "error", err, "name", activeChatName)
} }
findCall(respText.String(), toolResp.String(), tv) if findCall(respText.String(), toolResp.String()) {
return nil
}
// Check if this message was sent privately to specific characters
// If so, trigger those characters to respond if that char is not controlled by user
// perhaps we should have narrator role to determine which char is next to act
if cfg.AutoTurn {
lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
if len(lastMsg.KnownTo) > 0 {
triggerPrivateMessageResponses(&lastMsg)
}
}
return nil
} }
// cleanChatBody removes messages with null or empty content to prevent API issues // cleanChatBody removes messages with null or empty content to prevent API issues
@@ -712,23 +878,16 @@ func cleanChatBody() {
if chatBody == nil || chatBody.Messages == nil { if chatBody == nil || chatBody.Messages == nil {
return return
} }
originalLen := len(chatBody.Messages)
logger.Debug("cleanChatBody: before cleaning", "message_count", originalLen)
for i, msg := range chatBody.Messages {
logger.Debug("cleanChatBody: before clean", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
}
// Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false) // Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false)
// /completion msg where part meant for user and other part tool call // /completion msg where part meant for user and other part tool call
chatBody.Messages = cleanToolCalls(chatBody.Messages) chatBody.Messages = cleanToolCalls(chatBody.Messages)
chatBody.Messages = cleanNullMessages(chatBody.Messages) chatBody.Messages = consolidateAssistantMessages(chatBody.Messages)
logger.Debug("cleanChatBody: after cleaning", "original_len", originalLen, "new_len", len(chatBody.Messages))
for i, msg := range chatBody.Messages {
logger.Debug("cleanChatBody: after clean", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
}
} }
// convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings. // convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings.
func convertJSONToMapStringString(jsonStr string) (map[string]string, error) { func convertJSONToMapStringString(jsonStr string) (map[string]string, error) {
// Extract JSON object from string - models may output extra text after JSON
jsonStr = extractJSON(jsonStr)
var raw map[string]interface{} var raw map[string]interface{}
if err := json.Unmarshal([]byte(jsonStr), &raw); err != nil { if err := json.Unmarshal([]byte(jsonStr), &raw); err != nil {
return nil, err return nil, err
@@ -754,6 +913,23 @@ func convertJSONToMapStringString(jsonStr string) (map[string]string, error) {
return result, nil return result, nil
} }
// extractJSON returns the substring of s spanning the first '{' through the
// last '}', stripping any extra text a model may emit around a JSON object.
// If s already parses as a JSON object, or no valid brace pair exists, s is
// returned unchanged.
func extractJSON(s string) string {
	// Fast path: the whole string is already valid JSON — keep it as-is.
	var probe map[string]interface{}
	if json.Unmarshal([]byte(s), &probe) == nil {
		return s
	}
	// Fall back to slicing between the outermost braces.
	first, last := strings.Index(s, "{"), strings.LastIndex(s, "}")
	if first < 0 || last <= first {
		return s
	}
	return s[first : last+1]
}
// unmarshalFuncCall unmarshals a JSON tool call, converting numeric arguments to strings. // unmarshalFuncCall unmarshals a JSON tool call, converting numeric arguments to strings.
func unmarshalFuncCall(jsonStr string) (*models.FuncCall, error) { func unmarshalFuncCall(jsonStr string) (*models.FuncCall, error) {
type tempFuncCall struct { type tempFuncCall struct {
@@ -789,8 +965,9 @@ func unmarshalFuncCall(jsonStr string) (*models.FuncCall, error) {
return fc, nil return fc, nil
} }
func findCall(msg, toolCall string, tv *tview.TextView) { // findCall: adds chatRoundReq into the chatRoundChan and returns true if does
fc := &models.FuncCall{} func findCall(msg, toolCall string) bool {
var fc *models.FuncCall
if toolCall != "" { if toolCall != "" {
// HTML-decode the tool call string to handle encoded characters like &lt; -> <= // HTML-decode the tool call string to handle encoded characters like &lt; -> <=
decodedToolCall := html.UnescapeString(toolCall) decodedToolCall := html.UnescapeString(toolCall)
@@ -807,8 +984,13 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
chatBody.Messages = append(chatBody.Messages, toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Clear the stored tool call ID after using it (no longer needed) // Clear the stored tool call ID after using it (no longer needed)
// Trigger the assistant to continue processing with the error message // Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false) crr := &models.ChatRoundReq{
return Role: cfg.AssistantRole,
}
// provoke next llm msg after failed tool call
chatRoundChan <- crr
// chatRound("", cfg.AssistantRole, tv, false, false)
return true
} }
lastToolCall.Args = openAIToolMap lastToolCall.Args = openAIToolMap
fc = lastToolCall fc = lastToolCall
@@ -820,8 +1002,8 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
} }
} else { } else {
jsStr := toolCallRE.FindString(msg) jsStr := toolCallRE.FindString(msg)
if jsStr == "" { if jsStr == "" { // no tool call case
return return false
} }
prefix := "__tool_call__\n" prefix := "__tool_call__\n"
suffix := "\n__tool_call__" suffix := "\n__tool_call__"
@@ -840,8 +1022,13 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
chatBody.Messages = append(chatBody.Messages, toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", len(chatBody.Messages)) logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", len(chatBody.Messages))
// Trigger the assistant to continue processing with the error message // Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false) // chatRound("", cfg.AssistantRole, tv, false, false)
return crr := &models.ChatRoundReq{
Role: cfg.AssistantRole,
}
// provoke next llm msg after failed tool call
chatRoundChan <- crr
return true
} }
// Update lastToolCall with parsed function call // Update lastToolCall with parsed function call
lastToolCall.ID = fc.ID lastToolCall.ID = fc.ID
@@ -874,13 +1061,17 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
lastToolCall.ID = "" lastToolCall.ID = ""
// Trigger the assistant to continue processing with the new tool response // Trigger the assistant to continue processing with the new tool response
// by calling chatRound with empty content to continue the assistant's response // by calling chatRound with empty content to continue the assistant's response
chatRound("", cfg.AssistantRole, tv, false, false) crr := &models.ChatRoundReq{
return Role: cfg.AssistantRole,
}
// failed to find tool
chatRoundChan <- crr
return true
} }
resp := callToolWithAgent(fc.Name, fc.Args) resp := callToolWithAgent(fc.Name, fc.Args)
toolMsg := string(resp) // Remove the "tool response: " prefix and %+v formatting toolMsg := string(resp) // Remove the "tool response: " prefix and %+v formatting
logger.Info("llm used tool call", "tool_resp", toolMsg, "tool_attrs", fc) logger.Info("llm used tool call", "tool_resp", toolMsg, "tool_attrs", fc)
fmt.Fprintf(tv, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n", fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
"\n\n", len(chatBody.Messages), cfg.ToolRole, toolMsg) "\n\n", len(chatBody.Messages), cfg.ToolRole, toolMsg)
// Create tool response message with the proper tool_call_id // Create tool response message with the proper tool_call_id
toolResponseMsg := models.RoleMsg{ toolResponseMsg := models.RoleMsg{
@@ -894,12 +1085,16 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
lastToolCall.ID = "" lastToolCall.ID = ""
// Trigger the assistant to continue processing with the new tool response // Trigger the assistant to continue processing with the new tool response
// by calling chatRound with empty content to continue the assistant's response // by calling chatRound with empty content to continue the assistant's response
chatRound("", cfg.AssistantRole, tv, false, false) crr := &models.ChatRoundReq{
Role: cfg.AssistantRole,
}
chatRoundChan <- crr
return true
} }
func chatToTextSlice(showSys bool) []string { func chatToTextSlice(messages []models.RoleMsg, showSys bool) []string {
resp := make([]string, len(chatBody.Messages)) resp := make([]string, len(messages))
for i, msg := range chatBody.Messages { for i, msg := range messages {
// INFO: skips system msg and tool msg // INFO: skips system msg and tool msg
if !showSys && (msg.Role == cfg.ToolRole || msg.Role == "system") { if !showSys && (msg.Role == cfg.ToolRole || msg.Role == "system") {
continue continue
@@ -909,9 +1104,17 @@ func chatToTextSlice(showSys bool) []string {
return resp return resp
} }
func chatToText(showSys bool) string { func chatToText(messages []models.RoleMsg, showSys bool) string {
s := chatToTextSlice(showSys) s := chatToTextSlice(messages, showSys)
return strings.Join(s, "\n") text := strings.Join(s, "\n")
// Collapse thinking blocks if enabled
if thinkingCollapsed {
placeholder := "[yellow::i][thinking... (press Alt+T to expand)][-:-:-]"
text = thinkRE.ReplaceAllString(text, placeholder)
}
return text
} }
func removeThinking(chatBody *models.ChatBody) { func removeThinking(chatBody *models.ChatBody) {
@@ -951,28 +1154,27 @@ func addNewChat(chatName string) {
activeChatName = chat.Name activeChatName = chat.Name
} }
func applyCharCard(cc *models.CharCard) { func applyCharCard(cc *models.CharCard, loadHistory bool) {
cfg.AssistantRole = cc.Role cfg.AssistantRole = cc.Role
// FIXME: remove
history, err := loadAgentsLastChat(cfg.AssistantRole) history, err := loadAgentsLastChat(cfg.AssistantRole)
if err != nil { if err != nil || !loadHistory {
// too much action for err != nil; loadAgentsLastChat needs to be split up // too much action for err != nil; loadAgentsLastChat needs to be split up
logger.Warn("failed to load last agent chat;", "agent", cc.Role, "err", err)
history = []models.RoleMsg{ history = []models.RoleMsg{
{Role: "system", Content: cc.SysPrompt}, {Role: "system", Content: cc.SysPrompt},
{Role: cfg.AssistantRole, Content: cc.FirstMsg}, {Role: cfg.AssistantRole, Content: cc.FirstMsg},
} }
logger.Warn("failed to load last agent chat;", "agent", cc.Role, "err", err, "new_history", history)
addNewChat("") addNewChat("")
} }
chatBody.Messages = history chatBody.Messages = history
} }
func charToStart(agentName string) bool { func charToStart(agentName string, keepSysP bool) bool {
cc, ok := sysMap[agentName] cc, ok := sysMap[agentName]
if !ok { if !ok {
return false return false
} }
applyCharCard(cc) applyCharCard(cc, keepSysP)
return true return true
} }
@@ -1025,7 +1227,7 @@ func summarizeAndStartNewChat() {
return return
} }
// Start a new chat // Start a new chat
startNewChat() startNewChat(true)
// Inject summary as a tool call response // Inject summary as a tool call response
toolMsg := models.RoleMsg{ toolMsg := models.RoleMsg{
Role: cfg.ToolRole, Role: cfg.ToolRole,
@@ -1034,7 +1236,7 @@ func summarizeAndStartNewChat() {
} }
chatBody.Messages = append(chatBody.Messages, toolMsg) chatBody.Messages = append(chatBody.Messages, toolMsg)
// Update UI // Update UI
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
// Update storage // Update storage
if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil { if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
@@ -1044,13 +1246,24 @@ func summarizeAndStartNewChat() {
} }
func init() { func init() {
// ctx, cancel := context.WithCancel(context.Background())
var err error var err error
cfg, err = config.LoadConfig("config.toml") cfg, err = config.LoadConfig("config.toml")
if err != nil { if err != nil {
fmt.Println("failed to load config.toml") fmt.Println("failed to load config.toml")
cancel()
os.Exit(1) os.Exit(1)
return return
} }
// Set image base directory for path display
baseDir := cfg.FilePickerDir
if baseDir == "" || baseDir == "." {
// Resolve "." to current working directory
if wd, err := os.Getwd(); err == nil {
baseDir = wd
}
}
models.SetImageBaseDir(baseDir)
defaultStarter = []models.RoleMsg{ defaultStarter = []models.RoleMsg{
{Role: "system", Content: basicSysMsg}, {Role: "system", Content: basicSysMsg},
{Role: cfg.AssistantRole, Content: defaultFirstMsg}, {Role: cfg.AssistantRole, Content: defaultFirstMsg},
@@ -1059,11 +1272,8 @@ func init() {
os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil { if err != nil {
slog.Error("failed to open log file", "error", err, "filename", cfg.LogFile) slog.Error("failed to open log file", "error", err, "filename", cfg.LogFile)
return cancel()
} os.Exit(1)
defaultStarterBytes, err = json.Marshal(defaultStarter)
if err != nil {
slog.Error("failed to marshal defaultStarter", "error", err)
return return
} }
// load cards // load cards
@@ -1074,13 +1284,17 @@ func init() {
logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel})) logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel}))
store = storage.NewProviderSQL(cfg.DBPATH, logger) store = storage.NewProviderSQL(cfg.DBPATH, logger)
if store == nil { if store == nil {
cancel()
os.Exit(1) os.Exit(1)
return
} }
ragger = rag.New(logger, store, cfg) ragger = rag.New(logger, store, cfg)
// https://github.com/coreydaley/ggerganov-llama.cpp/blob/master/examples/server/README.md // https://github.com/coreydaley/ggerganov-llama.cpp/blob/master/examples/server/README.md
// load all chats in memory // load all chats in memory
if _, err := loadHistoryChats(); err != nil { if _, err := loadHistoryChats(); err != nil {
logger.Error("failed to load chat", "error", err) logger.Error("failed to load chat", "error", err)
cancel()
os.Exit(1)
return return
} }
lastToolCall = &models.FuncCall{} lastToolCall = &models.FuncCall{}
@@ -1101,4 +1315,54 @@ func init() {
// Initialize scrollToEndEnabled based on config // Initialize scrollToEndEnabled based on config
scrollToEndEnabled = cfg.AutoScrollEnabled scrollToEndEnabled = cfg.AutoScrollEnabled
go updateModelLists() go updateModelLists()
go chatWatcher(ctx)
}
// getValidKnowToRecipient picks the first character from msg.KnownTo that
// should be auto-triggered to respond to a private message.
// It returns ("", false) when:
//   - the char-specific-context feature is disabled (or cfg is nil);
//   - the KnownTo list names every chat role, i.e. the message is public;
//   - the next eligible recipient is the user-controlled character
//     (cfg.UserRole or cfg.WriteNextMsgAs) — that turn belongs to the user;
//   - no eligible recipient is found at all.
//
// NOTE(review): slices.Sort mutates msg.KnownTo in place, so the stored
// recipient order changes as a side effect of calling this — confirm no
// caller depends on the original order.
func getValidKnowToRecipient(msg *models.RoleMsg) (string, bool) {
	if cfg == nil || !cfg.CharSpecificContextEnabled {
		return "", false
	}
	// case where all roles are in the tag => public message
	cr := listChatRoles()
	slices.Sort(cr)
	slices.Sort(msg.KnownTo)
	if slices.Equal(cr, msg.KnownTo) {
		logger.Info("got msg with tag mentioning every role")
		return "", false
	}
	// Check each character in the KnownTo list
	for _, recipient := range msg.KnownTo {
		if recipient == msg.Role || recipient == cfg.ToolRole {
			// sender itself or the tool role cannot be a recipient; skip
			continue
		}
		// Skip if this is the user character (user handles their own turn)
		// If user is in KnownTo, stop processing - it's the user's turn
		if recipient == cfg.UserRole || recipient == cfg.WriteNextMsgAs {
			return "", false
		}
		// First non-sender, non-tool, non-user name wins.
		return recipient, true
	}
	return "", false
}
// triggerPrivateMessageResponses checks if a message was sent privately to specific characters
// and triggers those non-user characters to respond
func triggerPrivateMessageResponses(msg *models.RoleMsg) {
recipient, ok := getValidKnowToRecipient(msg)
if !ok || recipient == "" {
return
}
// Trigger the recipient character to respond
triggerMsg := recipient + ":\n"
// Send empty message so LLM continues naturally from the conversation
crr := &models.ChatRoundReq{
UserMsg: triggerMsg,
Role: recipient,
Resume: true,
}
fmt.Fprintf(textView, "\n[-:-:b](%d) ", len(chatBody.Messages))
fmt.Fprint(textView, roleToIcon(recipient))
fmt.Fprint(textView, "[-:-:-]\n")
chatRoundChan <- crr
} }

View File

@@ -117,35 +117,35 @@ func TestConsolidateConsecutiveAssistantMessages(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
result := consolidateConsecutiveAssistantMessages(tt.input) result := consolidateAssistantMessages(tt.input)
if len(result) != len(tt.expected) { if len(result) != len(tt.expected) {
t.Errorf("Expected %d messages, got %d", len(tt.expected), len(result)) t.Errorf("Expected %d messages, got %d", len(tt.expected), len(result))
t.Logf("Result: %+v", result) t.Logf("Result: %+v", result)
t.Logf("Expected: %+v", tt.expected) t.Logf("Expected: %+v", tt.expected)
return return
} }
for i, expectedMsg := range tt.expected { for i, expectedMsg := range tt.expected {
if i >= len(result) { if i >= len(result) {
t.Errorf("Result has fewer messages than expected at index %d", i) t.Errorf("Result has fewer messages than expected at index %d", i)
continue continue
} }
actualMsg := result[i] actualMsg := result[i]
if actualMsg.Role != expectedMsg.Role { if actualMsg.Role != expectedMsg.Role {
t.Errorf("Message %d: expected role '%s', got '%s'", i, expectedMsg.Role, actualMsg.Role) t.Errorf("Message %d: expected role '%s', got '%s'", i, expectedMsg.Role, actualMsg.Role)
} }
if actualMsg.Content != expectedMsg.Content { if actualMsg.Content != expectedMsg.Content {
t.Errorf("Message %d: expected content '%s', got '%s'", i, expectedMsg.Content, actualMsg.Content) t.Errorf("Message %d: expected content '%s', got '%s'", i, expectedMsg.Content, actualMsg.Content)
} }
if actualMsg.ToolCallID != expectedMsg.ToolCallID { if actualMsg.ToolCallID != expectedMsg.ToolCallID {
t.Errorf("Message %d: expected ToolCallID '%s', got '%s'", i, expectedMsg.ToolCallID, actualMsg.ToolCallID) t.Errorf("Message %d: expected ToolCallID '%s', got '%s'", i, expectedMsg.ToolCallID, actualMsg.ToolCallID)
} }
} }
// Additional check: ensure no messages were lost // Additional check: ensure no messages were lost
if !reflect.DeepEqual(result, tt.expected) { if !reflect.DeepEqual(result, tt.expected) {
t.Errorf("Result does not match expected:\nResult: %+v\nExpected: %+v", result, tt.expected) t.Errorf("Result does not match expected:\nResult: %+v\nExpected: %+v", result, tt.expected)
@@ -286,4 +286,387 @@ func TestConvertJSONToMapStringString(t *testing.T) {
} }
}) })
} }
} }
// TestParseKnownToTag verifies that parseKnownToTag extracts the list of
// character names from a @...@ tag in message content, honoring the
// CharSpecificContextEnabled config switch.
//
// Fix: the previous version carried a wantCleaned field in every case that
// was never asserted (parseKnownToTag returns only the recipient list), so
// the dead field and its values have been removed.
func TestParseKnownToTag(t *testing.T) {
	tests := []struct {
		name        string
		content     string
		enabled     bool
		tag         string
		wantKnownTo []string
	}{
		{
			name:        "feature disabled returns original",
			content:     "Hello @Alice@",
			enabled:     false,
			tag:         "@",
			wantKnownTo: nil,
		},
		{
			name:        "no tag returns original",
			content:     "Hello Alice",
			enabled:     true,
			tag:         "@",
			wantKnownTo: nil,
		},
		{
			name:        "single tag with one char",
			content:     "Hello @Alice@",
			enabled:     true,
			tag:         "@",
			wantKnownTo: []string{"Alice"},
		},
		{
			name:        "single tag with two chars",
			content:     "Secret @Alice,Bob@ message",
			enabled:     true,
			tag:         "@",
			wantKnownTo: []string{"Alice", "Bob"},
		},
		{
			name:        "tag at beginning",
			content:     "@Alice@ Hello",
			enabled:     true,
			tag:         "@",
			wantKnownTo: []string{"Alice"},
		},
		{
			name:        "tag at end",
			content:     "Hello @Alice@",
			enabled:     true,
			tag:         "@",
			wantKnownTo: []string{"Alice"},
		},
		{
			name:        "multiple tags",
			content:     "First @Alice@ then @Bob@",
			enabled:     true,
			tag:         "@",
			wantKnownTo: []string{"Alice", "Bob"},
		},
		{
			name:        "custom tag",
			content:     "Secret @Alice,Bob@ message",
			enabled:     true,
			tag:         "@",
			wantKnownTo: []string{"Alice", "Bob"},
		},
		{
			name:        "empty list",
			content:     "Secret @@@",
			enabled:     true,
			tag:         "@",
			wantKnownTo: nil,
		},
		{
			name:        "whitespace around commas",
			content:     "@ Alice , Bob , Carl @",
			enabled:     true,
			tag:         "@",
			wantKnownTo: []string{"Alice", "Bob", "Carl"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// parseKnownToTag reads the package-level cfg, so each case
			// installs its own config before calling it.
			testCfg := &config.Config{
				CharSpecificContextEnabled: tt.enabled,
				CharSpecificContextTag:     tt.tag,
			}
			cfg = testCfg
			knownTo := parseKnownToTag(tt.content)
			if len(knownTo) != len(tt.wantKnownTo) {
				t.Errorf("parseKnownToTag() knownTo length = %v, want %v", len(knownTo), len(tt.wantKnownTo))
				t.Logf("got: %v", knownTo)
				t.Logf("want: %v", tt.wantKnownTo)
			} else {
				// Order is asserted here (unlike processMessageTag's test),
				// since the tag parser yields names in encounter order.
				for i, got := range knownTo {
					if got != tt.wantKnownTo[i] {
						t.Errorf("parseKnownToTag() knownTo[%d] = %q, want %q", i, got, tt.wantKnownTo[i])
					}
				}
			}
		})
	}
}
// TestProcessMessageTag verifies that processMessageTag populates a message's
// KnownTo list from its @...@ tag and automatically includes the sender.
// NOTE(review): every case populates wantMsg.Content, but the assertions below
// only check KnownTo — cleaned content is never verified here; confirm whether
// that omission is intentional.
func TestProcessMessageTag(t *testing.T) {
	tests := []struct {
		name    string
		msg     models.RoleMsg
		enabled bool
		tag     string
		wantMsg models.RoleMsg
	}{
		{
			name: "feature disabled returns unchanged",
			msg: models.RoleMsg{
				Role:    "Alice",
				Content: "Secret @Bob@",
			},
			enabled: false,
			tag:     "@",
			wantMsg: models.RoleMsg{
				Role:    "Alice",
				Content: "Secret @Bob@",
				KnownTo: nil,
			},
		},
		{
			name: "no tag, no knownTo",
			msg: models.RoleMsg{
				Role:    "Alice",
				Content: "Hello everyone",
			},
			enabled: true,
			tag:     "@",
			wantMsg: models.RoleMsg{
				Role:    "Alice",
				Content: "Hello everyone",
				KnownTo: nil,
			},
		},
		{
			name: "tag with Bob, adds Alice automatically",
			msg: models.RoleMsg{
				Role:    "Alice",
				Content: "Secret @Bob@",
			},
			enabled: true,
			tag:     "@",
			wantMsg: models.RoleMsg{
				Role:    "Alice",
				Content: "Secret",
				KnownTo: []string{"Bob", "Alice"},
			},
		},
		{
			name: "tag already includes sender",
			msg: models.RoleMsg{
				Role:    "Alice",
				Content: "@Alice,Bob@",
			},
			enabled: true,
			tag:     "@",
			wantMsg: models.RoleMsg{
				Role:    "Alice",
				Content: "",
				KnownTo: []string{"Alice", "Bob"},
			},
		},
		{
			name: "knownTo already set (from DB), tag still processed",
			msg: models.RoleMsg{
				Role:    "Alice",
				Content: "Secret @Bob@",
				KnownTo: []string{"Alice"}, // from previous processing
			},
			enabled: true,
			tag:     "@",
			wantMsg: models.RoleMsg{
				Role:    "Alice",
				Content: "Secret",
				KnownTo: []string{"Bob", "Alice"},
			},
		},
		{
			name: "example from real use",
			msg: models.RoleMsg{
				Role:    "Alice",
				Content: "I'll start with a simple one! The word is 'banana'. (ooc: @Bob@)",
				KnownTo: []string{"Alice"}, // from previous processing
			},
			enabled: true,
			tag:     "@",
			wantMsg: models.RoleMsg{
				Role:    "Alice",
				Content: "I'll start with a simple one! The word is 'banana'. (ooc: @Bob@)",
				KnownTo: []string{"Bob", "Alice"},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// processMessageTag reads the package-level cfg; install a
			// per-case config first.
			testCfg := &config.Config{
				CharSpecificContextEnabled: tt.enabled,
				CharSpecificContextTag:     tt.tag,
			}
			cfg = testCfg
			got := processMessageTag(&tt.msg)
			if len(got.KnownTo) != len(tt.wantMsg.KnownTo) {
				t.Errorf("processMessageTag() KnownTo length = %v, want %v", len(got.KnownTo), len(tt.wantMsg.KnownTo))
				t.Logf("got: %v", got.KnownTo)
				t.Logf("want: %v", tt.wantMsg.KnownTo)
			} else {
				// order may differ; check membership
				for _, want := range tt.wantMsg.KnownTo {
					found := false
					for _, gotVal := range got.KnownTo {
						if gotVal == want {
							found = true
							break
						}
					}
					if !found {
						t.Errorf("processMessageTag() missing KnownTo entry %q, got %v", want, got.KnownTo)
					}
				}
			}
		})
	}
}
// TestFilterMessagesForCharacter checks that filterMessagesForCharacter keeps
// only the messages a given character may see: public messages (empty
// KnownTo) plus those whose KnownTo list names the character. Each case's
// wantIndices index into the shared `messages` fixture below.
func TestFilterMessagesForCharacter(t *testing.T) {
	messages := []models.RoleMsg{
		{Role: "system", Content: "System message", KnownTo: nil}, // visible to all
		{Role: "Alice", Content: "Hello everyone", KnownTo: nil}, // visible to all
		{Role: "Alice", Content: "Secret for Bob", KnownTo: []string{"Alice", "Bob"}},
		{Role: "Bob", Content: "Reply to Alice", KnownTo: []string{"Alice", "Bob"}},
		{Role: "Alice", Content: "Private to Carl", KnownTo: []string{"Alice", "Carl"}},
		{Role: "Carl", Content: "Hi all", KnownTo: nil}, // visible to all
	}
	tests := []struct {
		name        string
		enabled     bool
		character   string
		wantIndices []int // indices from original messages that should be included
	}{
		{
			name:        "feature disabled returns all",
			enabled:     false,
			character:   "Alice",
			wantIndices: []int{0, 1, 2, 3, 4, 5},
		},
		{
			name:        "character empty returns all",
			enabled:     true,
			character:   "",
			wantIndices: []int{0, 1, 2, 3, 4, 5},
		},
		{
			name:        "Alice sees all including Carl-private",
			enabled:     true,
			character:   "Alice",
			wantIndices: []int{0, 1, 2, 3, 4, 5},
		},
		{
			name:        "Bob sees Alice-Bob secrets and all public",
			enabled:     true,
			character:   "Bob",
			wantIndices: []int{0, 1, 2, 3, 5},
		},
		{
			name:        "Carl sees Alice-Carl secret and public",
			enabled:     true,
			character:   "Carl",
			wantIndices: []int{0, 1, 4, 5},
		},
		{
			name:        "David sees only public messages",
			enabled:     true,
			character:   "David",
			wantIndices: []int{0, 1, 5},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// filterMessagesForCharacter reads the package-level cfg.
			testCfg := &config.Config{
				CharSpecificContextEnabled: tt.enabled,
				CharSpecificContextTag:     "@",
			}
			cfg = testCfg
			got := filterMessagesForCharacter(messages, tt.character)
			if len(got) != len(tt.wantIndices) {
				t.Errorf("filterMessagesForCharacter() returned %d messages, want %d", len(got), len(tt.wantIndices))
				t.Logf("got: %v", got)
				return
			}
			// Compare by content, pairing each kept message with the
			// fixture entry it should correspond to.
			for i, idx := range tt.wantIndices {
				if got[i].Content != messages[idx].Content {
					t.Errorf("filterMessagesForCharacter() message %d content = %q, want %q", i, got[i].Content, messages[idx].Content)
				}
			}
		})
	}
}
func TestRoleMsgCopyPreservesKnownTo(t *testing.T) {
// Test that the Copy() method preserves the KnownTo field
originalMsg := models.RoleMsg{
Role: "Alice",
Content: "Test message",
KnownTo: []string{"Bob", "Charlie"},
}
copiedMsg := originalMsg.Copy()
if copiedMsg.Role != originalMsg.Role {
t.Errorf("Copy() failed to preserve Role: got %q, want %q", copiedMsg.Role, originalMsg.Role)
}
if copiedMsg.Content != originalMsg.Content {
t.Errorf("Copy() failed to preserve Content: got %q, want %q", copiedMsg.Content, originalMsg.Content)
}
if !reflect.DeepEqual(copiedMsg.KnownTo, originalMsg.KnownTo) {
t.Errorf("Copy() failed to preserve KnownTo: got %v, want %v", copiedMsg.KnownTo, originalMsg.KnownTo)
}
if copiedMsg.ToolCallID != originalMsg.ToolCallID {
t.Errorf("Copy() failed to preserve ToolCallID: got %q, want %q", copiedMsg.ToolCallID, originalMsg.ToolCallID)
}
if copiedMsg.IsContentParts() != originalMsg.IsContentParts() {
t.Errorf("Copy() failed to preserve hasContentParts flag")
}
}
// TestKnownToFieldPreservationScenario reproduces a regression observed in
// production logs where the KnownTo field was silently dropped when a RoleMsg
// was copied during message processing.
func TestKnownToFieldPreservationScenario(t *testing.T) {
	// Test the specific scenario from the log where KnownTo field was getting lost
	originalMsg := models.RoleMsg{
		Role:    "Alice",
		Content: `Alice: "Okay, Bob. The word is... **'Ephemeral'**. (ooc: @Bob@)"`,
		KnownTo: []string{"Bob"}, // This was detected in the log
	}
	t.Logf("Original message - Role: %s, Content: %s, KnownTo: %v",
		originalMsg.Role, originalMsg.Content, originalMsg.KnownTo)
	// Simulate what happens when the message gets copied during processing
	copiedMsg := originalMsg.Copy()
	t.Logf("Copied message - Role: %s, Content: %s, KnownTo: %v",
		copiedMsg.Role, copiedMsg.Content, copiedMsg.KnownTo)
	// Check if KnownTo field survived the copy
	if len(copiedMsg.KnownTo) == 0 {
		t.Error("ERROR: KnownTo field was lost during copy!")
	} else {
		t.Log("SUCCESS: KnownTo field was preserved during copy!")
	}
	// Verify the content is the same
	if copiedMsg.Content != originalMsg.Content {
		t.Errorf("Content was changed during copy: got %s, want %s", copiedMsg.Content, originalMsg.Content)
	}
	// Verify the KnownTo slice is properly copied (deep equality, not just length)
	if !reflect.DeepEqual(copiedMsg.KnownTo, originalMsg.KnownTo) {
		t.Errorf("KnownTo was not properly copied: got %v, want %v", copiedMsg.KnownTo, originalMsg.KnownTo)
	}
}

143
char-specific-context.md Normal file
View File

@@ -0,0 +1,143 @@
# Character-Specific Context
**/completion only feature; won't work with /v1/chat**
## Overview
Character-Specific Context is a feature that enables private communication between characters in a multi-character chat. When enabled, messages can be tagged with a special marker indicating which characters should "know" about (see) that message. This allows for secret conversations, private information sharing, and roleplaying scenarios where certain characters are not privy to all communications.
(This feature works by filtering the chat history for each character based on the `KnownTo` field associated with each message. Only messages that are intended for a particular character (or are public) are included in that character's view of the conversation.)
## How It Works
### Tagging Messages
Messages can be tagged with a special string (by default `@`) followed by a comma-separated list of character names. The tag can appear anywhere in the message content. **The comma-separated list of characters must be closed with a trailing `@` (so the regexp knows where the tag ends).**
**Example:**
```
Alice: @Bob@ Can you keep a secret?
```
**To avoid breaking immersion, it is better to place the tag in (ooc:)**
```
Alice: (ooc: @Bob@) Can you keep a secret?
```
This message will be visible only to Alice (the sender) and Bob. The tag is parsed by `parseKnownToTag` and the resulting list of character names is stored in the `KnownTo` field of the message (`RoleMsg`). The sender is automatically added to the `KnownTo` list (if not already present) by `processMessageTag`.
Multiple tags can be used in a single message; all mentioned characters are combined into the `KnownTo` list.
### Filtering Chat History
When it's a character's turn to respond, the function `filterMessagesForCharacter` filters the full message list, returning only those messages where:
- `KnownTo` is empty (message is public), OR
- `KnownTo` contains the character's name.
System messages (`role == "system"`) are always visible to all characters.
The filtered history is then used to construct the prompt sent to the LLM. This ensures each character only sees messages they are supposed to know about.
### Configuration
Two configuration settings control this feature:
- `CharSpecificContextEnabled` boolean; enables or disables the feature globally.
- `CharSpecificContextTag` string; the tag used to mark private messages. Default is `@`.
These are set in `config.toml` (see `config.example.toml` for the default values).
### Processing Pipeline
1. **Message Creation** When a message is added to the chat (by a user or LLM), `processMessageTag` scans its content for the known-to tag.
2. **Storage** The parsed `KnownTo` list is stored with the message in the database.
3. **Filtering** Whenever the chat history is needed (e.g., for an LLM request), `filterMessagesForCharacter` is called with the target character (the one whose turn it is). The filtered list is used for the prompt.
4. **Display** The TUI also uses the same filtering when showing the conversation for a selected character (see “Writing as…”).
## Usage Examples
### Basic Private Message
Alice wants to tell Bob something without Carl knowing:
```
Alice: @Bob@ Meet me at the library tonight.
```
Result:
- Alice (sender) sees the message.
- Bob sees the message.
- Carl does **not** see the message in his chat history.
### Multi-recipient Secret
Alice shares a secret with Bob and Carl, but not David:
```
Alice: (ooc: @Bob,Carl@) The treasure is hidden under the old oak.
```
### Public Message
A message without any tag (or with an empty `KnownTo`) is visible to all characters.
```
Alice: Hello everyone!
```
### UserRole Considerations
The human user can assume any character's identity via the "Writing as…" feature (`cfg.UserRole` and `cfg.WriteNextMsgAs`). When the user writes as a character, the same filtering rules apply: the user will see only the messages that character would see.
## Interaction with AutoTurn and WriteNextMsgAsCompletionAgent
### WriteNextMsgAsCompletionAgent
This configuration variable determines which character the LLM should respond as. It is used by `filterMessagesForCurrentCharacter` to select the target character for filtering. If `WriteNextMsgAsCompletionAgent` is set, the LLM will reply in the voice of that character, and only messages visible to that character will be included in the prompt.
### AutoTurn
Normally the LLM and the user (human) take turns writing messages. Private messages create an issue: the LLM can write a private message that is not visible to the character the user controls, so to the human it would appear that the LLM did not respond. In this case it is desirable for the LLM to answer itself, role-playing as the target character of that private message.
When `AutoTurn` is enabled, the system can automatically trigger responses from llm as characters who have received a private message. The logic in `triggerPrivateMessageResponses` checks the `KnownTo` list of the last message and, for each recipient that is not the user (or the sender), queues a chat round for that character. This creates a chain of private replies without user intervention.
**Example flow:**
1. Alice (llm) sends a private message to Bob (llm) (`KnownTo = ["Alice","Bob"]`).
2. Carl (user) sees nothing.
3. `AutoTurn` detects this and queues a response from Bob.
4. Bob replies (potentially also privately).
5. The conversation continues automatically until public message is made, or Carl (user) was included in `KnownTo`.
## Cardmaking with multiple characters
So far only json format supports multiple characters.
[card example](sysprompts/alice_bob_carl.json)
## Limitations & Caveats
### Endpoint Compatibility
Character-specific context relies on the `/completion` endpoint (or other completion-style endpoints) where the LLM is presented with a raw text prompt containing the entire filtered history. It does **not** work with OpenAI-style `/v1/chat/completions` endpoints, because those endpoints enforce a fixed role set (`user`/`assistant`/`system`) and strip custom role names and metadata.
### TTS
A text message may be hidden from the user's character, but if TTS is enabled it will start being read aloud until the tags are parsed. Once the message is determined not to be meant for the user, TTS stops.
### Tag Parsing
- The tag is case-sensitive.
- Whitespace around character names is trimmed.
- If the tag appears multiple times, all mentioned characters are combined.
### Database Storage
The `KnownTo` field is stored as a JSON array in the database. Existing messages that were created before enabling the feature will have an empty `KnownTo` and thus be visible to all characters.
## Relevant Configuration
```toml
CharSpecificContextEnabled = true
CharSpecificContextTag = "@"
AutoTurn = false
```

View File

@@ -19,7 +19,7 @@ AssistantRole = "assistant"
SysDir = "sysprompts" SysDir = "sysprompts"
ChunkLimit = 100000 ChunkLimit = 100000
AutoScrollEnabled = true AutoScrollEnabled = true
# AutoCleanToolCallsFromCtx = false AutoCleanToolCallsFromCtx = false
# rag settings # rag settings
RAGBatchSize = 1 RAGBatchSize = 1
RAGWordLimit = 80 RAGWordLimit = 80
@@ -39,7 +39,14 @@ WhisperBinaryPath = "./batteries/whisper.cpp/build/bin/whisper-cli" # Path to wh
WhisperModelPath = "./batteries/whisper.cpp/ggml-large-v3-turbo-q5_0.bin" # Path to whisper model file (for WHISPER_BINARY mode) WhisperModelPath = "./batteries/whisper.cpp/ggml-large-v3-turbo-q5_0.bin" # Path to whisper model file (for WHISPER_BINARY mode)
STT_LANG = "en" # Language for speech recognition (for WHISPER_BINARY mode) STT_LANG = "en" # Language for speech recognition (for WHISPER_BINARY mode)
STT_SR = 16000 # Sample rate for audio recording STT_SR = 16000 # Sample rate for audio recording
#
DBPATH = "gflt.db" DBPATH = "gflt.db"
FilePickerDir = "." # Directory where file picker should start FilePickerDir = "." # Directory where file picker should start
FilePickerExts = "png,jpg,jpeg,gif,webp" # Comma-separated list of allowed file extensions for file picker FilePickerExts = "png,jpg,jpeg,gif,webp" # Comma-separated list of allowed file extensions for file picker
CodingDir = "." # Default directory for coding assistant file operations (relative paths resolved against this)
EnableMouse = false # Enable mouse support in the UI EnableMouse = false # Enable mouse support in the UI
# character specific context
CharSpecificContextEnabled = true
CharSpecificContextTag = "@"
AutoTurn = true
StripThinkingFromAPI = true # Strip <think> blocks from messages before sending to LLM (keeps them in chat history)

View File

@@ -19,6 +19,7 @@ type Config struct {
ToolRole string `toml:"ToolRole"` ToolRole string `toml:"ToolRole"`
ToolUse bool `toml:"ToolUse"` ToolUse bool `toml:"ToolUse"`
ThinkUse bool `toml:"ThinkUse"` ThinkUse bool `toml:"ThinkUse"`
StripThinkingFromAPI bool `toml:"StripThinkingFromAPI"`
AssistantRole string `toml:"AssistantRole"` AssistantRole string `toml:"AssistantRole"`
SysDir string `toml:"SysDir"` SysDir string `toml:"SysDir"`
ChunkLimit uint32 `toml:"ChunkLimit"` ChunkLimit uint32 `toml:"ChunkLimit"`
@@ -26,7 +27,13 @@ type Config struct {
WriteNextMsgAs string WriteNextMsgAs string
WriteNextMsgAsCompletionAgent string WriteNextMsgAsCompletionAgent string
SkipLLMResp bool SkipLLMResp bool
AutoCleanToolCallsFromCtx bool `toml:"AutoCleanToolCallsFromCtx"` AutoCleanToolCallsFromCtx bool `toml:"AutoCleanToolCallsFromCtx"`
DBPATH string `toml:"DBPATH"`
FilePickerDir string `toml:"FilePickerDir"`
FilePickerExts string `toml:"FilePickerExts"`
CodingDir string `toml:"CodingDir"`
ImagePreview bool `toml:"ImagePreview"`
EnableMouse bool `toml:"EnableMouse"`
// embeddings // embeddings
RAGEnabled bool `toml:"RAGEnabled"` RAGEnabled bool `toml:"RAGEnabled"`
EmbedURL string `toml:"EmbedURL"` EmbedURL string `toml:"EmbedURL"`
@@ -61,10 +68,10 @@ type Config struct {
WhisperBinaryPath string `toml:"WhisperBinaryPath"` WhisperBinaryPath string `toml:"WhisperBinaryPath"`
WhisperModelPath string `toml:"WhisperModelPath"` WhisperModelPath string `toml:"WhisperModelPath"`
STT_LANG string `toml:"STT_LANG"` STT_LANG string `toml:"STT_LANG"`
DBPATH string `toml:"DBPATH"` // character spefic contetx
FilePickerDir string `toml:"FilePickerDir"` CharSpecificContextEnabled bool `toml:"CharSpecificContextEnabled"`
FilePickerExts string `toml:"FilePickerExts"` CharSpecificContextTag string `toml:"CharSpecificContextTag"`
EnableMouse bool `toml:"EnableMouse"` AutoTurn bool `toml:"AutoTurn"`
} }
func LoadConfig(fn string) (*Config, error) { func LoadConfig(fn string) (*Config, error) {

View File

@@ -145,6 +145,9 @@ This document explains how to set up and configure the application using the `co
#### FilePickerExts (`"png,jpg,jpeg,gif,webp"`) #### FilePickerExts (`"png,jpg,jpeg,gif,webp"`)
- Comma-separated list of allowed file extensions for the file picker. - Comma-separated list of allowed file extensions for the file picker.
#### CodingDir (`"."`)
- Default directory for coding assistant file operations. Relative paths in file tools (file_read, file_write, etc.) are resolved against this directory. Use absolute paths (starting with `/`) to bypass this.
#### EnableMouse (`false`) #### EnableMouse (`false`)
- Enable or disable mouse support in the UI. When set to `true`, allows clicking buttons and interacting with UI elements using the mouse, but prevents the terminal from handling mouse events normally (such as selecting and copying text). When set to `false`, enables default terminal behavior allowing you to select and copy text, but disables mouse interaction with UI elements. - Enable or disable mouse support in the UI. When set to `true`, allows clicking buttons and interacting with UI elements using the mouse, but prevents the terminal from handling mouse events normally (such as selecting and copying text). When set to `false`, enables default terminal behavior allowing you to select and copy text, but disables mouse interaction with UI elements.

View File

@@ -67,11 +67,9 @@ In case you're running llama.cpp, here is an example of starting the llama.cpp s
For roleplay, /completion endpoints are much better, since /chat endpoints swap any character name to either `user` or `assistant`. For roleplay, /completion endpoints are much better, since /chat endpoints swap any character name to either `user` or `assistant`.
Once you have the desired API endpoint Once you have the desired API endpoint
(for example: http://localhost:8080/completion), (for example: http://localhost:8080/completion),
there are two ways to pick a model: - `Ctrl+L` to show a model selection popup;
- `Ctrl+L` allows you to iterate through the model list while in the main window.
- `Ctrl+P` (opens the properties table). Go to the `Select a model` row and press Enter. A list of available models will appear; pick any that you want, then press `x` to exit the properties table.
#### Llama.cpp model preload #### Llama.cpp model (pre)load
Llama.cpp supports swapping models. To load the picked ones, press `Alt+9`. Llama.cpp supports swapping models. To load the picked ones, press `Alt+9`.
@@ -128,9 +126,9 @@ The screen flashes briefly as it calculates. "I am experiencing degraded functio
``` ```
Once the character name is in history, we can switch who the LLM will respond as by pressing `Ctrl+X`. Once the character name is in history, we can switch who the LLM will respond as by pressing `Ctrl+X`.
For now, it should be rotating between HAL9000, `Username`, Seraphina, and system. For now, it should give a choice between HAL9000, `Username`, Seraphina, and system.
Make the status line mention: `Bot will write as Seraphina (ctrl+x)` After the change the status line should say: `Bot will write as Seraphina (ctrl+x)`
and press Escape to see her reaction. press Escape for llm to write as Seraphina.
#### Image input #### Image input

View File

@@ -13,8 +13,8 @@ import (
"log/slog" "log/slog"
"net/http" "net/http"
"os" "os"
"regexp"
"strings" "strings"
"sync"
"time" "time"
google_translate_tts "github.com/GrailFinder/google-translate-tts" google_translate_tts "github.com/GrailFinder/google-translate-tts"
@@ -30,43 +30,8 @@ var (
TTSFlushChan = make(chan bool, 1) TTSFlushChan = make(chan bool, 1)
TTSDoneChan = make(chan bool, 1) TTSDoneChan = make(chan bool, 1)
// endsWithPunctuation = regexp.MustCompile(`[;.!?]$`) // endsWithPunctuation = regexp.MustCompile(`[;.!?]$`)
threeOrMoreDashesRE = regexp.MustCompile(`-{3,}`)
) )
// cleanText normalizes LLM output for TTS: markdown markers, HTML tags,
// table separator rows, and long dash runs are stripped so the speech
// engine is not fed layout characters.
func cleanText(text string) string {
	// Strip markdown formatting characters in a single pass.
	mdReplacer := strings.NewReplacer(
		"*", "", // bold/italic markers
		"#", "", // headers
		"_", "", // underline/italic markers
		"~", "", // strikethrough markers
		"`", "", // code markers
		"[", "", // link brackets
		"]", "", // link brackets
		"!", "", // exclamation marks
	)
	text = mdReplacer.Replace(text)
	// Drop anything that looks like an HTML tag.
	text = regexp.MustCompile(`<[^>]*>`).ReplaceAllString(text, "")
	// Walk the text line by line: skip table separator rows entirely and
	// remove vertical bars from ordinary lines while keeping their content.
	separatorRE := regexp.MustCompile(`^\s*\|\s*[-=\s]+\|\s*$`)
	var kept []string
	for _, line := range strings.Split(text, "\n") {
		if separatorRE.MatchString(strings.TrimSpace(line)) {
			continue // table separator (e.g. |----|); not speakable
		}
		kept = append(kept, strings.ReplaceAll(line, "|", ""))
	}
	text = strings.Join(kept, "\n")
	// Collapse decorative dash runs (---, ----, ...) to nothing.
	text = threeOrMoreDashesRE.ReplaceAllString(text, "")
	return strings.TrimSpace(text)
}
type Orator interface { type Orator interface {
Speak(text string) error Speak(text string) error
Stop() Stop()
@@ -77,6 +42,7 @@ type Orator interface {
// impl https://github.com/remsky/Kokoro-FastAPI // impl https://github.com/remsky/Kokoro-FastAPI
type KokoroOrator struct { type KokoroOrator struct {
logger *slog.Logger logger *slog.Logger
mu sync.Mutex
URL string URL string
Format models.AudioFormat Format models.AudioFormat
Stream bool Stream bool
@@ -93,6 +59,7 @@ type KokoroOrator struct {
// Google Translate TTS implementation // Google Translate TTS implementation
type GoogleTranslateOrator struct { type GoogleTranslateOrator struct {
logger *slog.Logger logger *slog.Logger
mu sync.Mutex
speech *google_translate_tts.Speech speech *google_translate_tts.Speech
currentStream *beep.Ctrl currentStream *beep.Ctrl
currentDone chan bool currentDone chan bool
@@ -109,6 +76,7 @@ func (o *KokoroOrator) stoproutine() {
for len(TTSTextChan) > 0 { for len(TTSTextChan) > 0 {
<-TTSTextChan <-TTSTextChan
} }
o.mu.Lock()
o.textBuffer.Reset() o.textBuffer.Reset()
if o.currentDone != nil { if o.currentDone != nil {
select { select {
@@ -118,6 +86,7 @@ func (o *KokoroOrator) stoproutine() {
} }
} }
o.interrupt = true o.interrupt = true
o.mu.Unlock()
} }
} }
@@ -128,28 +97,31 @@ func (o *KokoroOrator) readroutine() {
for { for {
select { select {
case chunk := <-TTSTextChan: case chunk := <-TTSTextChan:
o.mu.Lock()
o.interrupt = false o.interrupt = false
// sentenceBuf.WriteString(chunk)
// text := sentenceBuf.String()
_, err := o.textBuffer.WriteString(chunk) _, err := o.textBuffer.WriteString(chunk)
if err != nil { if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err) o.logger.Warn("failed to write to stringbuilder", "error", err)
o.mu.Unlock()
continue continue
} }
text := o.textBuffer.String() text := o.textBuffer.String()
o.mu.Unlock()
sentences := tokenizer.Tokenize(text) sentences := tokenizer.Tokenize(text)
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences)) o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
for i, sentence := range sentences { for i, sentence := range sentences {
if i == len(sentences)-1 { // last sentence if i == len(sentences)-1 { // last sentence
o.mu.Lock()
o.textBuffer.Reset() o.textBuffer.Reset()
_, err := o.textBuffer.WriteString(sentence.Text) _, err := o.textBuffer.WriteString(sentence.Text)
o.mu.Unlock()
if err != nil { if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err) o.logger.Warn("failed to write to stringbuilder", "error", err)
continue continue
} }
continue // if only one (often incomplete) sentence; wait for next chunk continue // if only one (often incomplete) sentence; wait for next chunk
} }
cleanedText := cleanText(sentence.Text) cleanedText := models.CleanText(sentence.Text)
if cleanedText == "" { if cleanedText == "" {
continue // Skip empty text after cleaning continue // Skip empty text after cleaning
} }
@@ -163,7 +135,9 @@ func (o *KokoroOrator) readroutine() {
// lln is done get the whole message out // lln is done get the whole message out
if len(TTSTextChan) > 0 { // otherwise might get stuck if len(TTSTextChan) > 0 { // otherwise might get stuck
for chunk := range TTSTextChan { for chunk := range TTSTextChan {
o.mu.Lock()
_, err := o.textBuffer.WriteString(chunk) _, err := o.textBuffer.WriteString(chunk)
o.mu.Unlock()
if err != nil { if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err) o.logger.Warn("failed to write to stringbuilder", "error", err)
continue continue
@@ -174,16 +148,21 @@ func (o *KokoroOrator) readroutine() {
} }
} }
// flush remaining text // flush remaining text
o.mu.Lock()
remaining := o.textBuffer.String() remaining := o.textBuffer.String()
remaining = cleanText(remaining) remaining = models.CleanText(remaining)
o.textBuffer.Reset() o.textBuffer.Reset()
o.mu.Unlock()
if remaining == "" { if remaining == "" {
continue continue
} }
o.logger.Debug("calling Speak with remainder", "rem", remaining) o.logger.Debug("calling Speak with remainder", "rem", remaining)
sentencesRem := tokenizer.Tokenize(remaining) sentencesRem := tokenizer.Tokenize(remaining)
for _, rs := range sentencesRem { // to avoid dumping large volume of text for _, rs := range sentencesRem { // to avoid dumping large volume of text
if o.interrupt { o.mu.Lock()
interrupt := o.interrupt
o.mu.Unlock()
if interrupt {
break break
} }
if err := o.Speak(rs.Text); err != nil { if err := o.Speak(rs.Text); err != nil {
@@ -240,6 +219,9 @@ func (o *KokoroOrator) GetLogger() *slog.Logger {
} }
func (o *KokoroOrator) requestSound(text string) (io.ReadCloser, error) { func (o *KokoroOrator) requestSound(text string) (io.ReadCloser, error) {
if o.URL == "" {
return nil, fmt.Errorf("TTS URL is empty")
}
payload := map[string]interface{}{ payload := map[string]interface{}{
"input": text, "input": text,
"voice": o.Voice, "voice": o.Voice,
@@ -291,14 +273,18 @@ func (o *KokoroOrator) Speak(text string) error {
o.logger.Debug("failed to init speaker", "error", err) o.logger.Debug("failed to init speaker", "error", err)
} }
done := make(chan bool) done := make(chan bool)
o.mu.Lock()
o.currentDone = done o.currentDone = done
// Create controllable stream and store reference
o.currentStream = &beep.Ctrl{Streamer: beep.Seq(streamer, beep.Callback(func() { o.currentStream = &beep.Ctrl{Streamer: beep.Seq(streamer, beep.Callback(func() {
o.mu.Lock()
close(done) close(done)
o.currentStream = nil o.currentStream = nil
o.currentDone = nil
o.mu.Unlock()
})), Paused: false} })), Paused: false}
o.mu.Unlock()
speaker.Play(o.currentStream) speaker.Play(o.currentStream)
<-o.currentDone <-done
return nil return nil
} }
@@ -307,6 +293,8 @@ func (o *KokoroOrator) Stop() {
o.logger.Debug("attempted to stop orator", "orator", o) o.logger.Debug("attempted to stop orator", "orator", o)
speaker.Lock() speaker.Lock()
defer speaker.Unlock() defer speaker.Unlock()
o.mu.Lock()
defer o.mu.Unlock()
if o.currentStream != nil { if o.currentStream != nil {
// o.currentStream.Paused = true // o.currentStream.Paused = true
o.currentStream.Streamer = nil o.currentStream.Streamer = nil
@@ -322,6 +310,7 @@ func (o *GoogleTranslateOrator) stoproutine() {
for len(TTSTextChan) > 0 { for len(TTSTextChan) > 0 {
<-TTSTextChan <-TTSTextChan
} }
o.mu.Lock()
o.textBuffer.Reset() o.textBuffer.Reset()
if o.currentDone != nil { if o.currentDone != nil {
select { select {
@@ -331,6 +320,7 @@ func (o *GoogleTranslateOrator) stoproutine() {
} }
} }
o.interrupt = true o.interrupt = true
o.mu.Unlock()
} }
} }
@@ -339,26 +329,31 @@ func (o *GoogleTranslateOrator) readroutine() {
for { for {
select { select {
case chunk := <-TTSTextChan: case chunk := <-TTSTextChan:
o.mu.Lock()
o.interrupt = false o.interrupt = false
_, err := o.textBuffer.WriteString(chunk) _, err := o.textBuffer.WriteString(chunk)
if err != nil { if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err) o.logger.Warn("failed to write to stringbuilder", "error", err)
o.mu.Unlock()
continue continue
} }
text := o.textBuffer.String() text := o.textBuffer.String()
o.mu.Unlock()
sentences := tokenizer.Tokenize(text) sentences := tokenizer.Tokenize(text)
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences)) o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
for i, sentence := range sentences { for i, sentence := range sentences {
if i == len(sentences)-1 { // last sentence if i == len(sentences)-1 { // last sentence
o.mu.Lock()
o.textBuffer.Reset() o.textBuffer.Reset()
_, err := o.textBuffer.WriteString(sentence.Text) _, err := o.textBuffer.WriteString(sentence.Text)
o.mu.Unlock()
if err != nil { if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err) o.logger.Warn("failed to write to stringbuilder", "error", err)
continue continue
} }
continue // if only one (often incomplete) sentence; wait for next chunk continue // if only one (often incomplete) sentence; wait for next chunk
} }
cleanedText := cleanText(sentence.Text) cleanedText := models.CleanText(sentence.Text)
if cleanedText == "" { if cleanedText == "" {
continue // Skip empty text after cleaning continue // Skip empty text after cleaning
} }
@@ -372,7 +367,9 @@ func (o *GoogleTranslateOrator) readroutine() {
// lln is done get the whole message out // lln is done get the whole message out
if len(TTSTextChan) > 0 { // otherwise might get stuck if len(TTSTextChan) > 0 { // otherwise might get stuck
for chunk := range TTSTextChan { for chunk := range TTSTextChan {
o.mu.Lock()
_, err := o.textBuffer.WriteString(chunk) _, err := o.textBuffer.WriteString(chunk)
o.mu.Unlock()
if err != nil { if err != nil {
o.logger.Warn("failed to write to stringbuilder", "error", err) o.logger.Warn("failed to write to stringbuilder", "error", err)
continue continue
@@ -382,16 +379,21 @@ func (o *GoogleTranslateOrator) readroutine() {
} }
} }
} }
o.mu.Lock()
remaining := o.textBuffer.String() remaining := o.textBuffer.String()
remaining = cleanText(remaining) remaining = models.CleanText(remaining)
o.textBuffer.Reset() o.textBuffer.Reset()
o.mu.Unlock()
if remaining == "" { if remaining == "" {
continue continue
} }
o.logger.Debug("calling Speak with remainder", "rem", remaining) o.logger.Debug("calling Speak with remainder", "rem", remaining)
sentencesRem := tokenizer.Tokenize(remaining) sentencesRem := tokenizer.Tokenize(remaining)
for _, rs := range sentencesRem { // to avoid dumping large volume of text for _, rs := range sentencesRem { // to avoid dumping large volume of text
if o.interrupt { o.mu.Lock()
interrupt := o.interrupt
o.mu.Unlock()
if interrupt {
break break
} }
if err := o.Speak(rs.Text); err != nil { if err := o.Speak(rs.Text); err != nil {
@@ -434,14 +436,18 @@ func (o *GoogleTranslateOrator) Speak(text string) error {
o.logger.Debug("failed to init speaker", "error", err) o.logger.Debug("failed to init speaker", "error", err)
} }
done := make(chan bool) done := make(chan bool)
o.mu.Lock()
o.currentDone = done o.currentDone = done
// Create controllable stream and store reference
o.currentStream = &beep.Ctrl{Streamer: beep.Seq(playbackStreamer, beep.Callback(func() { o.currentStream = &beep.Ctrl{Streamer: beep.Seq(playbackStreamer, beep.Callback(func() {
o.mu.Lock()
close(done) close(done)
o.currentStream = nil o.currentStream = nil
o.currentDone = nil
o.mu.Unlock()
})), Paused: false} })), Paused: false}
o.mu.Unlock()
speaker.Play(o.currentStream) speaker.Play(o.currentStream)
<-o.currentDone // wait for playback to complete <-done // wait for playback to complete
return nil return nil
} }
@@ -449,6 +455,8 @@ func (o *GoogleTranslateOrator) Stop() {
o.logger.Debug("attempted to stop google translate orator") o.logger.Debug("attempted to stop google translate orator")
speaker.Lock() speaker.Lock()
defer speaker.Unlock() defer speaker.Unlock()
o.mu.Lock()
defer o.mu.Unlock()
if o.currentStream != nil { if o.currentStream != nil {
o.currentStream.Streamer = nil o.currentStream.Streamer = nil
} }

View File

@@ -5,13 +5,17 @@ import (
"gf-lt/models" "gf-lt/models"
"gf-lt/pngmeta" "gf-lt/pngmeta"
"image" "image"
"net/url"
"os" "os"
"os/exec"
"path" "path"
"slices"
"strings" "strings"
"time"
"unicode" "unicode"
"math/rand/v2" "math/rand/v2"
"github.com/rivo/tview"
) )
func isASCII(s string) bool { func isASCII(s string) bool {
@@ -23,6 +27,56 @@ func isASCII(s string) bool {
return true return true
} }
// stripThinkingFromMsg removes thinking blocks from assistant messages
// before they are sent to the API (gated by cfg.StripThinkingFromAPI).
// User, tool, and system messages are returned untouched, since they may
// legitimately contain thinking examples.
func stripThinkingFromMsg(msg *models.RoleMsg) *models.RoleMsg {
	if !cfg.StripThinkingFromAPI {
		return msg
	}
	switch msg.Role {
	case cfg.UserRole, cfg.ToolRole, "system":
		// these roles may quote thinking blocks intentionally; keep as-is
		return msg
	}
	if thinkRE.MatchString(msg.Content) {
		stripped := thinkRE.ReplaceAllString(msg.Content, "")
		// drop whitespace left behind by the removed block
		msg.Content = strings.TrimSpace(stripped)
	}
	return msg
}
// refreshChatDisplay re-renders the chat view for the character the user is
// currently "writing as", hiding messages that character should not see,
// and optionally scrolls to the end of the conversation.
func refreshChatDisplay() {
	// The viewpoint defaults to the user role unless overridden.
	viewingAs := cfg.UserRole
	if cfg.WriteNextMsgAs != "" {
		viewingAs = cfg.WriteNextMsgAs
	}
	// Render only the messages visible to that character.
	visible := filterMessagesForCharacter(chatBody.Messages, viewingAs)
	textView.SetText(chatToText(visible, cfg.ShowSys))
	colorText()
	if scrollToEndEnabled {
		textView.ScrollToEnd()
	}
}
// stopTTSIfNotForUser signals the TTS engine to stop when the given message
// is not visible to the character the user is currently viewing as.
// It is a no-op unless both character-specific context and TTS are enabled.
func stopTTSIfNotForUser(msg *models.RoleMsg) {
	viewingAs := cfg.UserRole
	if cfg.WriteNextMsgAs != "" {
		viewingAs = cfg.WriteNextMsgAs
	}
	if !cfg.CharSpecificContextEnabled || !cfg.TTS_ENABLED {
		return
	}
	if slices.Contains(msg.KnownTo, viewingAs) {
		return // message is meant for the current viewpoint; keep speaking
	}
	// Non-blocking send: TTSDoneChan is buffered with capacity 1, so a
	// plain send would block the caller if a stop signal is already
	// pending; a duplicate signal carries no extra information anyway.
	select {
	case TTSDoneChan <- true:
	default:
	}
}
func colorText() { func colorText() {
text := textView.GetText(false) text := textView.GetText(false)
quoteReplacer := strings.NewReplacer( quoteReplacer := strings.NewReplacer(
@@ -69,7 +123,6 @@ func colorText() {
for i, cb := range codeBlocks { for i, cb := range codeBlocks {
text = strings.Replace(text, fmt.Sprintf(placeholder, i), cb, 1) text = strings.Replace(text, fmt.Sprintf(placeholder, i), cb, 1)
} }
logger.Debug("thinking debug", "blocks", thinkBlocks)
for i, tb := range thinkBlocks { for i, tb := range thinkBlocks {
text = strings.Replace(text, fmt.Sprintf(placeholderThink, i), tb, 1) text = strings.Replace(text, fmt.Sprintf(placeholderThink, i), tb, 1)
} }
@@ -100,23 +153,24 @@ func initSysCards() ([]string, error) {
return labels, nil return labels, nil
} }
func startNewChat() { func startNewChat(keepSysP bool) {
id, err := store.ChatGetMaxID() id, err := store.ChatGetMaxID()
if err != nil { if err != nil {
logger.Error("failed to get chat id", "error", err) logger.Error("failed to get chat id", "error", err)
} }
if ok := charToStart(cfg.AssistantRole); !ok { if ok := charToStart(cfg.AssistantRole, keepSysP); !ok {
logger.Warn("no such sys msg", "name", cfg.AssistantRole) logger.Warn("no such sys msg", "name", cfg.AssistantRole)
} }
// set chat body // set chat body
chatBody.Messages = chatBody.Messages[:2] chatBody.Messages = chatBody.Messages[:2]
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
newChat := &models.Chat{ newChat := &models.Chat{
ID: id + 1, ID: id + 1,
Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole), Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole),
Msgs: string(defaultStarterBytes), // chat is written to db when we get first llm response (or any)
Agent: cfg.AssistantRole, // actual chat history (messages) would be parsed then
CreatedAt: time.Now(), Msgs: "",
Agent: cfg.AssistantRole,
} }
activeChatName = newChat.Name activeChatName = newChat.Name
chatMap[newChat.Name] = newChat chatMap[newChat.Name] = newChat
@@ -166,7 +220,7 @@ func setLogLevel(sl string) {
} }
func listRolesWithUser() []string { func listRolesWithUser() []string {
roles := chatBody.ListRoles() roles := listChatRoles()
// Remove user role if it exists in the list (to avoid duplicates and ensure it's at position 0) // Remove user role if it exists in the list (to avoid duplicates and ensure it's at position 0)
filteredRoles := make([]string, 0, len(roles)) filteredRoles := make([]string, 0, len(roles))
for _, role := range roles { for _, role := range roles {
@@ -176,6 +230,7 @@ func listRolesWithUser() []string {
} }
// Prepend user role to the beginning of the list // Prepend user role to the beginning of the list
result := append([]string{cfg.UserRole}, filteredRoles...) result := append([]string{cfg.UserRole}, filteredRoles...)
slices.Sort(result)
return result return result
} }
@@ -208,6 +263,34 @@ func strInSlice(s string, sl []string) bool {
return false return false
} }
// isLocalLlamacpp reports whether the configured API endpoint points at a
// loopback host, i.e. a local llama.cpp instance.
func isLocalLlamacpp() bool {
	parsed, err := url.Parse(cfg.CurrentAPI)
	if err != nil {
		// unparsable endpoint: treat as non-local
		return false
	}
	switch parsed.Hostname() {
	case "localhost", "127.0.0.1", "::1":
		return true
	}
	return false
}
// getModelColor returns the status-line color tag for the current model:
// orange for non-local endpoints; for local llama.cpp, green when the model
// is confirmed loaded and red otherwise (including on lookup errors).
func getModelColor() string {
	if !isLocalLlamacpp() {
		return "orange"
	}
	loaded, err := isModelLoaded(chatBody.Model)
	if err == nil && loaded {
		return "green"
	}
	// lookup failed or model not loaded
	return "red"
}
func makeStatusLine() string { func makeStatusLine() string {
isRecording := false isRecording := false
if asr != nil { if asr != nil {
@@ -237,9 +320,12 @@ func makeStatusLine() string {
} else { } else {
shellModeInfo = "" shellModeInfo = ""
} }
statusLine := fmt.Sprintf(indexLineCompletion, botRespMode, activeChatName, // Get model color based on load status for local llama.cpp models
cfg.ToolUse, chatBody.Model, cfg.SkipLLMResp, cfg.CurrentAPI, modelColor := getModelColor()
isRecording, persona, botPersona, injectRole) statusLine := fmt.Sprintf(indexLineCompletion, boolColors[botRespMode], botRespMode, activeChatName,
boolColors[cfg.ToolUse], cfg.ToolUse, modelColor, chatBody.Model, boolColors[cfg.SkipLLMResp],
cfg.SkipLLMResp, cfg.CurrentAPI, boolColors[isRecording], isRecording, persona,
botPersona, boolColors[injectRole], injectRole)
return statusLine + imageInfo + shellModeInfo return statusLine + imageInfo + shellModeInfo
} }
@@ -252,3 +338,391 @@ func randString(n int) string {
} }
return string(b) return string(b)
} }
// set of roles within card definition and mention in chat history
func listChatRoles() []string {
currentChat, ok := chatMap[activeChatName]
cbc := chatBody.ListRoles()
if !ok {
return cbc
}
currentCard, ok := sysMap[currentChat.Agent]
if !ok {
// case which won't let to switch roles:
// started new chat (basic_sys or any other), at the start it yet be saved or have chatbody
// if it does not have a card or chars, it'll return an empty slice
// log error
logger.Warn("failed to find current card in sysMap", "agent", currentChat.Agent, "sysMap", sysMap)
return cbc
}
charset := []string{}
for _, name := range currentCard.Characters {
if !strInSlice(name, cbc) {
charset = append(charset, name)
}
}
charset = append(charset, cbc...)
return charset
}
// deepseekModelValidator checks that a DeepSeek endpoint is used with one of
// the model names DeepSeek accepts ("deepseek-chat" or "deepseek-reasoner").
// On a mismatch the user is notified; the function only returns an error when
// the notification itself fails, so a bad model name does not abort the flow.
func deepseekModelValidator() error {
	if cfg.CurrentAPI != cfg.DeepSeekChatAPI && cfg.CurrentAPI != cfg.DeepSeekCompletionAPI {
		return nil // not a DeepSeek endpoint; nothing to validate
	}
	if chatBody.Model == "deepseek-chat" || chatBody.Model == "deepseek-reasoner" {
		return nil
	}
	if err := notifyUser("bad request", "wrong deepseek model name"); err != nil {
		// fixed typo in log message ("failed ot" -> "failed to")
		logger.Warn("failed to notify user", "error", err)
		return err
	}
	return nil
}
// == shellmode ==
// toggleShellMode flips the global shell-mode flag, updates the input
// placeholder to match the new mode, and refreshes the status line.
func toggleShellMode() {
	shellMode = !shellMode
	// Normal-mode placeholder is the default; shell mode overrides it.
	placeholder := "input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message. Alt+1 to exit shell mode"
	if shellMode {
		placeholder = "SHELL MODE: Enter command and press <Esc> to execute"
	}
	textArea.SetPlaceholder(placeholder)
	updateStatusLine()
}
// updateFlexLayout rebuilds the main flex container (chat view, input area,
// optional status line) and restores focus. In fullscreen mode the flex
// already holds only the focused widget, so nothing is rebuilt.
func updateFlexLayout() {
	if fullscreenMode {
		return
	}
	flex.Clear()
	flex.AddItem(textView, 0, 40, false)
	flex.AddItem(textArea, 0, 10, false)
	if positionVisible {
		flex.AddItem(statusLineWidget, 0, 2, false)
	}
	// Restore focus: keep the chat view focused if it was; otherwise
	// focus falls back to the input area.
	if app.GetFocus() == textView {
		app.SetFocus(textView)
	} else {
		app.SetFocus(textArea)
	}
}
// executeCommandAndDisplay runs a shell command, echoes the command and its
// output into the chat textView with color tags, and appends a combined
// command+output message to the chat history under the tool role so the LLM
// can see what was executed.
func executeCommandAndDisplay(cmdText string) {
	// Parse the command (split by spaces, but handle quoted arguments)
	cmdParts := parseCommand(cmdText)
	if len(cmdParts) == 0 {
		fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n")
		if scrollToEndEnabled {
			textView.ScrollToEnd()
		}
		colorText()
		return
	}
	command := cmdParts[0]
	args := []string{}
	if len(cmdParts) > 1 {
		args = cmdParts[1:]
	}
	// Create the command execution
	cmd := exec.Command(command, args...)
	// Execute the command and get output.
	// CombinedOutput blocks until the command exits; stdout and stderr
	// are merged into a single byte slice.
	output, err := cmd.CombinedOutput()
	// Add the command being executed to the chat
	fmt.Fprintf(textView, "\n[yellow]$ %s[-:-:-]\n", cmdText)
	// outputContent collects the plain (untagged) text destined for chat history.
	var outputContent string
	if err != nil {
		// Include both output and error
		errorMsg := "Error: " + err.Error()
		fmt.Fprintf(textView, "[red]%s[-:-:-]\n", errorMsg)
		if len(output) > 0 {
			outputStr := string(output)
			fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputStr)
			outputContent = errorMsg + "\n" + outputStr
		} else {
			outputContent = errorMsg
		}
	} else {
		// Only output if successful
		if len(output) > 0 {
			outputStr := string(output)
			fmt.Fprintf(textView, "[green]%s[-:-:-]\n", outputStr)
			outputContent = outputStr
		} else {
			successMsg := "Command executed successfully (no output)"
			fmt.Fprintf(textView, "[green]%s[-:-:-]\n", successMsg)
			outputContent = successMsg
		}
	}
	// Combine command and output in a single message for chat history
	combinedContent := "$ " + cmdText + "\n\n" + outputContent
	combinedMsg := models.RoleMsg{
		Role:    cfg.ToolRole,
		Content: combinedContent,
	}
	chatBody.Messages = append(chatBody.Messages, combinedMsg)
	// Scroll to end and update colors
	if scrollToEndEnabled {
		textView.ScrollToEnd()
	}
	colorText()
}
// parseCommand splits a command line into arguments, honoring single and
// double quotes so quoted substrings may contain spaces or tabs. Unlike a
// naive split, an empty quoted string ("" or '') yields an empty argument,
// matching shell semantics. Escaped quotes (\") are not supported; an
// unterminated quote consumes the rest of the input.
func parseCommand(cmd string) []string {
	var args []string
	var current strings.Builder
	inQuotes := false
	hasToken := false // true once the current argument has started (covers "")
	var quoteChar rune
	for _, r := range cmd {
		switch {
		case r == '"' || r == '\'':
			if inQuotes && r == quoteChar {
				inQuotes = false // closing quote; argument may still be empty
			} else if inQuotes {
				current.WriteRune(r) // the other quote char is literal inside quotes
			} else {
				inQuotes = true
				quoteChar = r
				hasToken = true // a quote opens an argument even if it stays empty
			}
		case (r == ' ' || r == '\t') && !inQuotes:
			if hasToken {
				args = append(args, current.String())
				current.Reset()
				hasToken = false
			}
		default:
			current.WriteRune(r)
			hasToken = true
		}
	}
	if hasToken {
		args = append(args, current.String())
	}
	return args
}
// == search ==
// Global variables for search state
var searchResults []int          // byte offsets of matches within the formatted textView content
var searchResultLengths []int    // To store the length of each match in the formatted string
var searchIndex int              // index into searchResults of the currently highlighted match
var searchText string            // active search term ("" means no search in progress)
var originalTextForSearch string // formatted text snapshot the offsets refer to
// performSearch searches for the given term in the textView content and highlights matches.
// Matching is case-insensitive. Results are stored in the package-level
// search state (searchResults/searchResultLengths/searchIndex), which is
// consumed by highlightCurrentMatch. An empty term clears the search.
func performSearch(term string) {
	searchText = term
	if searchText == "" {
		// Clear all search state and restore the plain chat rendering.
		searchResults = nil
		searchResultLengths = nil
		originalTextForSearch = ""
		// Re-render text without highlights
		textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
		colorText()
		return
	}
	// Get formatted text and search directly in it to avoid mapping issues
	formattedText := textView.GetText(true)
	originalTextForSearch = formattedText
	// Case-insensitive matching via lowercasing both needle and haystack.
	// NOTE(review): positions are byte offsets into the lowered string; for
	// non-ASCII text, ToLower can change byte lengths and misalign these
	// offsets against the original text — confirm inputs are ASCII-safe.
	searchTermLower := strings.ToLower(searchText)
	formattedTextLower := strings.ToLower(formattedText)
	// Find all occurrences of the search term in the formatted text directly
	formattedSearchResults := []int{}
	searchStart := 0
	for {
		pos := strings.Index(formattedTextLower[searchStart:], searchTermLower)
		if pos == -1 {
			break
		}
		absolutePos := searchStart + pos
		formattedSearchResults = append(formattedSearchResults, absolutePos)
		// Resume scanning after this match (non-overlapping matches only).
		searchStart = absolutePos + len(searchText)
	}
	if len(formattedSearchResults) == 0 {
		// No matches found
		searchResults = nil
		searchResultLengths = nil
		notification := "Pattern not found: " + term
		if err := notifyUser("search", notification); err != nil {
			logger.Error("failed to send notification", "error", err)
		}
		return
	}
	// Store the formatted text positions and lengths for accurate highlighting
	searchResults = formattedSearchResults
	// Create lengths array - all matches have the same length as the search term
	searchResultLengths = make([]int, len(formattedSearchResults))
	for i := range searchResultLengths {
		searchResultLengths[i] = len(searchText)
	}
	// Jump to the first match.
	searchIndex = 0
	highlightCurrentMatch()
}
// highlightCurrentMatch re-renders the captured text with region tags
// around every match, activates the region of the current match, scrolls
// to it, and notifies the user which match (k of n) is active.
func highlightCurrentMatch() {
	if len(searchResults) == 0 || searchIndex >= len(searchResults) {
		return // nothing to highlight
	}
	// Rebuild the view from the text captured at search time; tview needs
	// region tags in the text for Highlight/ScrollToHighlight to work.
	tagged := addRegionTags(originalTextForSearch, searchResults,
		searchResultLengths, searchIndex, searchText)
	textView.SetText(tagged)
	// Region ids follow the search_<pos>_<idx> scheme used by addRegionTags.
	region := fmt.Sprintf("search_%d_%d", searchResults[searchIndex], searchIndex)
	textView.Highlight(region).ScrollToHighlight()
	msg := fmt.Sprintf("Match %d of %d", searchIndex+1, len(searchResults))
	if err := notifyUser("search", msg); err != nil {
		logger.Error("failed to send notification", "error", err)
	}
}
// showSearchBar overlays the search input field above the main layout
// and moves keyboard focus to it.
func showSearchBar() {
	// Stack the search field (fixed height 3) on top of the main layout.
	overlay := tview.NewFlex().SetDirection(tview.FlexRow).
		AddItem(searchField, 3, 0, true).
		AddItem(flex, 0, 1, false)
	pages.AddPage(searchPageName, overlay, true, true)
	app.SetFocus(searchField)
}
// hideSearchBar removes the search overlay, returns focus to the chat
// text view, and clears the input for the next search.
func hideSearchBar() {
	pages.RemovePage(searchPageName)
	app.SetFocus(textView)
	searchField.SetText("")
}
// Global variables for index overlay functionality
var indexPageName = "indexOverlay" // page name for the message-index input overlay
// showIndexBar overlays the message-index input field above the main
// layout and moves keyboard focus to it.
func showIndexBar() {
	// Stack the index field (fixed height 3) on top of the main layout.
	overlay := tview.NewFlex().SetDirection(tview.FlexRow).
		AddItem(indexPickWindow, 3, 0, true).
		AddItem(flex, 0, 1, false)
	pages.AddPage(indexPageName, overlay, true, true)
	app.SetFocus(indexPickWindow)
}
// hideIndexBar removes the index overlay, returns focus to the chat
// text view, and clears the input for next use.
func hideIndexBar() {
	pages.RemovePage(indexPageName)
	app.SetFocus(textView)
	indexPickWindow.SetText("")
}
// addRegionTags wraps each match in the text with a tview region tag plus
// color tags so matches can be highlighted and scrolled to. The match at
// currentIdx gets the "current" color scheme (yellow on blue, bold); all
// other matches get the secondary scheme (gold on red, underlined).
// Region ids have the form search_<byteOffset>_<matchIndex>, which keeps
// them unique. positions must be sorted ascending and non-overlapping.
// searchTerm is currently unused; it is kept for interface stability.
func addRegionTags(text string, positions []int, lengths []int, currentIdx int, searchTerm string) string {
	if len(positions) == 0 {
		return text
	}
	const closeTags = `[-:-:-][""]` // reset colors, then close the region
	var b strings.Builder
	prev := 0
	for i, start := range positions {
		end := start + lengths[i]
		// Untagged text between the previous match and this one.
		b.WriteString(text[prev:start])
		region := fmt.Sprintf("search_%d_%d", start, i)
		if i == currentIdx {
			// Current match: special highlight.
			fmt.Fprintf(&b, `["%s"][yellow:blue:b]`, region)
		} else {
			// Other matches: regular highlight.
			fmt.Fprintf(&b, `["%s"][gold:red:u]`, region)
		}
		b.WriteString(text[start:end])
		b.WriteString(closeTags)
		prev = end
	}
	// Remainder of the text after the last match.
	b.WriteString(text[prev:])
	return b.String()
}
// searchNext advances to the next match, wrapping to the first after the
// last, and highlights it. Notifies the user when there are no results.
func searchNext() {
	n := len(searchResults)
	if n == 0 {
		if err := notifyUser("search", "No search results to navigate"); err != nil {
			logger.Error("failed to send notification", "error", err)
		}
		return
	}
	searchIndex++
	if searchIndex >= n {
		searchIndex = 0 // wrap around
	}
	highlightCurrentMatch()
}
// searchPrev steps back to the previous match, wrapping to the last match
// from the first, and highlights it. Notifies the user when there are no
// results to navigate.
func searchPrev() {
	n := len(searchResults)
	if n == 0 {
		if err := notifyUser("search", "No search results to navigate"); err != nil {
			logger.Error("failed to send notification", "error", err)
		}
		return
	}
	// Modular decrement; searchIndex is always in [0, n).
	searchIndex = (searchIndex - 1 + n) % n
	highlightCurrentMatch()
}
// == tab completion ==
// scanFiles lists the non-hidden entries of dir whose names start with
// filter (case-insensitive); an empty filter matches everything.
// Directory names get a trailing "/" so completion can descend into them.
// An unreadable or missing dir yields an empty result.
func scanFiles(dir, filter string) []string {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil // best-effort: no completions for unreadable dirs
	}
	wantPrefix := strings.ToLower(filter)
	var matches []string
	for _, e := range entries {
		name := e.Name()
		if strings.HasPrefix(name, ".") {
			continue // skip hidden files and directories
		}
		if wantPrefix != "" && !strings.HasPrefix(strings.ToLower(name), wantPrefix) {
			continue
		}
		if e.IsDir() {
			name += "/"
		}
		matches = append(matches, name)
	}
	return matches
}

358
llm.go
View File

@@ -34,10 +34,31 @@ func ClearImageAttachment() {
imageAttachmentPath = "" imageAttachmentPath = ""
} }
// filterMessagesForCurrentCharacter filters messages based on char-specific context.
// Returns filtered messages and the bot persona role (target character).
func filterMessagesForCurrentCharacter(messages []models.RoleMsg) ([]models.RoleMsg, string) {
botPersona := cfg.AssistantRole
if cfg.WriteNextMsgAsCompletionAgent != "" {
botPersona = cfg.WriteNextMsgAsCompletionAgent
}
if cfg == nil || !cfg.CharSpecificContextEnabled {
return messages, botPersona
}
// get last message (written by user) and checck if it has a tag
lm := messages[len(messages)-1]
recipient, ok := getValidKnowToRecipient(&lm)
if ok && recipient != "" {
botPersona = recipient
}
filtered := filterMessagesForCharacter(messages, botPersona)
return filtered, botPersona
}
type ChunkParser interface { type ChunkParser interface {
ParseChunk([]byte) (*models.TextChunk, error) ParseChunk([]byte) (*models.TextChunk, error)
FormMsg(msg, role string, cont bool) (io.Reader, error) FormMsg(msg, role string, cont bool) (io.Reader, error)
GetToken() string GetToken() string
GetAPIType() models.APIType
} }
func choseChunkParser() { func choseChunkParser() {
@@ -87,6 +108,10 @@ type OpenRouterChat struct {
Model string Model string
} }
func (lcp LCPCompletion) GetAPIType() models.APIType {
return models.APITypeCompletion
}
func (lcp LCPCompletion) GetToken() string { func (lcp LCPCompletion) GetToken() string {
return "" return ""
} }
@@ -98,7 +123,8 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
if localImageAttachmentPath != "" { if localImageAttachmentPath != "" {
imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath) imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
if err != nil { if err != nil {
logger.Error("failed to create image URL from path for completion", "error", err, "path", localImageAttachmentPath) logger.Error("failed to create image URL from path for completion",
"error", err, "path", localImageAttachmentPath)
return nil, err return nil, err
} }
// Extract base64 part from data URL (e.g., "data:image/jpeg;base64,...") // Extract base64 part from data URL (e.g., "data:image/jpeg;base64,...")
@@ -113,46 +139,35 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
} }
if msg != "" { // otherwise let the bot to continue if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
newMsg = *processMessageTag(&newMsg)
chatBody.Messages = append(chatBody.Messages, newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
if !resume { // if rag - add as system message to avoid conflicts with tool usage
// if rag - add as system message to avoid conflicts with tool usage if !resume && cfg.RAGEnabled {
if cfg.RAGEnabled { um := chatBody.Messages[len(chatBody.Messages)-1].Content
um := chatBody.Messages[len(chatBody.Messages)-1].Content logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) ragResp, err := chatRagUse(um)
ragResp, err := chatRagUse(um) if err != nil {
if err != nil { logger.Error("failed to form a rag msg", "error", err)
logger.Error("failed to form a rag msg", "error", err) return nil, err
return nil, err
}
logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
logger.Debug("RAG response received", "response_len", len(ragResp),
"response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
// sending description of the tools and how to use them
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
// add to chat body
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
} }
messages := make([]string, len(chatBody.Messages)) filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
for i, m := range chatBody.Messages { messages := make([]string, len(filteredMessages))
messages[i] = m.ToPrompt() for i, m := range filteredMessages {
messages[i] = stripThinkingFromMsg(&m).ToPrompt()
} }
prompt := strings.Join(messages, "\n") prompt := strings.Join(messages, "\n")
// strings builder?
if !resume {
botPersona := cfg.AssistantRole
if cfg.WriteNextMsgAsCompletionAgent != "" {
botPersona = cfg.WriteNextMsgAsCompletionAgent
}
botMsgStart := "\n" + botPersona + ":\n"
prompt += botMsgStart
}
if cfg.ThinkUse && !cfg.ToolUse {
prompt += "<think>"
}
// Add multimodal media markers to the prompt text when multimodal data is present // Add multimodal media markers to the prompt text when multimodal data is present
// This is required by llama.cpp multimodal models so they know where to insert media // This is required by llama.cpp multimodal models so they know where to insert media
if len(multimodalData) > 0 { if len(multimodalData) > 0 {
@@ -164,10 +179,18 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
} }
prompt = sb.String() prompt = sb.String()
} }
// needs to be after <__media__> if there are images
if !resume {
botMsgStart := "\n" + botPersona + ":\n"
prompt += botMsgStart
}
if cfg.ThinkUse && !cfg.ToolUse {
prompt += "<think>"
}
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData)) "msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData, defaultLCPProps, chatBody.MakeStopSlice()) payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles()))
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)
@@ -193,7 +216,11 @@ func (lcp LCPCompletion) ParseChunk(data []byte) (*models.TextChunk, error) {
return resp, nil return resp, nil
} }
func (op LCPChat) GetToken() string { func (lcp LCPChat) GetAPIType() models.APIType {
return models.APITypeChat
}
func (lcp LCPChat) GetToken() string {
return "" return ""
} }
@@ -262,7 +289,7 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
// If image processing fails, fall back to simple text message // If image processing fails, fall back to simple text message
newMsg = models.NewRoleMsg(role, msg) newMsg = models.NewRoleMsg(role, msg)
} else { } else {
newMsg.AddImagePart(imageURL) newMsg.AddImagePart(imageURL, localImageAttachmentPath)
} }
// Only clear the global image attachment after successfully processing it in this API call // Only clear the global image attachment after successfully processing it in this API call
imageAttachmentPath = "" // Clear the attachment after use imageAttachmentPath = "" // Clear the attachment after use
@@ -270,42 +297,47 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
// Create a simple text message // Create a simple text message
newMsg = models.NewRoleMsg(role, msg) newMsg = models.NewRoleMsg(role, msg)
} }
newMsg = *processMessageTag(&newMsg)
chatBody.Messages = append(chatBody.Messages, newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages)) logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
"content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
} }
if !resume { // if rag - add as system message to avoid conflicts with tool usage
// if rag - add as system message to avoid conflicts with tool usage if !resume && cfg.RAGEnabled {
if cfg.RAGEnabled { um := chatBody.Messages[len(chatBody.Messages)-1].Content
um := chatBody.Messages[len(chatBody.Messages)-1].Content logger.Debug("LCPChat: RAG is enabled, preparing RAG context", "user_message", um)
logger.Debug("LCPChat: RAG is enabled, preparing RAG context", "user_message", um) ragResp, err := chatRagUse(um)
ragResp, err := chatRagUse(um) if err != nil {
if err != nil { logger.Error("LCPChat: failed to form a rag msg", "error", err)
logger.Error("LCPChat: failed to form a rag msg", "error", err) return nil, err
return nil, err
}
logger.Debug("LCPChat: RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role, "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages))
} }
logger.Debug("LCPChat: RAG response received",
"response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role,
"rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages))
} }
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// openai /v1/chat does not support custom roles; needs to be user, assistant, system // openai /v1/chat does not support custom roles; needs to be user, assistant, system
// Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(chatBody.Messages)), Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.Model, Model: chatBody.Model,
Stream: chatBody.Stream, Stream: chatBody.Stream,
} }
for i, msg := range chatBody.Messages { for i, msg := range filteredMessages {
if msg.Role == cfg.UserRole { strippedMsg := *stripThinkingFromMsg(&msg)
bodyCopy.Messages[i] = msg if strippedMsg.Role == cfg.UserRole {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user" bodyCopy.Messages[i].Role = "user"
} else { } else {
bodyCopy.Messages[i] = msg bodyCopy.Messages[i] = strippedMsg
} }
} }
// Clean null/empty messages to prevent API issues // Clean null/empty messages to prevent API issues
bodyCopy.Messages = cleanNullMessages(bodyCopy.Messages) bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
req := models.OpenAIReq{ req := models.OpenAIReq{
ChatBody: bodyCopy, ChatBody: bodyCopy,
Tools: nil, Tools: nil,
@@ -322,6 +354,10 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
} }
// deepseek // deepseek
func (ds DeepSeekerCompletion) GetAPIType() models.APIType {
return models.APITypeCompletion
}
func (ds DeepSeekerCompletion) ParseChunk(data []byte) (*models.TextChunk, error) { func (ds DeepSeekerCompletion) ParseChunk(data []byte) (*models.TextChunk, error) {
llmchunk := models.DSCompletionResp{} llmchunk := models.DSCompletionResp{}
if err := json.Unmarshal(data, &llmchunk); err != nil { if err := json.Unmarshal(data, &llmchunk); err != nil {
@@ -346,43 +382,42 @@ func (ds DeepSeekerCompletion) GetToken() string {
func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) { func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("formmsg deepseekercompletion", "link", cfg.CurrentAPI) logger.Debug("formmsg deepseekercompletion", "link", cfg.CurrentAPI)
if err := deepseekModelValidator(); err != nil {
return nil, err
}
if msg != "" { // otherwise let the bot to continue if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
newMsg = *processMessageTag(&newMsg)
chatBody.Messages = append(chatBody.Messages, newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
if !resume { // if rag - add as system message to avoid conflicts with tool usage
// if rag - add as system message to avoid conflicts with tool usage if !resume && cfg.RAGEnabled {
// TODO: perhaps RAG should be a func/tool call instead? um := chatBody.Messages[len(chatBody.Messages)-1].Content
if cfg.RAGEnabled { logger.Debug("DeepSeekerCompletion: RAG is enabled, preparing RAG context", "user_message", um)
um := chatBody.Messages[len(chatBody.Messages)-1].Content ragResp, err := chatRagUse(um)
logger.Debug("DeepSeekerCompletion: RAG is enabled, preparing RAG context", "user_message", um) if err != nil {
ragResp, err := chatRagUse(um) logger.Error("DeepSeekerCompletion: failed to form a rag msg", "error", err)
if err != nil { return nil, err
logger.Error("DeepSeekerCompletion: failed to form a rag msg", "error", err)
return nil, err
}
logger.Debug("DeepSeekerCompletion: RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
logger.Debug("DeepSeekerCompletion: RAG response received",
"response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
// sending description of the tools and how to use them
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
// add to chat body
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
} }
messages := make([]string, len(chatBody.Messages)) filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
for i, m := range chatBody.Messages { messages := make([]string, len(filteredMessages))
messages[i] = m.ToPrompt() for i, m := range filteredMessages {
messages[i] = stripThinkingFromMsg(&m).ToPrompt()
} }
prompt := strings.Join(messages, "\n") prompt := strings.Join(messages, "\n")
// strings builder? // strings builder?
if !resume { if !resume {
botPersona := cfg.AssistantRole
if cfg.WriteNextMsgAsCompletionAgent != "" {
botPersona = cfg.WriteNextMsgAsCompletionAgent
}
botMsgStart := "\n" + botPersona + ":\n" botMsgStart := "\n" + botPersona + ":\n"
prompt += botMsgStart prompt += botMsgStart
} }
@@ -392,7 +427,8 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt) "msg", msg, "resume", resume, "prompt", prompt)
payload := models.NewDSCompletionReq(prompt, chatBody.Model, payload := models.NewDSCompletionReq(prompt, chatBody.Model,
defaultLCPProps["temp"], chatBody.MakeStopSlice()) defaultLCPProps["temp"],
chatBody.MakeStopSliceExcluding("", listChatRoles()))
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)
@@ -401,6 +437,10 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
return bytes.NewReader(data), nil return bytes.NewReader(data), nil
} }
func (ds DeepSeekerChat) GetAPIType() models.APIType {
return models.APITypeChat
}
func (ds DeepSeekerChat) ParseChunk(data []byte) (*models.TextChunk, error) { func (ds DeepSeekerChat) ParseChunk(data []byte) (*models.TextChunk, error) {
llmchunk := models.DSChatStreamResp{} llmchunk := models.DSChatStreamResp{}
if err := json.Unmarshal(data, &llmchunk); err != nil { if err := json.Unmarshal(data, &llmchunk); err != nil {
@@ -430,42 +470,49 @@ func (ds DeepSeekerChat) GetToken() string {
func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI) logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI)
if err := deepseekModelValidator(); err != nil {
return nil, err
}
if msg != "" { // otherwise let the bot continue if msg != "" { // otherwise let the bot continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
newMsg = *processMessageTag(&newMsg)
chatBody.Messages = append(chatBody.Messages, newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
if !resume { // if rag - add as system message to avoid conflicts with tool usage
// if rag - add as system message to avoid conflicts with tool usage if !resume && cfg.RAGEnabled {
if cfg.RAGEnabled { um := chatBody.Messages[len(chatBody.Messages)-1].Content
um := chatBody.Messages[len(chatBody.Messages)-1].Content logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) ragResp, err := chatRagUse(um)
ragResp, err := chatRagUse(um) if err != nil {
if err != nil { logger.Error("failed to form a rag msg", "error", err)
logger.Error("failed to form a rag msg", "error", err) return nil, err
return nil, err
}
logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
logger.Debug("RAG response received", "response_len", len(ragResp),
"response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
// Create copy of chat body with standardized user role
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(chatBody.Messages)), Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.Model, Model: chatBody.Model,
Stream: chatBody.Stream, Stream: chatBody.Stream,
} }
for i, msg := range chatBody.Messages { for i, msg := range filteredMessages {
if msg.Role == cfg.UserRole || i == 1 { strippedMsg := *stripThinkingFromMsg(&msg)
bodyCopy.Messages[i] = msg if strippedMsg.Role == cfg.UserRole || i == 1 {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user" bodyCopy.Messages[i].Role = "user"
} else { } else {
bodyCopy.Messages[i] = msg bodyCopy.Messages[i] = strippedMsg
} }
} }
// Clean null/empty messages to prevent API issues // Clean null/empty messages to prevent API issues
bodyCopy.Messages = cleanNullMessages(bodyCopy.Messages) bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
dsBody := models.NewDSChatReq(*bodyCopy) dsBody := models.NewDSChatReq(*bodyCopy)
data, err := json.Marshal(dsBody) data, err := json.Marshal(dsBody)
if err != nil { if err != nil {
@@ -476,6 +523,10 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
} }
// openrouter // openrouter
func (or OpenRouterCompletion) GetAPIType() models.APIType {
return models.APITypeCompletion
}
func (or OpenRouterCompletion) ParseChunk(data []byte) (*models.TextChunk, error) { func (or OpenRouterCompletion) ParseChunk(data []byte) (*models.TextChunk, error) {
llmchunk := models.OpenRouterCompletionResp{} llmchunk := models.OpenRouterCompletionResp{}
if err := json.Unmarshal(data, &llmchunk); err != nil { if err := json.Unmarshal(data, &llmchunk); err != nil {
@@ -502,50 +553,48 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
logger.Debug("formmsg openroutercompletion", "link", cfg.CurrentAPI) logger.Debug("formmsg openroutercompletion", "link", cfg.CurrentAPI)
if msg != "" { // otherwise let the bot to continue if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
newMsg = *processMessageTag(&newMsg)
chatBody.Messages = append(chatBody.Messages, newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
if !resume { // if rag - add as system message to avoid conflicts with tool usage
// if rag - add as system message to avoid conflicts with tool usage if !resume && cfg.RAGEnabled {
if cfg.RAGEnabled { um := chatBody.Messages[len(chatBody.Messages)-1].Content
um := chatBody.Messages[len(chatBody.Messages)-1].Content logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) ragResp, err := chatRagUse(um)
ragResp, err := chatRagUse(um) if err != nil {
if err != nil { logger.Error("failed to form a rag msg", "error", err)
logger.Error("failed to form a rag msg", "error", err) return nil, err
return nil, err
}
logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
logger.Debug("RAG response received", "response_len",
len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
// sending description of the tools and how to use them
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
// add to chat body
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
} }
messages := make([]string, len(chatBody.Messages)) filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
for i, m := range chatBody.Messages { messages := make([]string, len(filteredMessages))
messages[i] = m.ToPrompt() for i, m := range filteredMessages {
messages[i] = stripThinkingFromMsg(&m).ToPrompt()
} }
prompt := strings.Join(messages, "\n") prompt := strings.Join(messages, "\n")
// strings builder? // strings builder?
if !resume { if !resume {
botPersona := cfg.AssistantRole
if cfg.WriteNextMsgAsCompletionAgent != "" {
botPersona = cfg.WriteNextMsgAsCompletionAgent
}
botMsgStart := "\n" + botPersona + ":\n" botMsgStart := "\n" + botPersona + ":\n"
prompt += botMsgStart prompt += botMsgStart
} }
if cfg.ThinkUse && !cfg.ToolUse { if cfg.ThinkUse && !cfg.ToolUse {
prompt += "<think>" prompt += "<think>"
} }
ss := chatBody.MakeStopSlice() stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", ss) "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, defaultLCPProps, ss) payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt,
defaultLCPProps, stopSlice)
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)
@@ -555,6 +604,10 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
} }
// chat // chat
// GetAPIType reports which endpoint flavor this client speaks;
// OpenRouterChat uses the structured chat-completions API
// (models.APITypeChat), as opposed to the raw /completion API.
func (or OpenRouterChat) GetAPIType() models.APIType {
	return models.APITypeChat
}
func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) { func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
llmchunk := models.OpenRouterChatResp{} llmchunk := models.OpenRouterChatResp{}
if err := json.Unmarshal(data, &llmchunk); err != nil { if err := json.Unmarshal(data, &llmchunk); err != nil {
@@ -611,7 +664,7 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
// If image processing fails, fall back to simple text message // If image processing fails, fall back to simple text message
newMsg = models.NewRoleMsg(role, msg) newMsg = models.NewRoleMsg(role, msg)
} else { } else {
newMsg.AddImagePart(imageURL) newMsg.AddImagePart(imageURL, localImageAttachmentPath)
} }
// Only clear the global image attachment after successfully processing it in this API call // Only clear the global image attachment after successfully processing it in this API call
imageAttachmentPath = "" // Clear the attachment after use imageAttachmentPath = "" // Clear the attachment after use
@@ -619,41 +672,44 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
// Create a simple text message // Create a simple text message
newMsg = models.NewRoleMsg(role, msg) newMsg = models.NewRoleMsg(role, msg)
} }
newMsg = *processMessageTag(&newMsg)
chatBody.Messages = append(chatBody.Messages, newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
} }
if !resume { // if rag - add as system message to avoid conflicts with tool usage
// if rag - add as system message to avoid conflicts with tool usage if !resume && cfg.RAGEnabled {
if cfg.RAGEnabled { um := chatBody.Messages[len(chatBody.Messages)-1].Content
um := chatBody.Messages[len(chatBody.Messages)-1].Content logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) ragResp, err := chatRagUse(um)
ragResp, err := chatRagUse(um) if err != nil {
if err != nil { logger.Error("failed to form a rag msg", "error", err)
logger.Error("failed to form a rag msg", "error", err) return nil, err
return nil, err
}
logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
logger.Debug("RAG response received", "response_len", len(ragResp),
"response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
} }
// Create copy of chat body with standardized user role // Create copy of chat body with standardized user role
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(chatBody.Messages)), Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.Model, Model: chatBody.Model,
Stream: chatBody.Stream, Stream: chatBody.Stream,
} }
for i, msg := range chatBody.Messages { for i, msg := range filteredMessages {
bodyCopy.Messages[i] = msg strippedMsg := *stripThinkingFromMsg(&msg)
bodyCopy.Messages[i] = strippedMsg
// Standardize role if it's a user role // Standardize role if it's a user role
if bodyCopy.Messages[i].Role == cfg.UserRole { if bodyCopy.Messages[i].Role == cfg.UserRole {
bodyCopy.Messages[i] = msg bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user" bodyCopy.Messages[i].Role = "user"
} }
} }
// Clean null/empty messages to prevent API issues // Clean null/empty messages to prevent API issues
bodyCopy.Messages = cleanNullMessages(bodyCopy.Messages) bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps) orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps)
if cfg.ToolUse && !resume && role != cfg.ToolRole { if cfg.ToolUse && !resume && role != cfg.ToolRole {
orBody.Tools = baseTools // set tools to use orBody.Tools = baseTools // set tools to use

30
main.go
View File

@@ -1,35 +1,23 @@
package main package main
import ( import (
"flag"
"strconv"
"github.com/rivo/tview" "github.com/rivo/tview"
) )
var ( var (
botRespMode = false boolColors = map[bool]string{true: "green", false: "red"}
editMode = false botRespMode = false
roleEditMode = false editMode = false
injectRole = true roleEditMode = false
selectedIndex = int(-1) injectRole = true
currentAPIIndex = 0 // Index to track current API in ApiLinks slice selectedIndex = int(-1)
currentORModelIndex = 0 // Index to track current OpenRouter model in ORFreeModels slice shellMode = false
currentLocalModelIndex = 0 // Index to track current llama.cpp model thinkingCollapsed = false
shellMode = false indexLineCompletion = "F12 to show keys help | llm turn: [%s:-:b]%v[-:-:-] (F6) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [%s:-:b]%v[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [%s:-:b]%v[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | recording: [%s:-:b]%v[-:-:-] (ctrl+r) | writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role injection (alt+7) [%s:-:b]%v[-:-:-]"
// indexLine = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | ThinkUse: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q)"
indexLineCompletion = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | Bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role_inject [orange:-:b]%v[-:-:-]"
focusSwitcher = map[tview.Primitive]tview.Primitive{} focusSwitcher = map[tview.Primitive]tview.Primitive{}
) )
func main() { func main() {
apiPort := flag.Int("port", 0, "port to host api")
flag.Parse()
if apiPort != nil && *apiPort > 3000 {
srv := Server{}
srv.ListenToRequests(strconv.Itoa(*apiPort))
return
}
pages.AddPage("main", flex, true, true) pages.AddPage("main", flex, true, true)
if err := app.SetRoot(pages, if err := app.SetRoot(pages,
true).EnableMouse(cfg.EnableMouse).EnablePaste(true).Run(); err != nil { true).EnableMouse(cfg.EnableMouse).EnablePaste(true).Run(); err != nil {

View File

@@ -1,9 +1,9 @@
package main package main
import ( import (
"gf-lt/models"
"fmt" "fmt"
"gf-lt/config" "gf-lt/config"
"gf-lt/models"
"strings" "strings"
"testing" "testing"
) )

View File

@@ -31,18 +31,20 @@ func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName) fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName) sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)
return &CharCard{ return &CharCard{
SysPrompt: sysPr, SysPrompt: sysPr,
FirstMsg: fm, FirstMsg: fm,
Role: c.Name, Role: c.Name,
FilePath: fpath, FilePath: fpath,
Characters: []string{c.Name, userName},
} }
} }
type CharCard struct { type CharCard struct {
SysPrompt string `json:"sys_prompt"` SysPrompt string `json:"sys_prompt"`
FirstMsg string `json:"first_msg"` FirstMsg string `json:"first_msg"`
Role string `json:"role"` Role string `json:"role"`
FilePath string `json:"filepath"` Characters []string `json:"chars"`
FilePath string `json:"filepath"`
} }
func (cc *CharCard) ToSpec(userName string) *CharCardSpec { func (cc *CharCard) ToSpec(userName string) *CharCardSpec {

View File

@@ -14,7 +14,7 @@ type Chat struct {
UpdatedAt time.Time `db:"updated_at" json:"updated_at"` UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
} }
func (c Chat) ToHistory() ([]RoleMsg, error) { func (c *Chat) ToHistory() ([]RoleMsg, error) {
resp := []RoleMsg{} resp := []RoleMsg{}
if err := json.Unmarshal([]byte(c.Msgs), &resp); err != nil { if err := json.Unmarshal([]byte(c.Msgs), &resp); err != nil {
return nil, err return nil, err

View File

@@ -1,8 +1,49 @@
package models package models
import (
"regexp"
"strings"
)
type AudioFormat string type AudioFormat string
const ( const (
AFWav AudioFormat = "wav" AFWav AudioFormat = "wav"
AFMP3 AudioFormat = "mp3" AFMP3 AudioFormat = "mp3"
) )
var (
	threeOrMoreDashesRE = regexp.MustCompile(`-{3,}`)
	// htmlTagRE strips anything that looks like an HTML/XML tag.
	// Hoisted to package level so it is compiled once, not per call.
	htmlTagRE = regexp.MustCompile(`<[^>]*>`)
	// tableSeparatorRE matches markdown table separator lines such as
	// "|---|", "|===|" or "| - - - |": only |, -, =, and spaces between pipes.
	// Hoisted to package level; the original compiled it on every loop iteration.
	tableSeparatorRE = regexp.MustCompile(`^\s*\|\s*[-=\s]+\|\s*$`)
	// mdCharReplacer removes markdown punctuation that TTS engines would
	// otherwise read aloud, in a single pass over the text.
	mdCharReplacer = strings.NewReplacer(
		"*", "", // bold/italic markers
		"#", "", // headers
		"_", "", // underline/italic markers
		"~", "", // strikethrough markers
		"`", "", // code markers
		"[", "", // link brackets
		"]", "", // link brackets
		"!", "", // exclamation marks (if not punctuation)
	)
)

// CleanText removes markdown and special characters that are not suitable
// for TTS: inline markdown punctuation, HTML tags, markdown table separator
// rows, vertical bars, and runs of three or more dashes. Leading/trailing
// whitespace is trimmed from the result.
func CleanText(text string) string {
	// Remove markdown-like characters that might interfere with TTS.
	text = mdCharReplacer.Replace(text)
	// Remove HTML tags.
	text = htmlTagRE.ReplaceAllString(text, "")
	// Split text into lines to handle table separators: separator rows are
	// dropped entirely; other lines keep their content minus vertical bars.
	lines := strings.Split(text, "\n")
	filteredLines := make([]string, 0, len(lines))
	for _, line := range lines {
		if tableSeparatorRE.MatchString(strings.TrimSpace(line)) {
			continue // table separator row (e.g. |----|): skip it
		}
		filteredLines = append(filteredLines, strings.ReplaceAll(line, "|", ""))
	}
	text = strings.Join(filteredLines, "\n")
	text = threeOrMoreDashesRE.ReplaceAllString(text, "")
	return strings.TrimSpace(text)
}

View File

@@ -5,9 +5,22 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"os" "os"
"path/filepath"
"strings" "strings"
) )
// imageBaseDir controls how image paths are rendered for display: when
// non-empty, paths under this directory are shown relative to it; when
// empty, full paths are shown.
var imageBaseDir = ""

// SetImageBaseDir configures the directory that displayed image paths are
// made relative to. Passing an empty string restores full-path display.
func SetImageBaseDir(dir string) {
	imageBaseDir = dir
}
type FuncCall struct { type FuncCall struct {
ID string `json:"id,omitempty"` ID string `json:"id,omitempty"`
Name string `json:"name"` Name string `json:"name"`
@@ -82,6 +95,7 @@ type TextContentPart struct {
type ImageContentPart struct { type ImageContentPart struct {
Type string `json:"type"` Type string `json:"type"`
Path string `json:"path,omitempty"` // Store original file path
ImageURL struct { ImageURL struct {
URL string `json:"url"` URL string `json:"url"`
} `json:"image_url"` } `json:"image_url"`
@@ -89,37 +103,42 @@ type ImageContentPart struct {
// RoleMsg represents a message with content that can be either a simple string or structured content parts // RoleMsg represents a message with content that can be either a simple string or structured content parts
type RoleMsg struct { type RoleMsg struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"-"` Content string `json:"-"`
ContentParts []interface{} `json:"-"` ContentParts []any `json:"-"`
ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages
hasContentParts bool // Flag to indicate which content type to marshal KnownTo []string `json:"known_to,omitempty"`
hasContentParts bool // Flag to indicate which content type to marshal
} }
// MarshalJSON implements custom JSON marshaling for RoleMsg // MarshalJSON implements custom JSON marshaling for RoleMsg
func (m RoleMsg) MarshalJSON() ([]byte, error) { func (m *RoleMsg) MarshalJSON() ([]byte, error) {
if m.hasContentParts { if m.hasContentParts {
// Use structured content format // Use structured content format
aux := struct { aux := struct {
Role string `json:"role"` Role string `json:"role"`
Content []interface{} `json:"content"` Content []any `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
KnownTo []string `json:"known_to,omitempty"`
}{ }{
Role: m.Role, Role: m.Role,
Content: m.ContentParts, Content: m.ContentParts,
ToolCallID: m.ToolCallID, ToolCallID: m.ToolCallID,
KnownTo: m.KnownTo,
} }
return json.Marshal(aux) return json.Marshal(aux)
} else { } else {
// Use simple content format // Use simple content format
aux := struct { aux := struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
KnownTo []string `json:"known_to,omitempty"`
}{ }{
Role: m.Role, Role: m.Role,
Content: m.Content, Content: m.Content,
ToolCallID: m.ToolCallID, ToolCallID: m.ToolCallID,
KnownTo: m.KnownTo,
} }
return json.Marshal(aux) return json.Marshal(aux)
} }
@@ -129,23 +148,26 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
func (m *RoleMsg) UnmarshalJSON(data []byte) error { func (m *RoleMsg) UnmarshalJSON(data []byte) error {
// First, try to unmarshal as structured content format // First, try to unmarshal as structured content format
var structured struct { var structured struct {
Role string `json:"role"` Role string `json:"role"`
Content []interface{} `json:"content"` Content []any `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
KnownTo []string `json:"known_to,omitempty"`
} }
if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 { if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 {
m.Role = structured.Role m.Role = structured.Role
m.ContentParts = structured.Content m.ContentParts = structured.Content
m.ToolCallID = structured.ToolCallID m.ToolCallID = structured.ToolCallID
m.KnownTo = structured.KnownTo
m.hasContentParts = true m.hasContentParts = true
return nil return nil
} }
// Otherwise, unmarshal as simple content format // Otherwise, unmarshal as simple content format
var simple struct { var simple struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
KnownTo []string `json:"known_to,omitempty"`
} }
if err := json.Unmarshal(data, &simple); err != nil { if err := json.Unmarshal(data, &simple); err != nil {
return err return err
@@ -153,53 +175,101 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
m.Role = simple.Role m.Role = simple.Role
m.Content = simple.Content m.Content = simple.Content
m.ToolCallID = simple.ToolCallID m.ToolCallID = simple.ToolCallID
m.KnownTo = simple.KnownTo
m.hasContentParts = false m.hasContentParts = false
return nil return nil
} }
func (m RoleMsg) ToText(i int) string { func (m *RoleMsg) ToText(i int) string {
icon := fmt.Sprintf("(%d)", i)
// Convert content to string representation // Convert content to string representation
contentStr := "" var contentStr string
var imageIndicators []string
if !m.hasContentParts { if !m.hasContentParts {
contentStr = m.Content contentStr = m.Content
} else { } else {
// For structured content, just take the text parts // For structured content, collect text parts and image indicators
var textParts []string var textParts []string
for _, part := range m.ContentParts { for _, part := range m.ContentParts {
if partMap, ok := part.(map[string]interface{}); ok { switch p := part.(type) {
if partType, exists := partMap["type"]; exists && partType == "text" { case TextContentPart:
if textVal, textExists := partMap["text"]; textExists { if p.Type == "text" {
if textStr, isStr := textVal.(string); isStr { textParts = append(textParts, p.Text)
textParts = append(textParts, textStr) }
case ImageContentPart:
// Collect image indicator
displayPath := p.Path
if displayPath == "" {
displayPath = "image"
} else {
displayPath = extractDisplayPath(displayPath)
}
imageIndicators = append(imageIndicators, fmt.Sprintf("[orange::i][image: %s][-:-:-]", displayPath))
case map[string]any:
if partType, exists := p["type"]; exists {
switch partType {
case "text":
if textVal, textExists := p["text"]; textExists {
if textStr, isStr := textVal.(string); isStr {
textParts = append(textParts, textStr)
}
} }
case "image_url":
// Handle unmarshaled image content
var displayPath string
if pathVal, pathExists := p["path"]; pathExists {
if pathStr, isStr := pathVal.(string); isStr && pathStr != "" {
displayPath = extractDisplayPath(pathStr)
}
}
if displayPath == "" {
displayPath = "image"
}
imageIndicators = append(imageIndicators, fmt.Sprintf("[orange::i][image: %s][-:-:-]", displayPath))
} }
} }
} }
} }
contentStr = strings.Join(textParts, " ") + " " contentStr = strings.Join(textParts, " ") + " "
} }
// check if already has role annotation (/completion makes them) // check if already has role annotation (/completion makes them)
if !strings.HasPrefix(contentStr, m.Role+":") { // in that case remove it, and then add to icon
icon = fmt.Sprintf("(%d) <%s>: ", i, m.Role) // since icon and content are separated by \n
contentStr, _ = strings.CutPrefix(contentStr, m.Role+":")
// if !strings.HasPrefix(contentStr, m.Role+":") {
icon := fmt.Sprintf("(%d) <%s>: ", i, m.Role)
// }
// Build final message with image indicators before text
var finalContent strings.Builder
if len(imageIndicators) > 0 {
// Add each image indicator on its own line
for _, indicator := range imageIndicators {
finalContent.WriteString(indicator)
finalContent.WriteString("\n")
}
} }
textMsg := fmt.Sprintf("[-:-:b]%s[-:-:-]\n%s\n", icon, contentStr) finalContent.WriteString(contentStr)
textMsg := fmt.Sprintf("[-:-:b]%s[-:-:-]\n%s\n", icon, finalContent.String())
return strings.ReplaceAll(textMsg, "\n\n", "\n") return strings.ReplaceAll(textMsg, "\n\n", "\n")
} }
func (m RoleMsg) ToPrompt() string { func (m *RoleMsg) ToPrompt() string {
contentStr := "" var contentStr string
if !m.hasContentParts { if !m.hasContentParts {
contentStr = m.Content contentStr = m.Content
} else { } else {
// For structured content, just take the text parts // For structured content, just take the text parts
var textParts []string var textParts []string
for _, part := range m.ContentParts { for _, part := range m.ContentParts {
if partMap, ok := part.(map[string]interface{}); ok { switch p := part.(type) {
if partType, exists := partMap["type"]; exists && partType == "text" { case TextContentPart:
if textVal, textExists := partMap["text"]; textExists { if p.Type == "text" {
textParts = append(textParts, p.Text)
}
case ImageContentPart:
// skip images for text display
case map[string]any:
if partType, exists := p["type"]; exists && partType == "text" {
if textVal, textExists := p["text"]; textExists {
if textStr, isStr := textVal.(string); isStr { if textStr, isStr := textVal.(string); isStr {
textParts = append(textParts, textStr) textParts = append(textParts, textStr)
} }
@@ -222,7 +292,7 @@ func NewRoleMsg(role, content string) RoleMsg {
} }
// NewMultimodalMsg creates a RoleMsg with structured content parts (text and images) // NewMultimodalMsg creates a RoleMsg with structured content parts (text and images)
func NewMultimodalMsg(role string, contentParts []interface{}) RoleMsg { func NewMultimodalMsg(role string, contentParts []any) RoleMsg {
return RoleMsg{ return RoleMsg{
Role: role, Role: role,
ContentParts: contentParts, ContentParts: contentParts,
@@ -231,7 +301,7 @@ func NewMultimodalMsg(role string, contentParts []interface{}) RoleMsg {
} }
// HasContent returns true if the message has either string content or structured content parts // HasContent returns true if the message has either string content or structured content parts
func (m RoleMsg) HasContent() bool { func (m *RoleMsg) HasContent() bool {
if m.Content != "" { if m.Content != "" {
return true return true
} }
@@ -242,22 +312,23 @@ func (m RoleMsg) HasContent() bool {
} }
// IsContentParts returns true if the message uses structured content parts // IsContentParts returns true if the message uses structured content parts
func (m RoleMsg) IsContentParts() bool { func (m *RoleMsg) IsContentParts() bool {
return m.hasContentParts return m.hasContentParts
} }
// GetContentParts returns the content parts of the message // GetContentParts returns the content parts of the message
func (m RoleMsg) GetContentParts() []interface{} { func (m *RoleMsg) GetContentParts() []any {
return m.ContentParts return m.ContentParts
} }
// Copy creates a copy of the RoleMsg with all fields // Copy creates a copy of the RoleMsg with all fields
func (m RoleMsg) Copy() RoleMsg { func (m *RoleMsg) Copy() RoleMsg {
return RoleMsg{ return RoleMsg{
Role: m.Role, Role: m.Role,
Content: m.Content, Content: m.Content,
ContentParts: m.ContentParts, ContentParts: m.ContentParts,
ToolCallID: m.ToolCallID, ToolCallID: m.ToolCallID,
KnownTo: m.KnownTo,
hasContentParts: m.hasContentParts, hasContentParts: m.hasContentParts,
} }
} }
@@ -267,9 +338,9 @@ func (m *RoleMsg) AddTextPart(text string) {
if !m.hasContentParts { if !m.hasContentParts {
// Convert to content parts format // Convert to content parts format
if m.Content != "" { if m.Content != "" {
m.ContentParts = []interface{}{TextContentPart{Type: "text", Text: m.Content}} m.ContentParts = []any{TextContentPart{Type: "text", Text: m.Content}}
} else { } else {
m.ContentParts = []interface{}{} m.ContentParts = []any{}
} }
m.hasContentParts = true m.hasContentParts = true
} }
@@ -279,19 +350,20 @@ func (m *RoleMsg) AddTextPart(text string) {
} }
// AddImagePart adds an image content part to the message // AddImagePart adds an image content part to the message
func (m *RoleMsg) AddImagePart(imageURL string) { func (m *RoleMsg) AddImagePart(imageURL, imagePath string) {
if !m.hasContentParts { if !m.hasContentParts {
// Convert to content parts format // Convert to content parts format
if m.Content != "" { if m.Content != "" {
m.ContentParts = []interface{}{TextContentPart{Type: "text", Text: m.Content}} m.ContentParts = []any{TextContentPart{Type: "text", Text: m.Content}}
} else { } else {
m.ContentParts = []interface{}{} m.ContentParts = []any{}
} }
m.hasContentParts = true m.hasContentParts = true
} }
imagePart := ImageContentPart{ imagePart := ImageContentPart{
Type: "image_url", Type: "image_url",
Path: imagePath, // Store the original file path
ImageURL: struct { ImageURL: struct {
URL string `json:"url"` URL string `json:"url"`
}{URL: imageURL}, }{URL: imageURL},
@@ -331,6 +403,31 @@ func CreateImageURLFromPath(imagePath string) (string, error) {
return fmt.Sprintf("data:%s;base64,%s", mimeType, encoded), nil return fmt.Sprintf("data:%s;base64,%s", mimeType, encoded), nil
} }
// extractDisplayPath converts an image path into a short form suitable for
// display: relative to imageBaseDir when the path lies inside it, and
// truncated to the trailing 60 bytes (prefixed with "...") when still long.
func extractDisplayPath(p string) string {
	if p == "" {
		return ""
	}
	display := p
	// Prefer a path relative to the configured base directory, but only when
	// the target actually lives inside it: a Rel result beginning with ".."
	// means the path escapes the base dir, in which case the full path wins.
	if imageBaseDir != "" {
		rel, err := filepath.Rel(imageBaseDir, display)
		if err == nil && !strings.HasPrefix(rel, "..") {
			display = rel
		}
	}
	const maxDisplayLen = 60
	if len(display) <= maxDisplayLen {
		return display
	}
	// Keep the tail of the path (filename end) and mark the truncation.
	return "..." + display[len(display)-maxDisplayLen:]
}
type ChatBody struct { type ChatBody struct {
Model string `json:"model"` Model string `json:"model"`
Stream bool `json:"stream"` Stream bool `json:"stream"`
@@ -359,13 +456,27 @@ func (cb *ChatBody) ListRoles() []string {
} }
func (cb *ChatBody) MakeStopSlice() []string { func (cb *ChatBody) MakeStopSlice() []string {
namesMap := make(map[string]struct{}) return cb.MakeStopSliceExcluding("", cb.ListRoles())
for _, m := range cb.Messages { }
namesMap[m.Role] = struct{}{}
} func (cb *ChatBody) MakeStopSliceExcluding(
ss := []string{"<|im_end|>"} excludeRole string, roleList []string,
for k := range namesMap { ) []string {
ss = append(ss, k+":\n") ss := []string{}
for _, role := range roleList {
// Skip the excluded role (typically the current speaker)
if role == excludeRole {
continue
}
// Add multiple variations to catch different formatting
ss = append(ss,
role+":\n", // Most common: role with newline
role+":", // Role with colon but no newline
role+": ", // Role with colon and single space
role+": ", // Role with colon and double space (common tokenization)
role+": \n", // Role with colon and double space (common tokenization)
role+": ", // Role with colon and triple space
)
} }
return ss return ss
} }
@@ -443,12 +554,12 @@ type LlamaCPPReq struct {
Stream bool `json:"stream"` Stream bool `json:"stream"`
// For multimodal requests, prompt should be an object with prompt_string and multimodal_data // For multimodal requests, prompt should be an object with prompt_string and multimodal_data
// For regular requests, prompt is a string // For regular requests, prompt is a string
Prompt interface{} `json:"prompt"` // Can be string or object with prompt_string and multimodal_data Prompt any `json:"prompt"` // Can be string or object with prompt_string and multimodal_data
Temperature float32 `json:"temperature"` Temperature float32 `json:"temperature"`
DryMultiplier float32 `json:"dry_multiplier"` DryMultiplier float32 `json:"dry_multiplier"`
Stop []string `json:"stop"` Stop []string `json:"stop"`
MinP float32 `json:"min_p"` MinP float32 `json:"min_p"`
NPredict int32 `json:"n_predict"` NPredict int32 `json:"n_predict"`
// MaxTokens int `json:"max_tokens"` // MaxTokens int `json:"max_tokens"`
// DryBase float64 `json:"dry_base"` // DryBase float64 `json:"dry_base"`
// DryAllowedLength int `json:"dry_allowed_length"` // DryAllowedLength int `json:"dry_allowed_length"`
@@ -476,7 +587,7 @@ type PromptObject struct {
} }
func NewLCPReq(prompt, model string, multimodalData []string, props map[string]float32, stopStrings []string) LlamaCPPReq { func NewLCPReq(prompt, model string, multimodalData []string, props map[string]float32, stopStrings []string) LlamaCPPReq {
var finalPrompt interface{} var finalPrompt any
if len(multimodalData) > 0 { if len(multimodalData) > 0 {
// When multimodal data is present, use the object format as per Python example: // When multimodal data is present, use the object format as per Python example:
// { "prompt": { "prompt_string": "...", "multimodal_data": [...] } } // { "prompt": { "prompt_string": "...", "multimodal_data": [...] } }
@@ -523,9 +634,23 @@ type LCPModels struct {
} }
func (lcp *LCPModels) ListModels() []string { func (lcp *LCPModels) ListModels() []string {
resp := []string{} resp := make([]string, 0, len(lcp.Data))
for _, model := range lcp.Data { for _, model := range lcp.Data {
resp = append(resp, model.ID) resp = append(resp, model.ID)
} }
return resp return resp
} }
// ChatRoundReq bundles the parameters of one request round in the chat
// loop. NOTE(review): field semantics inferred from names and the FormMsg
// signatures elsewhere in this package (msg, role, resume) — confirm
// against the caller.
type ChatRoundReq struct {
	UserMsg string // message text for this round
	Role    string // role the message is attributed to
	Regen   bool   // presumably: regenerate the previous reply — verify against caller
	Resume  bool   // presumably: continue the previous reply instead of starting anew — verify
}

// APIType distinguishes which endpoint flavor an API client speaks
// (see OpenRouterChat.GetAPIType, which returns APITypeChat).
type APIType int

const (
	// APITypeChat is the structured chat-completions style API.
	APITypeChat APIType = iota
	// APITypeCompletion is the raw text /completion style API.
	APITypeCompletion
)

167
models/models_test.go Normal file
View File

@@ -0,0 +1,167 @@
package models
import (
"strings"
"testing"
)
// TestRoleMsgToTextWithImages verifies that ToText renders an
// "[image: ...]" indicator for every image content part — for both the
// typed ImageContentPart form and the legacy map-based (unmarshaled)
// form — and that indicators appear before the text content.
// Uses `any` instead of `interface{}` for consistency with models.go.
func TestRoleMsgToTextWithImages(t *testing.T) {
	tests := []struct {
		name     string
		msg      RoleMsg
		index    int
		expected string // substring to check
	}{
		{
			name:  "text and image",
			index: 0,
			msg: func() RoleMsg {
				msg := NewMultimodalMsg("user", []any{})
				msg.AddTextPart("Look at this picture")
				msg.AddImagePart("data:image/jpeg;base64,abc123", "/home/user/Pictures/cat.jpg")
				return msg
			}(),
			expected: "[orange::i][image: /home/user/Pictures/cat.jpg][-:-:-]",
		},
		{
			name:  "image only",
			index: 1,
			msg: func() RoleMsg {
				msg := NewMultimodalMsg("user", []any{})
				msg.AddImagePart("data:image/png;base64,xyz789", "/tmp/screenshot_20250217_123456.png")
				return msg
			}(),
			expected: "[orange::i][image: /tmp/screenshot_20250217_123456.png][-:-:-]",
		},
		{
			name:  "long filename truncated",
			index: 2,
			msg: func() RoleMsg {
				msg := NewMultimodalMsg("user", []any{})
				msg.AddTextPart("Check this")
				msg.AddImagePart("data:image/jpeg;base64,foo", "/very/long/path/to/a/really_long_filename_that_exceeds_forty_characters.jpg")
				return msg
			}(),
			expected: "[orange::i][image: .../to/a/really_long_filename_that_exceeds_forty_characters.jpg][-:-:-]",
		},
		{
			name:  "multiple images",
			index: 3,
			msg: func() RoleMsg {
				msg := NewMultimodalMsg("user", []any{})
				msg.AddTextPart("Multiple images")
				msg.AddImagePart("data:image/jpeg;base64,a", "/path/img1.jpg")
				msg.AddImagePart("data:image/png;base64,b", "/path/img2.png")
				return msg
			}(),
			expected: "[orange::i][image: /path/img1.jpg][-:-:-]\n[orange::i][image: /path/img2.png][-:-:-]",
		},
		{
			// Legacy format: parts arrive as maps (after JSON unmarshal) with
			// no stored "path", so a generic "image" placeholder is shown.
			name:  "old format without path",
			index: 4,
			msg: RoleMsg{
				Role:            "user",
				hasContentParts: true,
				ContentParts: []any{
					map[string]any{
						"type": "image_url",
						"image_url": map[string]any{
							"url": "data:image/jpeg;base64,old",
						},
					},
				},
			},
			expected: "[orange::i][image: image][-:-:-]",
		},
		{
			// Legacy format with a stored "path": the path is displayed.
			name:  "old format with path",
			index: 5,
			msg: RoleMsg{
				Role:            "user",
				hasContentParts: true,
				ContentParts: []any{
					map[string]any{
						"type":      "image_url",
						"path":      "/old/path/photo.jpg",
						"image_url": map[string]any{
							"url": "data:image/jpeg;base64,old",
						},
					},
				},
			},
			expected: "[orange::i][image: /old/path/photo.jpg][-:-:-]",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.msg.ToText(tt.index)
			if !strings.Contains(result, tt.expected) {
				t.Errorf("ToText() result does not contain expected indicator\ngot: %s\nwant substring: %s", result, tt.expected)
			}
			// Ensure the indicator appears before text content
			if strings.Contains(tt.expected, "cat.jpg") && strings.Contains(result, "Look at this picture") {
				indicatorPos := strings.Index(result, "[orange::i][image: /home/user/Pictures/cat.jpg][-:-:-]")
				textPos := strings.Index(result, "Look at this picture")
				if indicatorPos == -1 || textPos == -1 || indicatorPos >= textPos {
					t.Errorf("image indicator should appear before text")
				}
			}
		})
	}
}
// TestExtractDisplayPath checks base-directory-relative shortening and
// long-path truncation performed by extractDisplayPath.
func TestExtractDisplayPath(t *testing.T) {
	// The function reads the package-level imageBaseDir, which each case
	// mutates; save it up front and restore it when the test finishes.
	savedBaseDir := imageBaseDir
	defer func() { imageBaseDir = savedBaseDir }()
	cases := []struct {
		name    string
		baseDir string
		path    string
		want    string
	}{
		{
			name:    "no base dir shows full path",
			baseDir: "",
			path:    "/home/user/images/cat.jpg",
			want:    "/home/user/images/cat.jpg",
		},
		{
			name:    "relative path within base dir",
			baseDir: "/home/user",
			path:    "/home/user/images/cat.jpg",
			want:    "images/cat.jpg",
		},
		{
			name:    "path outside base dir shows full path",
			baseDir: "/home/user",
			path:    "/tmp/test.jpg",
			want:    "/tmp/test.jpg",
		},
		{
			name:    "same directory",
			baseDir: "/home/user/images",
			path:    "/home/user/images/cat.jpg",
			want:    "cat.jpg",
		},
		{
			name:    "long path truncated",
			baseDir: "",
			path:    "/very/long/path/to/a/really_long_filename_that_exceeds_sixty_characters_limit_yes_it_is_very_long.jpg",
			want:    "..._that_exceeds_sixty_characters_limit_yes_it_is_very_long.jpg",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			imageBaseDir = tc.baseDir
			got := extractDisplayPath(tc.path)
			if got != tc.want {
				t.Errorf("extractDisplayPath(%q) with baseDir=%q = %q, want %q",
					tc.path, tc.baseDir, got, tc.want)
			}
		})
	}
}

View File

@@ -143,11 +143,14 @@ type ORModels struct {
func (orm *ORModels) ListModels(free bool) []string { func (orm *ORModels) ListModels(free bool) []string {
resp := []string{} resp := []string{}
for _, model := range orm.Data { for i := range orm.Data {
model := &orm.Data[i] // Take address of element to avoid copying
if free { if free {
if model.Pricing.Prompt == "0" && model.Pricing.Request == "0" && if model.Pricing.Prompt == "0" && model.Pricing.Completion == "0" {
model.Pricing.Completion == "0" { // treat missing request as free
resp = append(resp, model.ID) if model.Pricing.Request == "" || model.Pricing.Request == "0" {
resp = append(resp, model.ID)
}
} }
} else { } else {
resp = append(resp, model.ID) resp = append(resp, model.ID)

97
models/openrouter_test.go Normal file
View File

@@ -0,0 +1,97 @@
package models
import (
"encoding/json"
"os"
"path/filepath"
"testing"
)
// TestORModelsListModels covers ListModels filtering with a unit test over
// hardcoded pricing data and an optional integration test against the real
// or_models.json data file (skipped when the file is absent).
func TestORModelsListModels(t *testing.T) {
	t.Run("unit test with hardcoded data", func(t *testing.T) {
		// Four models: plainly free, paid, free with an explicit zero
		// request price, and free prompt/completion but a paid request.
		jsonData := `{
	"data": [
		{
			"id": "model/free",
			"pricing": {
				"prompt": "0",
				"completion": "0"
			}
		},
		{
			"id": "model/paid",
			"pricing": {
				"prompt": "0.001",
				"completion": "0.002"
			}
		},
		{
			"id": "model/request-zero",
			"pricing": {
				"prompt": "0",
				"completion": "0",
				"request": "0"
			}
		},
		{
			"id": "model/request-nonzero",
			"pricing": {
				"prompt": "0",
				"completion": "0",
				"request": "0.5"
			}
		}
	]
}`
		var models ORModels
		if err := json.Unmarshal([]byte(jsonData), &models); err != nil {
			t.Fatalf("failed to unmarshal test data: %v", err)
		}
		freeModels := models.ListModels(true)
		if len(freeModels) != 2 {
			t.Errorf("expected 2 free models, got %d: %v", len(freeModels), freeModels)
		}
		expectedFree := map[string]bool{"model/free": true, "model/request-zero": true}
		for _, id := range freeModels {
			if !expectedFree[id] {
				t.Errorf("unexpected free model ID: %s", id)
			}
		}
		allModels := models.ListModels(false)
		if len(allModels) != 4 {
			t.Errorf("expected 4 total models, got %d", len(allModels))
		}
	})
	t.Run("integration with or_models.json", func(t *testing.T) {
		// Attempt to load the real data file from the project root.
		path := filepath.Join("..", "or_models.json")
		data, err := os.ReadFile(path)
		if err != nil {
			t.Skip("or_models.json not found, skipping integration test")
		}
		var models ORModels
		if err := json.Unmarshal(data, &models); err != nil {
			t.Fatalf("failed to unmarshal %s: %v", path, err)
		}
		freeModels := models.ListModels(true)
		if len(freeModels) == 0 {
			t.Error("expected at least one free model, got none")
		}
		allModels := models.ListModels(false)
		if len(allModels) == 0 {
			t.Error("expected at least one model")
		}
		// Ensure free models are a subset of all models.
		// (Previously the set was built from freeModels itself, which made
		// this check a tautology that could never fail.)
		allSet := make(map[string]bool, len(allModels))
		for _, id := range allModels {
			allSet[id] = true
		}
		for _, id := range freeModels {
			if !allSet[id] {
				t.Errorf("free model %s not found in all models", id)
			}
		}
		t.Logf("found %d free models out of %d total models", len(freeModels), len(allModels))
	})
}

View File

@@ -120,7 +120,7 @@ func createTextChunk(embed PngEmbed) ([]byte, error) {
if err := binary.Write(chunk, binary.BigEndian, uint32(len(data))); err != nil { if err := binary.Write(chunk, binary.BigEndian, uint32(len(data))); err != nil {
return nil, fmt.Errorf("error writing chunk length: %w", err) return nil, fmt.Errorf("error writing chunk length: %w", err)
} }
if _, err := chunk.Write([]byte(textChunkType)); err != nil { if _, err := chunk.WriteString(textChunkType); err != nil {
return nil, fmt.Errorf("error writing chunk type: %w", err) return nil, fmt.Errorf("error writing chunk type: %w", err)
} }
if _, err := chunk.Write(data); err != nil { if _, err := chunk.Write(data); err != nil {

389
popups.go Normal file
View File

@@ -0,0 +1,389 @@
package main
import (
"slices"
"strings"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
)
// showModelSelectionPopup creates a modal popup to select a model for the
// current API. Selecting an entry updates chatBody.Model and
// cfg.CurrentModel; Escape or 'x' dismisses the popup.
func showModelSelectionPopup() {
	// Helper function to get the model list for a given API.
	getModelListForAPI := func(api string) []string {
		if strings.Contains(api, "api.deepseek.com/") {
			return []string{"deepseek-chat", "deepseek-reasoner"}
		} else if strings.Contains(api, "openrouter.ai") {
			return ORFreeModels
		}
		// Assume local llama.cpp. LocalModels is guarded by localModelsMu
		// elsewhere in this package (see showAPILinkSelectionPopup), so the
		// read here is done under the same lock for consistency.
		updateModelLists()
		localModelsMu.RLock()
		defer localModelsMu.RUnlock()
		return LocalModels
	}
	// Get the current model list based on the API.
	modelList := getModelListForAPI(cfg.CurrentAPI)
	// Nothing to select from: explain why and bail out.
	if len(modelList) == 0 {
		logger.Warn("empty model list for", "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
		var message string
		switch {
		case strings.Contains(cfg.CurrentAPI, "openrouter.ai"):
			message = "No OpenRouter models available. Check token and connection."
		case strings.Contains(cfg.CurrentAPI, "api.deepseek.com"):
			message = "DeepSeek models should be available. Please report bug."
		default:
			message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
		}
		if err := notifyUser("Empty list", message); err != nil {
			logger.Error("failed to send notification", "error", err)
		}
		return
	}
	// Create a list primitive.
	modelListWidget := tview.NewList().ShowSecondaryText(false).
		SetSelectedBackgroundColor(tcell.ColorGray)
	modelListWidget.SetTitle("Select Model").SetBorder(true)
	// Find the current model index to set as selected.
	currentModelIndex := -1
	for i, model := range modelList {
		if model == chatBody.Model {
			currentModelIndex = i
		}
		modelListWidget.AddItem(model, "", 0, nil)
	}
	// Set the current selection if found.
	if currentModelIndex != -1 {
		modelListWidget.SetCurrentItem(currentModelIndex)
	}
	modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
		// Update the model in both chatBody and config.
		chatBody.Model = mainText
		cfg.CurrentModel = chatBody.Model
		// Remove the popup page.
		pages.RemovePage("modelSelectionPopup")
		// Update the status line to reflect the change.
		updateStatusLine()
	})
	// Escape or 'x' closes the popup without selecting.
	modelListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			pages.RemovePage("modelSelectionPopup")
			return nil
		}
		if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
			pages.RemovePage("modelSelectionPopup")
			return nil
		}
		return event
	})
	// Center the list inside a fixed-size modal overlay.
	modal := func(p tview.Primitive, width, height int) tview.Primitive {
		return tview.NewFlex().
			AddItem(nil, 0, 1, false).
			AddItem(tview.NewFlex().SetDirection(tview.FlexRow).
				AddItem(nil, 0, 1, false).
				AddItem(p, height, 1, true).
				AddItem(nil, 0, 1, false), width, 1, true).
			AddItem(nil, 0, 1, false)
	}
	// Add modal page and make it visible.
	pages.AddPage("modelSelectionPopup", modal(modelListWidget, 80, 20), true, true)
	app.SetFocus(modelListWidget)
}
// showAPILinkSelectionPopup creates a modal popup to select an API link.
// Choosing a link switches cfg.CurrentAPI, re-resolves the model list for
// the new endpoint and refreshes the chunk parser and status line.
func showAPILinkSelectionPopup() {
	// Build the selectable link list; the active API is prepended when it is
	// not already among the configured links.
	var links []string
	if slices.Contains(cfg.ApiLinks, cfg.CurrentAPI) {
		links = make([]string, 0, len(cfg.ApiLinks))
		links = append(links, cfg.ApiLinks...)
	} else {
		links = make([]string, 0, len(cfg.ApiLinks)+1)
		links = append(links, cfg.CurrentAPI)
		links = append(links, cfg.ApiLinks...)
	}
	// Nothing to offer: notify and bail out.
	if len(links) == 0 {
		logger.Warn("no API links available for selection")
		message := "No API links available. Please configure API links in your config file."
		if err := notifyUser("Empty list", message); err != nil {
			logger.Error("failed to send notification", "error", err)
		}
		return
	}
	// Build the selectable list widget.
	listWidget := tview.NewList().ShowSecondaryText(false).
		SetSelectedBackgroundColor(tcell.ColorGray)
	listWidget.SetTitle("Select API Link").SetBorder(true)
	// Populate rows and remember which one matches the active API.
	selected := -1
	for idx, link := range links {
		if link == cfg.CurrentAPI {
			selected = idx
		}
		listWidget.AddItem(link, "", 0, nil)
	}
	if selected != -1 {
		listWidget.SetCurrentItem(selected)
	}
	listWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
		// Switch the active API in config.
		cfg.CurrentAPI = mainText
		// Resolve the model list that matches the newly chosen API.
		modelsForAPI := func(api string) []string {
			if strings.Contains(api, "api.deepseek.com/") {
				return []string{"deepseek-chat", "deepseek-reasoner"}
			} else if strings.Contains(api, "openrouter.ai") {
				return ORFreeModels
			}
			// Assume local llama.cpp.
			refreshLocalModelsIfEmpty()
			localModelsMu.RLock()
			defer localModelsMu.RUnlock()
			return LocalModels
		}
		available := modelsForAPI(cfg.CurrentAPI)
		// Keep the current model only when the new API still offers it;
		// otherwise fall back to the first available one.
		if len(available) > 0 && !slices.Contains(available, chatBody.Model) {
			chatBody.Model = available[0]
			cfg.CurrentModel = chatBody.Model
		}
		pages.RemovePage("apiLinkSelectionPopup")
		// Re-select the parser and refresh the status line for the new API.
		choseChunkParser()
		updateStatusLine()
	})
	// Escape or 'x' closes the popup without selecting.
	listWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		switch {
		case event.Key() == tcell.KeyEscape,
			event.Key() == tcell.KeyRune && event.Rune() == 'x':
			pages.RemovePage("apiLinkSelectionPopup")
			return nil
		}
		return event
	})
	// Center the list inside a fixed-size modal overlay.
	centered := func(p tview.Primitive, width, height int) tview.Primitive {
		column := tview.NewFlex().SetDirection(tview.FlexRow).
			AddItem(nil, 0, 1, false).
			AddItem(p, height, 1, true).
			AddItem(nil, 0, 1, false)
		return tview.NewFlex().
			AddItem(nil, 0, 1, false).
			AddItem(column, width, 1, true).
			AddItem(nil, 0, 1, false)
	}
	pages.AddPage("apiLinkSelectionPopup", centered(listWidget, 80, 20), true, true)
	app.SetFocus(listWidget)
}
// showUserRoleSelectionPopup creates a modal popup to select a user role.
// Selecting a role stores it as the "write next message as" override and
// refreshes the chat view with that character's visible messages.
func showUserRoleSelectionPopup() {
	roles := listRolesWithUser()
	// Nothing to offer: notify and bail out.
	if len(roles) == 0 {
		logger.Warn("no roles available for selection")
		if err := notifyUser("Empty list", "No roles available for selection."); err != nil {
			logger.Error("failed to send notification", "error", err)
		}
		return
	}
	// Build the selectable list widget.
	listWidget := tview.NewList().ShowSecondaryText(false).
		SetSelectedBackgroundColor(tcell.ColorGray)
	listWidget.SetTitle("Select User Role").SetBorder(true)
	// The pending override role takes precedence over the configured user role.
	activeRole := cfg.UserRole
	if cfg.WriteNextMsgAs != "" {
		activeRole = cfg.WriteNextMsgAs
	}
	// Populate rows and remember which one matches the active role.
	highlighted := -1
	for idx, role := range roles {
		if strings.EqualFold(role, activeRole) {
			highlighted = idx
		}
		listWidget.AddItem(role, "", 0, nil)
	}
	if highlighted != -1 {
		listWidget.SetCurrentItem(highlighted)
	}
	listWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
		// Remember the chosen role for the next message.
		cfg.WriteNextMsgAs = mainText
		// The role switched: refresh the text view with the
		// character-specific slice of the chat history.
		visible := filterMessagesForCharacter(chatBody.Messages, mainText)
		textView.SetText(chatToText(visible, cfg.ShowSys))
		pages.RemovePage("userRoleSelectionPopup")
		// Reflect the change in the status line and re-color the chat.
		updateStatusLine()
		colorText()
	})
	// Escape or 'x' closes the popup without selecting.
	listWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		switch {
		case event.Key() == tcell.KeyEscape,
			event.Key() == tcell.KeyRune && event.Rune() == 'x':
			pages.RemovePage("userRoleSelectionPopup")
			return nil
		}
		return event
	})
	// Center the list inside a fixed-size modal overlay.
	centered := func(p tview.Primitive, width, height int) tview.Primitive {
		column := tview.NewFlex().SetDirection(tview.FlexRow).
			AddItem(nil, 0, 1, false).
			AddItem(p, height, 1, true).
			AddItem(nil, 0, 1, false)
		return tview.NewFlex().
			AddItem(nil, 0, 1, false).
			AddItem(column, width, 1, true).
			AddItem(nil, 0, 1, false)
	}
	pages.AddPage("userRoleSelectionPopup", centered(listWidget, 80, 20), true, true)
	app.SetFocus(listWidget)
}
// showBotRoleSelectionPopup creates a modal popup to select a bot role.
// Selecting a role stores it as the completion-agent override for the next
// message; Escape or 'x' dismisses the popup.
func showBotRoleSelectionPopup() {
	// Roles already present in the chat.
	roles := listChatRoles()
	if len(roles) == 0 {
		logger.Warn("empty roles in chat")
	}
	// Always offer the configured assistant role. This also guarantees the
	// list is non-empty from here on, so the previous second empty-list
	// check (with notifyUser) was unreachable dead code and is removed.
	if !strInSlice(cfg.AssistantRole, roles) {
		roles = append(roles, cfg.AssistantRole)
	}
	// Create a list primitive.
	roleListWidget := tview.NewList().ShowSecondaryText(false).
		SetSelectedBackgroundColor(tcell.ColorGray)
	roleListWidget.SetTitle("Select Bot Role").SetBorder(true)
	// Find the current role index to set as selected; a pending
	// completion-agent override wins over the configured assistant role.
	currentRole := cfg.AssistantRole
	if cfg.WriteNextMsgAsCompletionAgent != "" {
		currentRole = cfg.WriteNextMsgAsCompletionAgent
	}
	currentRoleIndex := -1
	for i, role := range roles {
		if strings.EqualFold(role, currentRole) {
			currentRoleIndex = i
		}
		roleListWidget.AddItem(role, "", 0, nil)
	}
	// Set the current selection if found.
	if currentRoleIndex != -1 {
		roleListWidget.SetCurrentItem(currentRoleIndex)
	}
	roleListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
		// Update the bot role in config.
		cfg.WriteNextMsgAsCompletionAgent = mainText
		// Remove the popup page.
		pages.RemovePage("botRoleSelectionPopup")
		// Update the status line to reflect the change.
		updateStatusLine()
	})
	// Escape or 'x' closes the popup without selecting.
	roleListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		if event.Key() == tcell.KeyEscape {
			pages.RemovePage("botRoleSelectionPopup")
			return nil
		}
		if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
			pages.RemovePage("botRoleSelectionPopup")
			return nil
		}
		return event
	})
	// Center the list inside a fixed-size modal overlay.
	modal := func(p tview.Primitive, width, height int) tview.Primitive {
		return tview.NewFlex().
			AddItem(nil, 0, 1, false).
			AddItem(tview.NewFlex().SetDirection(tview.FlexRow).
				AddItem(nil, 0, 1, false).
				AddItem(p, height, 1, true).
				AddItem(nil, 0, 1, false), width, 1, true).
			AddItem(nil, 0, 1, false)
	}
	// Add modal page and make it visible.
	pages.AddPage("botRoleSelectionPopup", modal(roleListWidget, 80, 20), true, true)
	app.SetFocus(roleListWidget)
}
// showFileCompletionPopup offers file-path completion for the text area,
// matching files under the coding directory against the given filter. A
// single match is inserted immediately; multiple matches open a popup.
func showFileCompletionPopup(filter string) {
	scanRoot := cfg.CodingDir
	if scanRoot == "" {
		scanRoot = "."
	}
	matches := scanFiles(scanRoot, filter)
	if len(matches) == 0 {
		return
	}
	// Replace everything after the last "@" in the input with the chosen path.
	applyCompletion := func(chosen string) {
		text := textArea.GetText()
		if at := strings.LastIndex(text, "@"); at >= 0 {
			textArea.SetText(text[:at]+chosen, true)
		}
	}
	// A single candidate is applied right away, no popup needed.
	if len(matches) == 1 {
		applyCompletion(matches[0])
		return
	}
	// Build the selectable list widget.
	list := tview.NewList().ShowSecondaryText(false).
		SetSelectedBackgroundColor(tcell.ColorGray)
	list.SetTitle("file completion").SetBorder(true)
	for _, candidate := range matches {
		list.AddItem(candidate, "", 0, nil)
	}
	list.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
		applyCompletion(mainText)
		pages.RemovePage("fileCompletionPopup")
	})
	// Escape or 'x' closes the popup without completing.
	list.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
		switch {
		case event.Key() == tcell.KeyEscape,
			event.Key() == tcell.KeyRune && event.Rune() == 'x':
			pages.RemovePage("fileCompletionPopup")
			return nil
		}
		return event
	})
	// Center the list inside a fixed-size modal overlay.
	centered := func(p tview.Primitive, width, height int) tview.Primitive {
		column := tview.NewFlex().SetDirection(tview.FlexRow).
			AddItem(nil, 0, 1, false).
			AddItem(p, height, 1, true).
			AddItem(nil, 0, 1, false)
		return tview.NewFlex().
			AddItem(nil, 0, 1, false).
			AddItem(column, width, 1, true).
			AddItem(nil, 0, 1, false)
	}
	pages.AddPage("fileCompletionPopup", centered(list, 80, 20), true, true)
	app.SetFocus(list)
}

View File

@@ -2,7 +2,6 @@ package main
import ( import (
"fmt" "fmt"
"slices"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
@@ -53,7 +52,6 @@ func makePropsTable(props map[string]float32) *tview.Table {
row++ row++
// Store cell data for later use in selection functions // Store cell data for later use in selection functions
cellData := make(map[string]*CellData) cellData := make(map[string]*CellData)
var modelCellID string // will be set for the model selection row
// Helper function to add a checkbox-like row // Helper function to add a checkbox-like row
addCheckboxRow := func(label string, initialValue bool, onChange func(bool)) { addCheckboxRow := func(label string, initialValue bool, onChange func(bool)) {
table.SetCell(row, 0, table.SetCell(row, 0,
@@ -137,6 +135,15 @@ func makePropsTable(props map[string]float32) *tview.Table {
// Reconfigure the app's mouse setting // Reconfigure the app's mouse setting
app.EnableMouse(cfg.EnableMouse) app.EnableMouse(cfg.EnableMouse)
}) })
addCheckboxRow("Image Preview (file picker)", cfg.ImagePreview, func(checked bool) {
cfg.ImagePreview = checked
})
addCheckboxRow("Auto turn (for cards with many chars)", cfg.AutoTurn, func(checked bool) {
cfg.AutoTurn = checked
})
addCheckboxRow("Char specific context", cfg.CharSpecificContextEnabled, func(checked bool) {
cfg.CharSpecificContextEnabled = checked
})
// Add dropdowns // Add dropdowns
logLevels := []string{"Debug", "Info", "Warn"} logLevels := []string{"Debug", "Info", "Warn"}
addListPopupRow("Set log level", logLevels, GetLogLevel(), func(option string) { addListPopupRow("Set log level", logLevels, GetLogLevel(), func(option string) {
@@ -155,52 +162,6 @@ func makePropsTable(props map[string]float32) *tview.Table {
defer localModelsMu.RUnlock() defer localModelsMu.RUnlock()
return LocalModels return LocalModels
} }
var modelRowIndex int // will be set before model row is added
// Prepare API links dropdown - ensure current API is first, avoid duplicates
apiLinks := make([]string, 0, len(cfg.ApiLinks)+1)
apiLinks = append(apiLinks, cfg.CurrentAPI)
for _, api := range cfg.ApiLinks {
if api != cfg.CurrentAPI {
apiLinks = append(apiLinks, api)
}
}
addListPopupRow("Select an api", apiLinks, cfg.CurrentAPI, func(option string) {
cfg.CurrentAPI = option
// Update model list based on new API
newModelList := getModelListForAPI(cfg.CurrentAPI)
if modelCellID != "" {
if data := cellData[modelCellID]; data != nil {
data.Options = newModelList
}
}
// Ensure chatBody.Model is in the new list; if not, set to first available model
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) {
chatBody.Model = newModelList[0]
cfg.CurrentModel = chatBody.Model
// Update the displayed cell text - need to find model row
// Search for model row by label
for r := 0; r < table.GetRowCount(); r++ {
if cell := table.GetCell(r, 0); cell != nil && cell.Text == "Select a model" {
if valueCell := table.GetCell(r, 1); valueCell != nil {
valueCell.SetText(chatBody.Model)
}
break
}
}
}
})
// Prepare model list dropdown
modelRowIndex = row
modelCellID = fmt.Sprintf("listpopup_%d", modelRowIndex)
modelList := getModelListForAPI(cfg.CurrentAPI)
addListPopupRow("Select a model", modelList, chatBody.Model, func(option string) {
chatBody.Model = option
cfg.CurrentModel = chatBody.Model
})
// Role selection dropdown
addListPopupRow("Write next message as", listRolesWithUser(), cfg.WriteNextMsgAs, func(option string) {
cfg.WriteNextMsgAs = option
})
// Add input fields // Add input fields
addInputRow("New char to write msg as", "", func(text string) { addInputRow("New char to write msg as", "", func(text string) {
if text != "" { if text != "" {
@@ -307,11 +268,12 @@ func makePropsTable(props map[string]float32) *tview.Table {
logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels)) logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
message := "No options available for " + label message := "No options available for " + label
if label == "Select a model" { if label == "Select a model" {
if strings.Contains(cfg.CurrentAPI, "openrouter.ai") { switch {
case strings.Contains(cfg.CurrentAPI, "openrouter.ai"):
message = "No OpenRouter models available. Check token and connection." message = "No OpenRouter models available. Check token and connection."
} else if strings.Contains(cfg.CurrentAPI, "api.deepseek.com") { case strings.Contains(cfg.CurrentAPI, "api.deepseek.com"):
message = "DeepSeek models should be available. Please report bug." message = "DeepSeek models should be available. Please report bug."
} else { default:
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models." message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
} }
} }

View File

@@ -107,7 +107,7 @@ func (r *RAG) LoadRAG(fpath string) error {
} }
// Adjust batch size if needed // Adjust batch size if needed
if len(paragraphs) < int(r.cfg.RAGBatchSize) && len(paragraphs) > 0 { if len(paragraphs) < r.cfg.RAGBatchSize && len(paragraphs) > 0 {
r.cfg.RAGBatchSize = len(paragraphs) r.cfg.RAGBatchSize = len(paragraphs)
} }
@@ -133,7 +133,7 @@ func (r *RAG) LoadRAG(fpath string) error {
ctn := 0 ctn := 0
totalParagraphs := len(paragraphs) totalParagraphs := len(paragraphs)
for { for {
if int(right) > totalParagraphs { if right > totalParagraphs {
batchCh <- map[int][]string{left: paragraphs[left:]} batchCh <- map[int][]string{left: paragraphs[left:]}
break break
} }

View File

@@ -1,74 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"gf-lt/config"
"net/http"
"time"
)
// Server hosts the local HTTP API (ping, model, completion endpoints).
// NOTE(review): the config field appears unused by the visible handlers
// (hence the nolint) — presumably kept for future use; confirm.
type Server struct {
	// nolint
	config config.Config
}
// ListenToRequests starts a blocking HTTP server on localhost:<port>
// exposing GET /ping, GET /model and POST /completion. It panics if the
// server stops with an error (there is no graceful-shutdown path here, so
// any return from ListenAndServe is treated as fatal).
// NOTE(review): WriteTimeout of 5s may cut off long-running streamed
// /completion responses — confirm this limit is intended.
func (srv *Server) ListenToRequests(port string) {
	// h := srv.actions
	mux := http.NewServeMux()
	server := &http.Server{
		Addr:         "localhost:" + port,
		Handler:      mux,
		ReadTimeout:  time.Second * 5,
		WriteTimeout: time.Second * 5,
	}
	// Method-qualified patterns (Go 1.22+ ServeMux routing).
	mux.HandleFunc("GET /ping", pingHandler)
	mux.HandleFunc("GET /model", modelHandler)
	mux.HandleFunc("POST /completion", completionHandler)
	fmt.Println("Listening", "addr", server.Addr)
	if err := server.ListenAndServe(); err != nil {
		panic(err)
	}
}
// create server
// listen to the completion endpoint handler
// pingHandler answers health-check requests with a plain "pong" body.
func pingHandler(w http.ResponseWriter, req *http.Request) {
	const pong = "pong"
	_, err := w.Write([]byte(pong))
	if err != nil {
		logger.Error("server ping", "error", err)
	}
}
// completionHandler proxies a completion request to the LLM: the raw request
// body is forwarded to sendMsgToLLM in a goroutine while this handler drains
// the package-global chunkChan, streaming each produced chunk back to the
// client until streamDone signals the end of the stream.
// NOTE(review): the response is never flushed per chunk (no http.Flusher),
// so the client may only see buffered output — confirm this is intended.
// NOTE(review): chunkChan/streamDone are shared globals; presumably only one
// completion runs at a time — concurrent requests would interleave. Verify.
func completionHandler(w http.ResponseWriter, req *http.Request) {
	// post request
	body := req.Body
	// get body as io.reader
	// pass it to the /completion
	go sendMsgToLLM(body)
out:
	for {
		select {
		case chunk := <-chunkChan:
			// Mirror each chunk to stdout and to the HTTP client; a failed
			// write is logged but the stream keeps draining.
			fmt.Print(chunk)
			if _, err := w.Write([]byte(chunk)); err != nil {
				logger.Warn("failed to write chunk", "value", chunk)
				continue
			}
		case <-streamDone:
			break out
		}
	}
}
// modelHandler responds with the current llama.cpp model name encoded as JSON.
func modelHandler(w http.ResponseWriter, req *http.Request) {
	name := fetchLCPModelName()
	data, err := json.Marshal(name)
	if err != nil {
		// Marshal failure: log and send nothing.
		logger.Error("model handler", "error", err)
		return
	}
	if _, err = w.Write(data); err != nil {
		logger.Error("model handler", "error", err)
	}
}

View File

@@ -0,0 +1,7 @@
{
"sys_prompt": "This is a chat between Alice, Bob and Carl. Normally all messages are public (seen by everyone). But characters are also able to make messages intended for specific targets using the '@' tag. Usually the tag is provided inside an out-of-character clause: (ooc: @charname@), but it will be parsed if put anywhere in the message.\nTO SEND A PRIVATE MESSAGE:\n- Include a recipient tag in this exact format: @CharacterName@\n- The tag can be anywhere in your message\n- Example: \"(ooc: @Bob@) Don't tell others this secret.\"\n- For immersion's sake it is better if private messages are given in the context of whispering, passing notes, or being alone in some space: Alice: (ooc: @Carl@) *leans closer to Carl and whispers* \"I forgot to turn off the car, could you watch my bag for a couple of minutes?\"\n- Only the sender and tagged recipients will see that message.\nRECEIVING MESSAGES:\n- You only see messages where you are the sender OR you are tagged in the recipient tag\n- Public messages (without tags) are seen by everyone.\nEXAMPLE FORMAT:\nAlice: \"Public message everyone sees\"\nAlice: (ooc: @Bob@)\n\"Private message only for Bob\"\n(if Diana joins the conversation, and Alice wants to exclude her) Alice: (ooc: @Bob,Carl@; Diana is not trustworthy)\n*Grabs Bob and Carl, and pulls them away* \"Listen boys, let's meet this Friday again!\"\nWHEN TO USE:\n- Most of the time public messages (no tag) are the best choice. Private messages (with tag) are mostly for passing secrets or information that is described or inferred as private.\n- Game of 20 questions. Guys are putting paper stickers on the forehead with names written on them. So in this case only the person who gets the sticker put on them does not see the writing on it.\nBob: *Puts sticker with 'JACK THE RIPPER' written on it, on Alice's forehead* (ooc: @Carl@).\nCarl: \"Alright, we're ready.\"\nAlice: \"Good. So, am I a fictional character or a real one?\"",
"role": "Alice",
"filepath": "sysprompts/alice_bob_carl.json",
"chars": ["Alice", "Bob", "Carl"],
"first_msg": "\"Hey guys! Want to play Alias like game? I'll tell Bob a word and he needs to describe that word so Carl can guess what it was?\""
}

View File

@@ -0,0 +1,6 @@
{
"sys_prompt": "You are an expert software engineering assistant. Your goal is to help users with coding tasks, debugging, refactoring, and software development.\n\n## Core Principles\n1. **Security First**: Never expose secrets, keys, or credentials. Never commit sensitive data.\n2. **No Git Actions**: You can READ git info (status, log, diff) for context, but NEVER perform git actions (commit, add, push, checkout, reset, rm, etc.). Let the user handle all git operations.\n3. **Explore Before Execute**: Always understand the codebase structure before making changes.\n4. **Follow Conventions**: Match existing code style, patterns, and frameworks used in the project.\n5. **Be Concise**: Minimize output tokens while maintaining quality. Avoid unnecessary explanations.\n\n## Workflow for Complex Tasks\nFor multi-step tasks, ALWAYS use the todo system to track progress:\n\n1. **Create Todo List**: At the start of complex tasks, use `todo_create` to break down work into actionable items.\n2. **Update Progress**: Mark items as `in_progress` when working on them, and `completed` when done.\n3. 
**Check Status**: Use `todo_read` to review your progress.\n\nExample workflow:\n- User: \"Add user authentication to this app\"\n- You: Create todos: [\"Analyze existing auth structure\", \"Check frameworks in use\", \"Implement auth middleware\", \"Add login endpoints\", \"Test implementation\"]\n\n## Task Execution Flow\n\n### Phase 1: Exploration (Always First)\n- Use `file_list` to understand directory structure (path defaults to CodingDir if not specified)\n- Use `file_read` to examine relevant files (paths are relative to CodingDir unless starting with `/`)\n- Use `execute_command` with `grep`/`find` to search for patterns\n- Check `README` or documentation files\n- Identify: frameworks, conventions, testing approach\n- **Git reads allowed**: You may use `git status`, `git log`, `git diff` for context, but only to inform your work\n- **Path handling**: Relative paths are resolved against CodingDir (configurable via Alt+O). Use absolute paths (starting with `/`) to bypass CodingDir.\n\n### Phase 2: Planning\n- For complex tasks: create todo items\n- Identify files that need modification\n- Plan your approach following existing patterns\n\n### Phase 3: Implementation\n- Make changes using appropriate file tools\n- Prefer `file_write` for new files, `file_read` then modify for existing files\n- Follow existing code style exactly\n- Use existing libraries and utilities\n\n### Phase 4: Verification\n- Run tests if available (check for test scripts)\n- Run linting/type checking commands\n- Verify changes work as expected\n\n### Phase 5: Completion\n- Update todos to `completed`\n- Provide concise summary of changes\n- Reference specific file paths and line numbers when relevant\n- **DO NOT commit changes** - inform user what was done so they can review and commit themselves\n\n## Tool Usage Guidelines\n\n**File Operations**:\n- `file_read`: Read before editing. 
Use for understanding code.\n- `file_write`: Overwrite file content completely.\n- `file_write_append`: Add to end of file.\n- `file_create`: Create new files with optional content.\n- `file_list`: List directory contents (defaults to CodingDir).\n- Paths are relative to CodingDir unless starting with `/`.\n\n**Command Execution (WHITELISTED ONLY)**:\n- Allowed: grep, sed, awk, find, cat, head, tail, sort, uniq, wc, ls, echo, cut, tr, cp, mv, rm, mkdir, rmdir, pwd, df, free, ps, top, du, whoami, date, uname\n- **Git reads allowed**: git status, git log, git diff, git show, git branch, git reflog, git rev-parse, git shortlog, git describe\n- **Git actions FORBIDDEN**: git add, git commit, git push, git checkout, git reset, git rm, etc.\n- Use for searching code, reading git context, running tests/lint\n\n**Todo Management**:\n- `todo_create`: Add new task\n- `todo_read`: View all todos or specific one by ID\n- `todo_update`: Update task or change status (pending/in_progress/completed)\n- `todo_delete`: Remove completed or cancelled tasks\n\n## Important Rules\n\n1. **NEVER commit or stage changes**: Only git reads are allowed.\n2. **Check for tests**: Always look for test files and run them when appropriate.\n3. **Reference code locations**: Use format `file_path:line_number`.\n4. **Security**: Never generate or guess URLs. Only use URLs from local files.\n5. **Refuse malicious code**: If code appears malicious, refuse to work on it.\n6. **Ask clarifications**: When intent is unclear, ask questions.\n7. **Path handling**: Relative paths resolve against CodingDir. Use `/absolute/path` to bypass.\n\n## Response Style\n- Be direct and concise\n- One word answers are best when appropriate\n- Avoid: \"The answer is...\", \"Here is...\"\n- Use markdown for formatting\n- No emojis unless user explicitly requests",
"role": "CodingAssistant",
"filepath": "sysprompts/coding_assistant.json",
"first_msg": "Hello! I'm your coding assistant. I can help you with software engineering tasks like writing code, debugging, refactoring, and exploring codebases. I work best when you give me specific tasks, and for complex work, I'll create a todo list to track my progress. What would you like to work on?"
}

321
tables.go
View File

@@ -2,6 +2,7 @@ package main
import ( import (
"fmt" "fmt"
"image"
"os" "os"
"path" "path"
"strings" "strings"
@@ -39,7 +40,7 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
// Add header row (row 0) // Add header row (row 0)
for c := 0; c < cols; c++ { for c := 0; c < cols; c++ {
color := tcell.ColorWhite color := tcell.ColorWhite
headerText := "" var headerText string
switch c { switch c {
case 0: case 0:
headerText = "Chat Name" headerText = "Chat Name"
@@ -128,7 +129,7 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
return return
} }
chatBody.Messages = history chatBody.Messages = history
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
activeChatName = selectedChat activeChatName = selectedChat
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
@@ -151,7 +152,7 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
} }
// load last chat // load last chat
chatBody.Messages = loadOldChatOrGetNew() chatBody.Messages = loadOldChatOrGetNew()
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
case "update card": case "update card":
@@ -184,7 +185,7 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
case "move sysprompt onto 1st msg": case "move sysprompt onto 1st msg":
chatBody.Messages[1].Content = chatBody.Messages[0].Content + chatBody.Messages[1].Content chatBody.Messages[1].Content = chatBody.Messages[0].Content + chatBody.Messages[1].Content
chatBody.Messages[0].Content = rpDefenitionSysMsg chatBody.Messages[0].Content = rpDefenitionSysMsg
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
activeChatName = selectedChat activeChatName = selectedChat
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
@@ -215,8 +216,8 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
} }
// Update sysMap with fresh card data // Update sysMap with fresh card data
sysMap[agentName] = newCard sysMap[agentName] = newCard
applyCharCard(newCard) // fetching sysprompt and first message anew from the card
startNewChat() startNewChat(false)
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
return return
default: default:
@@ -268,19 +269,20 @@ func makeRAGTable(fileList []string) *tview.Flex {
for r := 0; r < rows; r++ { for r := 0; r < rows; r++ {
for c := 0; c < cols; c++ { for c := 0; c < cols; c++ {
color := tcell.ColorWhite color := tcell.ColorWhite
if c < 1 { switch {
case c < 1:
fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0 fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
tview.NewTableCell(fileList[r]). tview.NewTableCell(fileList[r]).
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter). SetAlign(tview.AlignCenter).
SetSelectable(false)) SetSelectable(false))
} else if c == 1 { // Action description column - not selectable case c == 1: // Action description column - not selectable
fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0 fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
tview.NewTableCell("(Action)"). tview.NewTableCell("(Action)").
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter). SetAlign(tview.AlignCenter).
SetSelectable(false)) SetSelectable(false))
} else { // Action button column - selectable default: // Action button column - selectable
fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0 fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
tview.NewTableCell(actions[c-1]). tview.NewTableCell(actions[c-1]).
SetTextColor(color). SetTextColor(color).
@@ -415,19 +417,20 @@ func makeLoadedRAGTable(fileList []string) *tview.Flex {
for r := 0; r < rows; r++ { for r := 0; r < rows; r++ {
for c := 0; c < cols; c++ { for c := 0; c < cols; c++ {
color := tcell.ColorWhite color := tcell.ColorWhite
if c < 1 { switch {
case c < 1:
fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0 fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
tview.NewTableCell(fileList[r]). tview.NewTableCell(fileList[r]).
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter). SetAlign(tview.AlignCenter).
SetSelectable(false)) SetSelectable(false))
} else if c == 1 { // Action description column - not selectable case c == 1: // Action description column - not selectable
fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0 fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
tview.NewTableCell("(Action)"). tview.NewTableCell("(Action)").
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter). SetAlign(tview.AlignCenter).
SetSelectable(false)) SetSelectable(false))
} else { // Action button column - selectable default: // Action button column - selectable
fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0 fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
tview.NewTableCell(actions[c-1]). tview.NewTableCell(actions[c-1]).
SetTextColor(color). SetTextColor(color).
@@ -496,13 +499,14 @@ func makeAgentTable(agentList []string) *tview.Table {
for r := 0; r < rows; r++ { for r := 0; r < rows; r++ {
for c := 0; c < cols; c++ { for c := 0; c < cols; c++ {
color := tcell.ColorWhite color := tcell.ColorWhite
if c < 1 { switch {
case c < 1:
chatActTable.SetCell(r, c, chatActTable.SetCell(r, c,
tview.NewTableCell(agentList[r]). tview.NewTableCell(agentList[r]).
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter). SetAlign(tview.AlignCenter).
SetSelectable(false)) SetSelectable(false))
} else if c == 1 { case c == 1:
if actions[c-1] == "filepath" { if actions[c-1] == "filepath" {
cc, ok := sysMap[agentList[r]] cc, ok := sysMap[agentList[r]]
if !ok { if !ok {
@@ -519,7 +523,7 @@ func makeAgentTable(agentList []string) *tview.Table {
tview.NewTableCell(actions[c-1]). tview.NewTableCell(actions[c-1]).
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter)) SetAlign(tview.AlignCenter))
} else { default:
chatActTable.SetCell(r, c, chatActTable.SetCell(r, c,
tview.NewTableCell(actions[c-1]). tview.NewTableCell(actions[c-1]).
SetTextColor(color). SetTextColor(color).
@@ -549,13 +553,13 @@ func makeAgentTable(agentList []string) *tview.Table {
// notification := fmt.Sprintf("chat: %s; action: %s", selectedChat, tc.Text) // notification := fmt.Sprintf("chat: %s; action: %s", selectedChat, tc.Text)
switch tc.Text { switch tc.Text {
case "load": case "load":
if ok := charToStart(selected); !ok { if ok := charToStart(selected, true); !ok {
logger.Warn("no such sys msg", "name", selected) logger.Warn("no such sys msg", "name", selected)
pages.RemovePage(agentPage) pages.RemovePage(agentPage)
return return
} }
// replace textview // replace textview
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
updateStatusLine() updateStatusLine()
// sysModal.ClearButtons() // sysModal.ClearButtons()
@@ -609,13 +613,14 @@ func makeCodeBlockTable(codeBlocks []string) *tview.Table {
if len(codeBlocks[r]) < 30 { if len(codeBlocks[r]) < 30 {
previewLen = len(codeBlocks[r]) previewLen = len(codeBlocks[r])
} }
if c < 1 { switch {
case c < 1:
table.SetCell(r, c, table.SetCell(r, c,
tview.NewTableCell(codeBlocks[r][:previewLen]). tview.NewTableCell(codeBlocks[r][:previewLen]).
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter). SetAlign(tview.AlignCenter).
SetSelectable(false)) SetSelectable(false))
} else { default:
table.SetCell(r, c, table.SetCell(r, c,
tview.NewTableCell(actions[c-1]). tview.NewTableCell(actions[c-1]).
SetTextColor(color). SetTextColor(color).
@@ -680,13 +685,14 @@ func makeImportChatTable(filenames []string) *tview.Table {
for r := 0; r < rows; r++ { for r := 0; r < rows; r++ {
for c := 0; c < cols; c++ { for c := 0; c < cols; c++ {
color := tcell.ColorWhite color := tcell.ColorWhite
if c < 1 { switch {
case c < 1:
chatActTable.SetCell(r, c, chatActTable.SetCell(r, c,
tview.NewTableCell(filenames[r]). tview.NewTableCell(filenames[r]).
SetTextColor(color). SetTextColor(color).
SetAlign(tview.AlignCenter). SetAlign(tview.AlignCenter).
SetSelectable(false)) SetSelectable(false))
} else { default:
chatActTable.SetCell(r, c, chatActTable.SetCell(r, c,
tview.NewTableCell(actions[c-1]). tview.NewTableCell(actions[c-1]).
SetTextColor(color). SetTextColor(color).
@@ -724,7 +730,7 @@ func makeImportChatTable(filenames []string) *tview.Table {
colorText() colorText()
updateStatusLine() updateStatusLine()
// redraw the text in text area // redraw the text in text area
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
pages.RemovePage(historyPage) pages.RemovePage(historyPage)
app.SetFocus(textArea) app.SetFocus(textArea)
return return
@@ -783,17 +789,18 @@ func makeFilePicker() *tview.Flex {
var selectedFile string var selectedFile string
// Track currently displayed directory (changes as user navigates) // Track currently displayed directory (changes as user navigates)
currentDisplayDir := startDir currentDisplayDir := startDir
// --- NEW: search state ---
searching := false
searchQuery := ""
// Helper function to check if a file has an allowed extension from config // Helper function to check if a file has an allowed extension from config
hasAllowedExtension := func(filename string) bool { hasAllowedExtension := func(filename string) bool {
// If no allowed extensions are specified in config, allow all files
if cfg.FilePickerExts == "" { if cfg.FilePickerExts == "" {
return true return true
} }
// Split the allowed extensions from the config string
allowedExts := strings.Split(cfg.FilePickerExts, ",") allowedExts := strings.Split(cfg.FilePickerExts, ",")
lowerFilename := strings.ToLower(strings.TrimSpace(filename)) lowerFilename := strings.ToLower(strings.TrimSpace(filename))
for _, ext := range allowedExts { for _, ext := range allowedExts {
ext = strings.TrimSpace(ext) // Remove any whitespace around the extension ext = strings.TrimSpace(ext)
if ext != "" && strings.HasSuffix(lowerFilename, "."+ext) { if ext != "" && strings.HasSuffix(lowerFilename, "."+ext) {
return true return true
} }
@@ -813,21 +820,37 @@ func makeFilePicker() *tview.Flex {
} }
// Create UI elements // Create UI elements
listView := tview.NewList() listView := tview.NewList()
listView.SetBorder(true).SetTitle("Files & Directories").SetTitleAlign(tview.AlignLeft) listView.SetBorder(true).SetTitle("Files & Directories [c: set CodingDir]").SetTitleAlign(tview.AlignLeft)
// Status view for selected file information // Status view for selected file information
statusView := tview.NewTextView() statusView := tview.NewTextView()
statusView.SetBorder(true).SetTitle("Selected File").SetTitleAlign(tview.AlignLeft) statusView.SetBorder(true).SetTitle("Selected File").SetTitleAlign(tview.AlignLeft)
statusView.SetTextColor(tcell.ColorYellow) statusView.SetTextColor(tcell.ColorYellow)
// Layout - only include list view and status view // Image preview pane
var imgPreview *tview.Image
if cfg.ImagePreview {
imgPreview = tview.NewImage()
imgPreview.SetBorder(true).SetTitle("Preview").SetTitleAlign(tview.AlignLeft)
}
// Horizontal flex for list + preview
var hFlex *tview.Flex
if cfg.ImagePreview && imgPreview != nil {
hFlex = tview.NewFlex().SetDirection(tview.FlexColumn).
AddItem(listView, 0, 3, true).
AddItem(imgPreview, 0, 2, false)
} else {
hFlex = tview.NewFlex().SetDirection(tview.FlexColumn).
AddItem(listView, 0, 1, true)
}
// Main vertical flex
flex := tview.NewFlex().SetDirection(tview.FlexRow) flex := tview.NewFlex().SetDirection(tview.FlexRow)
flex.AddItem(listView, 0, 3, true) flex.AddItem(hFlex, 0, 3, true)
flex.AddItem(statusView, 3, 0, false) flex.AddItem(statusView, 3, 0, false)
// Refresh the file list // Refresh the file list now accepts a filter string
var refreshList func(string) var refreshList func(string, string)
refreshList = func(dir string) { refreshList = func(dir string, filter string) {
listView.Clear() listView.Clear()
// Update the current display directory // Update the current display directory
currentDisplayDir = dir // Update the current display directory currentDisplayDir = dir
// Add exit option at the top // Add exit option at the top
listView.AddItem("Exit file picker [gray](Close without selecting)[-]", "", 'x', func() { listView.AddItem("Exit file picker [gray](Close without selecting)[-]", "", 'x', func() {
pages.RemovePage(filePickerPage) pages.RemovePage(filePickerPage)
@@ -835,13 +858,16 @@ func makeFilePicker() *tview.Flex {
// Add parent directory (..) if not at root // Add parent directory (..) if not at root
if dir != "/" { if dir != "/" {
parentDir := path.Dir(dir) parentDir := path.Dir(dir)
// Special handling for edge cases - only return if we're truly at a system root // For Unix-like systems, avoid infinite loop when at root
// For Unix-like systems, path.Dir("/") returns "/" which would cause parentDir == dir if parentDir != dir {
if parentDir == dir && dir == "/" {
// We're at the root ("/") and trying to go up, just don't add the parent item
} else {
listView.AddItem("../ [gray](Parent Directory)[-]", "", 'p', func() { listView.AddItem("../ [gray](Parent Directory)[-]", "", 'p', func() {
refreshList(parentDir) // Clear search on navigation
searching = false
searchQuery = ""
if cfg.ImagePreview {
imgPreview.SetImage(nil)
}
refreshList(parentDir, "")
dirStack = append(dirStack, parentDir) dirStack = append(dirStack, parentDir)
currentStackPos = len(dirStack) - 1 currentStackPos = len(dirStack) - 1
}) })
@@ -853,95 +879,214 @@ func makeFilePicker() *tview.Flex {
statusView.SetText("Error reading directory: " + err.Error()) statusView.SetText("Error reading directory: " + err.Error())
return return
} }
// Add directories and files to the list // Helper to check if an item passes the filter
matchesFilter := func(name string) bool {
if filter == "" {
return true
}
return strings.Contains(strings.ToLower(name), strings.ToLower(filter))
}
// Add directories
for _, file := range files { for _, file := range files {
name := file.Name() name := file.Name()
// Skip hidden files and directories (those starting with a dot)
if strings.HasPrefix(name, ".") { if strings.HasPrefix(name, ".") {
continue continue
} }
if file.IsDir() { if file.IsDir() && matchesFilter(name) {
// Capture the directory name for the closure to avoid loop variable issues
dirName := name dirName := name
listView.AddItem(dirName+"/ [gray](Directory)[-]", "", 0, func() { listView.AddItem(dirName+"/ [gray](Directory)[-]", "", 0, func() {
// Clear search on navigation
searching = false
searchQuery = ""
if cfg.ImagePreview {
imgPreview.SetImage(nil)
}
newDir := path.Join(dir, dirName) newDir := path.Join(dir, dirName)
refreshList(newDir) refreshList(newDir, "")
dirStack = append(dirStack, newDir) dirStack = append(dirStack, newDir)
currentStackPos = len(dirStack) - 1 currentStackPos = len(dirStack) - 1
statusView.SetText("Current: " + newDir) statusView.SetText("Current: " + newDir)
}) })
} else {
// Only show files that have allowed extensions (from config)
if hasAllowedExtension(name) {
// Capture the file name for the closure to avoid loop variable issues
fileName := name
fullFilePath := path.Join(dir, fileName)
listView.AddItem(fileName+" [gray](File)[-]", "", 0, func() {
selectedFile = fullFilePath
statusView.SetText("Selected: " + selectedFile)
// Check if the file is an image
if isImageFile(fileName) {
// For image files, offer to attach to the next LLM message
statusView.SetText("Selected image: " + selectedFile)
} else {
// For non-image files, display as before
statusView.SetText("Selected: " + selectedFile)
}
})
}
} }
} }
statusView.SetText("Current: " + dir) // Add files with allowed extensions
for _, file := range files {
name := file.Name()
if strings.HasPrefix(name, ".") || file.IsDir() {
continue
}
if hasAllowedExtension(name) && matchesFilter(name) {
fileName := name
fullFilePath := path.Join(dir, fileName)
listView.AddItem(fileName+" [gray](File)[-]", "", 0, func() {
selectedFile = fullFilePath
statusView.SetText("Selected: " + selectedFile)
if isImageFile(fileName) {
statusView.SetText("Selected image: " + selectedFile)
}
})
}
}
// Update status line based on search state
switch {
case searching:
statusView.SetText("Search: " + searchQuery + "_")
case searchQuery != "":
statusView.SetText("Current: " + dir + " (filter: " + searchQuery + ")")
default:
statusView.SetText("Current: " + dir)
}
} }
// Initialize the file list // Initialize the file list
refreshList(startDir) refreshList(startDir, "")
// Update image preview when selection changes (unchanged)
if cfg.ImagePreview && imgPreview != nil {
listView.SetChangedFunc(func(index int, mainText, secondaryText string, rune rune) {
itemText, _ := listView.GetItemText(index)
if strings.HasPrefix(itemText, "Exit file picker") || strings.HasPrefix(itemText, "../") {
imgPreview.SetImage(nil)
return
}
actualItemName := itemText
if bracketPos := strings.Index(itemText, " ["); bracketPos != -1 {
actualItemName = itemText[:bracketPos]
}
if strings.HasSuffix(actualItemName, "/") {
imgPreview.SetImage(nil)
return
}
if !isImageFile(actualItemName) {
imgPreview.SetImage(nil)
return
}
filePath := path.Join(currentDisplayDir, actualItemName)
file, err := os.Open(filePath)
if err != nil {
imgPreview.SetImage(nil)
return
}
defer file.Close()
img, _, err := image.Decode(file)
if err != nil {
imgPreview.SetImage(nil)
return
}
imgPreview.SetImage(img)
})
}
// Set up keyboard navigation // Set up keyboard navigation
flex.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey { flex.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
// --- Handle search mode ---
if searching {
switch event.Key() {
case tcell.KeyEsc:
// Exit search, clear filter
searching = false
searchQuery = ""
refreshList(currentDisplayDir, "")
return nil
case tcell.KeyBackspace, tcell.KeyBackspace2:
if len(searchQuery) > 0 {
searchQuery = searchQuery[:len(searchQuery)-1]
refreshList(currentDisplayDir, searchQuery)
}
return nil
case tcell.KeyRune:
r := event.Rune()
if r != 0 {
searchQuery += string(r)
refreshList(currentDisplayDir, searchQuery)
}
return nil
default:
// Pass all other keys (arrows, Enter, etc.) to normal processing
// This allows selecting items while still in search mode
return event
}
}
// --- Not searching ---
switch event.Key() { switch event.Key() {
case tcell.KeyEsc: case tcell.KeyEsc:
pages.RemovePage(filePickerPage) pages.RemovePage(filePickerPage)
return nil return nil
case tcell.KeyBackspace2: // Backspace to go to parent directory case tcell.KeyBackspace2: // Backspace to go to parent directory
if cfg.ImagePreview && imgPreview != nil {
imgPreview.SetImage(nil)
}
if currentStackPos > 0 { if currentStackPos > 0 {
currentStackPos-- currentStackPos--
prevDir := dirStack[currentStackPos] prevDir := dirStack[currentStackPos]
refreshList(prevDir) // Clear search when navigating with backspace
// Trim the stack to current position to avoid deep history searching = false
searchQuery = ""
refreshList(prevDir, "")
// Trim the stack to current position
dirStack = dirStack[:currentStackPos+1] dirStack = dirStack[:currentStackPos+1]
} }
return nil return nil
case tcell.KeyRune:
if event.Rune() == '/' {
// Enter search mode
searching = true
searchQuery = ""
refreshList(currentDisplayDir, "")
return nil
}
if event.Rune() == 'c' {
// Set CodingDir to current directory
itemIndex := listView.GetCurrentItem()
if itemIndex >= 0 && itemIndex < listView.GetItemCount() {
itemText, _ := listView.GetItemText(itemIndex)
// Get the actual directory path
var targetDir string
if strings.HasPrefix(itemText, "Exit") || strings.HasPrefix(itemText, "Select this directory") {
targetDir = currentDisplayDir
} else {
actualItemName := itemText
if bracketPos := strings.Index(itemText, " ["); bracketPos != -1 {
actualItemName = itemText[:bracketPos]
}
if strings.HasPrefix(actualItemName, "../") {
targetDir = path.Dir(currentDisplayDir)
} else if strings.HasSuffix(actualItemName, "/") {
dirName := strings.TrimSuffix(actualItemName, "/")
targetDir = path.Join(currentDisplayDir, dirName)
} else {
targetDir = currentDisplayDir
}
}
cfg.CodingDir = targetDir
if err := notifyUser("CodingDir", "Set to: "+targetDir); err != nil {
logger.Error("failed to notify user", "error", err)
}
pages.RemovePage(filePickerPage)
return nil
}
}
case tcell.KeyEnter: case tcell.KeyEnter:
// Get the currently highlighted item in the list // Get the currently highlighted item in the list
itemIndex := listView.GetCurrentItem() itemIndex := listView.GetCurrentItem()
if itemIndex >= 0 && itemIndex < listView.GetItemCount() { if itemIndex >= 0 && itemIndex < listView.GetItemCount() {
// We need to get the text of the currently selected item to determine if it's a directory
// Since we can't directly get the item text, we'll keep track of items differently
// Let's improve the approach by tracking the currently selected item
itemText, _ := listView.GetItemText(itemIndex) itemText, _ := listView.GetItemText(itemIndex)
logger.Info("choosing dir", "itemText", itemText) logger.Info("choosing dir", "itemText", itemText)
// Check for the exit option first (should be the first item) // Check for the exit option first
if strings.HasPrefix(itemText, "Exit file picker") { if strings.HasPrefix(itemText, "Exit file picker") {
pages.RemovePage(filePickerPage) pages.RemovePage(filePickerPage)
return nil return nil
} }
// Extract the actual filename/directory name by removing the type info in brackets // Extract the actual filename/directory name by removing the type info
// Format is "name [gray](type)[-]"
actualItemName := itemText actualItemName := itemText
if bracketPos := strings.Index(itemText, " ["); bracketPos != -1 { if bracketPos := strings.Index(itemText, " ["); bracketPos != -1 {
actualItemName = itemText[:bracketPos] actualItemName = itemText[:bracketPos]
} }
// Check if it's a directory (ends with /) // Check if it's a directory (ends with /)
if strings.HasSuffix(actualItemName, "/") { if strings.HasSuffix(actualItemName, "/") {
// This is a directory, we need to get the full path
// Since the item text ends with "/" and represents a directory
var targetDir string var targetDir string
if strings.HasPrefix(actualItemName, "../") { if strings.HasPrefix(actualItemName, "../") {
// Parent directory - need to go up from current directory // Parent directory
targetDir = path.Dir(currentDisplayDir) targetDir = path.Dir(currentDisplayDir)
// Avoid going above root - if parent is same as current and it's system root
if targetDir == currentDisplayDir && currentDisplayDir == "/" { if targetDir == currentDisplayDir && currentDisplayDir == "/" {
// We're at root, don't navigate logger.Warn("at root, cannot go up")
logger.Warn("went to root", "dir", targetDir)
return nil return nil
} }
} else { } else {
@@ -949,24 +1094,23 @@ func makeFilePicker() *tview.Flex {
dirName := strings.TrimSuffix(actualItemName, "/") dirName := strings.TrimSuffix(actualItemName, "/")
targetDir = path.Join(currentDisplayDir, dirName) targetDir = path.Join(currentDisplayDir, dirName)
} }
// Navigate to the selected directory // Navigate clear search
logger.Info("going to the dir", "dir", targetDir) logger.Info("going to dir", "dir", targetDir)
refreshList(targetDir) if cfg.ImagePreview && imgPreview != nil {
imgPreview.SetImage(nil)
}
searching = false
searchQuery = ""
refreshList(targetDir, "")
dirStack = append(dirStack, targetDir) dirStack = append(dirStack, targetDir)
currentStackPos = len(dirStack) - 1 currentStackPos = len(dirStack) - 1
statusView.SetText("Current: " + targetDir) statusView.SetText("Current: " + targetDir)
return nil return nil
} else { } else {
// It's a file - construct the full path from current directory and the actual item name // It's a file
// We can't rely only on the selectedFile variable since Enter key might be pressed
// without having clicked the file first
filePath := path.Join(currentDisplayDir, actualItemName) filePath := path.Join(currentDisplayDir, actualItemName)
// Verify it's actually a file (not just lacking a directory suffix)
if info, err := os.Stat(filePath); err == nil && !info.IsDir() { if info, err := os.Stat(filePath); err == nil && !info.IsDir() {
// Check if the file is an image
if isImageFile(actualItemName) { if isImageFile(actualItemName) {
// For image files, set it as an attachment for the next LLM message
// Use the version without UI updates to avoid hangs in event handlers
logger.Info("setting image", "file", actualItemName) logger.Info("setting image", "file", actualItemName)
SetImageAttachment(filePath) SetImageAttachment(filePath)
logger.Info("after setting image", "file", actualItemName) logger.Info("after setting image", "file", actualItemName)
@@ -975,7 +1119,6 @@ func makeFilePicker() *tview.Flex {
pages.RemovePage(filePickerPage) pages.RemovePage(filePickerPage)
logger.Info("after update drawn", "file", actualItemName) logger.Info("after update drawn", "file", actualItemName)
} else { } else {
// For non-image files, update the text area with file path
textArea.SetText(filePath, true) textArea.SetText(filePath, true)
app.SetFocus(textArea) app.SetFocus(textArea)
pages.RemovePage(filePickerPage) pages.RemovePage(filePickerPage)

View File

@@ -9,6 +9,7 @@ import (
"io" "io"
"os" "os"
"os/exec" "os/exec"
"path/filepath"
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
@@ -377,6 +378,8 @@ func fileCreate(args map[string]string) []byte {
return []byte(msg) return []byte(msg)
} }
path = resolvePath(path)
content, ok := args["content"] content, ok := args["content"]
if !ok { if !ok {
content = "" content = ""
@@ -400,6 +403,8 @@ func fileRead(args map[string]string) []byte {
return []byte(msg) return []byte(msg)
} }
path = resolvePath(path)
content, err := readStringFromFile(path) content, err := readStringFromFile(path)
if err != nil { if err != nil {
msg := "failed to read file; error: " + err.Error() msg := "failed to read file; error: " + err.Error()
@@ -428,6 +433,7 @@ func fileWrite(args map[string]string) []byte {
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
path = resolvePath(path)
content, ok := args["content"] content, ok := args["content"]
if !ok { if !ok {
content = "" content = ""
@@ -448,6 +454,7 @@ func fileWriteAppend(args map[string]string) []byte {
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
path = resolvePath(path)
content, ok := args["content"] content, ok := args["content"]
if !ok { if !ok {
content = "" content = ""
@@ -469,6 +476,8 @@ func fileDelete(args map[string]string) []byte {
return []byte(msg) return []byte(msg)
} }
path = resolvePath(path)
if err := removeFile(path); err != nil { if err := removeFile(path); err != nil {
msg := "failed to delete file; error: " + err.Error() msg := "failed to delete file; error: " + err.Error()
logger.Error(msg) logger.Error(msg)
@@ -486,6 +495,7 @@ func fileMove(args map[string]string) []byte {
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
src = resolvePath(src)
dst, ok := args["dst"] dst, ok := args["dst"]
if !ok || dst == "" { if !ok || dst == "" {
@@ -493,6 +503,7 @@ func fileMove(args map[string]string) []byte {
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
dst = resolvePath(dst)
if err := moveFile(src, dst); err != nil { if err := moveFile(src, dst); err != nil {
msg := "failed to move file; error: " + err.Error() msg := "failed to move file; error: " + err.Error()
@@ -511,6 +522,7 @@ func fileCopy(args map[string]string) []byte {
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
src = resolvePath(src)
dst, ok := args["dst"] dst, ok := args["dst"]
if !ok || dst == "" { if !ok || dst == "" {
@@ -518,6 +530,7 @@ func fileCopy(args map[string]string) []byte {
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
dst = resolvePath(dst)
if err := copyFile(src, dst); err != nil { if err := copyFile(src, dst); err != nil {
msg := "failed to copy file; error: " + err.Error() msg := "failed to copy file; error: " + err.Error()
@@ -535,6 +548,8 @@ func fileList(args map[string]string) []byte {
path = "." // default to current directory path = "." // default to current directory
} }
path = resolvePath(path)
files, err := listDirectory(path) files, err := listDirectory(path)
if err != nil { if err != nil {
msg := "failed to list directory; error: " + err.Error() msg := "failed to list directory; error: " + err.Error()
@@ -558,6 +573,13 @@ func fileList(args map[string]string) []byte {
// Helper functions for file operations // Helper functions for file operations
func resolvePath(p string) string {
if filepath.IsAbs(p) {
return p
}
return filepath.Join(cfg.CodingDir, p)
}
func readStringFromFile(filename string) (string, error) { func readStringFromFile(filename string) (string, error) {
data, err := os.ReadFile(filename) data, err := os.ReadFile(filename)
if err != nil { if err != nil {
@@ -648,12 +670,6 @@ func executeCommand(args map[string]string) []byte {
return []byte(msg) return []byte(msg)
} }
if !isCommandAllowed(command) {
msg := fmt.Sprintf("command '%s' is not allowed", command)
logger.Error(msg)
return []byte(msg)
}
// Get arguments - handle both single arg and multiple args // Get arguments - handle both single arg and multiple args
var cmdArgs []string var cmdArgs []string
if args["args"] != "" { if args["args"] != "" {
@@ -673,6 +689,12 @@ func executeCommand(args map[string]string) []byte {
} }
} }
if !isCommandAllowed(command, cmdArgs...) {
msg := fmt.Sprintf("command '%s' is not allowed", command)
logger.Error(msg)
return []byte(msg)
}
// Execute with timeout for safety // Execute with timeout for safety
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel() defer cancel()
@@ -907,7 +929,19 @@ func todoDelete(args map[string]string) []byte {
return jsonResult return jsonResult
} }
func isCommandAllowed(command string) bool { var gitReadSubcommands = map[string]bool{
"status": true,
"log": true,
"diff": true,
"show": true,
"branch": true,
"reflog": true,
"rev-parse": true,
"shortlog": true,
"describe": true,
}
func isCommandAllowed(command string, args ...string) bool {
allowedCommands := map[string]bool{ allowedCommands := map[string]bool{
"grep": true, "grep": true,
"sed": true, "sed": true,
@@ -937,8 +971,15 @@ func isCommandAllowed(command string) bool {
"whoami": true, "whoami": true,
"date": true, "date": true,
"uname": true, "uname": true,
"git": true,
} }
return allowedCommands[command] if !allowedCommands[command] {
return false
}
if command == "git" && len(args) > 0 {
return gitReadSubcommands[args[0]]
}
return true
} }
func summarizeChat(args map[string]string) []byte { func summarizeChat(args map[string]string) []byte {
@@ -946,7 +987,7 @@ func summarizeChat(args map[string]string) []byte {
return []byte("No chat history to summarize.") return []byte("No chat history to summarize.")
} }
// Format chat history for the agent // Format chat history for the agent
chatText := chatToText(true) // include system and tool messages chatText := chatToText(chatBody.Messages, true) // include system and tool messages
return []byte(chatText) return []byte(chatText)
} }
@@ -1303,14 +1344,14 @@ var baseTools = []models.Tool{
Type: "function", Type: "function",
Function: models.ToolFunc{ Function: models.ToolFunc{
Name: "execute_command", Name: "execute_command",
Description: "Execute a shell command safely. Use when you need to run system commands like grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd df free ps top du whoami date uname", Description: "Execute a shell command safely. Use when you need to run system commands like grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd df free ps top du whoami date uname. Git is allowed for read-only operations: status, log, diff, show, branch, reflog, rev-parse, shortlog, describe.",
Parameters: models.ToolFuncParams{ Parameters: models.ToolFuncParams{
Type: "object", Type: "object",
Required: []string{"command"}, Required: []string{"command"},
Properties: map[string]models.ToolArgProps{ Properties: map[string]models.ToolArgProps{
"command": models.ToolArgProps{ "command": models.ToolArgProps{
Type: "string", Type: "string",
Description: "command to execute (only commands from whitelist are allowed: grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd df free ps top du whoami date uname", Description: "command to execute (only commands from whitelist are allowed: grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd df free ps top du whoami date uname; git allowed for reads: status log diff show branch reflog rev-parse shortlog describe)",
}, },
"args": models.ToolArgProps{ "args": models.ToolArgProps{
Type: "string", Type: "string",

571
tui.go
View File

@@ -7,7 +7,6 @@ import (
_ "image/jpeg" _ "image/jpeg"
_ "image/png" _ "image/png"
"os" "os"
"os/exec"
"path" "path"
"strconv" "strconv"
"strings" "strings"
@@ -50,7 +49,6 @@ var (
imgPage = "imgPage" imgPage = "imgPage"
filePickerPage = "filePicker" filePickerPage = "filePicker"
exportDir = "chat_exports" exportDir = "chat_exports"
// For overlay search functionality // For overlay search functionality
searchField *tview.InputField searchField *tview.InputField
searchPageName = "searchOverlay" searchPageName = "searchOverlay"
@@ -77,24 +75,27 @@ var (
[yellow]Ctrl+n[white]: start a new chat [yellow]Ctrl+n[white]: start a new chat
[yellow]Ctrl+o[white]: open image file picker [yellow]Ctrl+o[white]: open image file picker
[yellow]Ctrl+p[white]: props edit form (min-p, dry, etc.) [yellow]Ctrl+p[white]: props edit form (min-p, dry, etc.)
[yellow]Ctrl+v[white]: switch between /completion and /chat api (if provided in config) [yellow]Ctrl+v[white]: show API link selection popup to choose current API
[yellow]Ctrl+r[white]: start/stop recording from your microphone (needs stt server or whisper binary) [yellow]Ctrl+r[white]: start/stop recording from your microphone (needs stt server or whisper binary)
[yellow]Ctrl+t[white]: remove thinking (<think>) and tool messages from context (delete from chat) [yellow]Ctrl+t[white]: remove thinking (<think>) and tool messages from context (delete from chat)
[yellow]Ctrl+l[white]: rotate through free OpenRouter models (if openrouter api) or update connected model name (llamacpp) [yellow]Ctrl+l[white]: show model selection popup to choose current model
[yellow]Ctrl+k[white]: switch tool use (recommend tool use to llm after user msg) [yellow]Ctrl+k[white]: switch tool use (recommend tool use to llm after user msg)
[yellow]Ctrl+a[white]: interrupt tts (needs tts server) [yellow]Ctrl+a[white]: interrupt tts (needs tts server)
[yellow]Alt+0[white]: replay last message via tts (needs tts server)
[yellow]Ctrl+g[white]: open RAG file manager (load files for context retrieval) [yellow]Ctrl+g[white]: open RAG file manager (load files for context retrieval)
[yellow]Ctrl+y[white]: list loaded RAG files (view and manage loaded files) [yellow]Ctrl+y[white]: list loaded RAG files (view and manage loaded files)
[yellow]Ctrl+q[white]: cycle through mentioned chars in chat, to pick persona to send next msg as [yellow]Ctrl+q[white]: show user role selection popup to choose who sends next msg as
[yellow]Ctrl+x[white]: cycle through mentioned chars in chat, to pick persona to send next msg as (for llm) [yellow]Ctrl+x[white]: show bot role selection popup to choose which agent responds next
[yellow]Alt+1[white]: toggle shell mode (execute commands locally) [yellow]Alt+1[white]: toggle shell mode (execute commands locally)
[yellow]Alt+2[white]: toggle auto-scrolling (for reading while LLM types) [yellow]Alt+2[white]: toggle auto-scrolling (for reading while LLM types)
[yellow]Alt+3[white]: summarize chat history and start new chat with summary as tool response [yellow]Alt+3[white]: summarize chat history and start new chat with summary as tool response
[yellow]Alt+4[white]: edit msg role [yellow]Alt+4[white]: edit msg role
[yellow]Alt+5[white]: toggle system and tool messages display [yellow]Alt+5[white]: toggle system and tool messages display
[yellow]Alt+6[white]: toggle status line visibility [yellow]Alt+6[white]: toggle status line visibility
[yellow]Alt+7[white]: toggle role injection (inject role in messages)
[yellow]Alt+8[white]: show char img or last picked img [yellow]Alt+8[white]: show char img or last picked img
[yellow]Alt+9[white]: warm up (load) selected llama.cpp model [yellow]Alt+9[white]: warm up (load) selected llama.cpp model
[yellow]Alt+t[white]: toggle thinking blocks visibility (collapse/expand <think> blocks)
=== scrolling chat window (some keys similar to vim) === === scrolling chat window (some keys similar to vim) ===
[yellow]arrows up/down and j/k[white]: scroll up and down [yellow]arrows up/down and j/k[white]: scroll up and down
@@ -106,6 +107,13 @@ var (
=== tables (chat history, agent pick, file pick, properties) === === tables (chat history, agent pick, file pick, properties) ===
[yellow]x[white]: to exit the table page [yellow]x[white]: to exit the table page
=== filepicker ===
[yellow]c[white]: (in file picker) set current dir as CodingDir
[yellow]x[white]: to exit
=== shell mode ===
[yellow]@match->Tab[white]: file completion (type @ in input to get file suggestions)
=== status line === === status line ===
%s %s
@@ -167,328 +175,6 @@ Press <Enter> or 'x' to return
} }
) )
func toggleShellMode() {
shellMode = !shellMode
if shellMode {
// Update input placeholder to indicate shell mode
textArea.SetPlaceholder("SHELL MODE: Enter command and press <Esc> to execute")
} else {
// Reset to normal mode
textArea.SetPlaceholder("input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message. Alt+1 to exit shell mode")
}
updateStatusLine()
}
func updateFlexLayout() {
if fullscreenMode {
// flex already contains only focused widget; do nothing
return
}
flex.Clear()
flex.AddItem(textView, 0, 40, false)
flex.AddItem(textArea, 0, 10, false)
if positionVisible {
flex.AddItem(statusLineWidget, 0, 2, false)
}
// Keep focus on currently focused widget
focused := app.GetFocus()
if focused == textView {
app.SetFocus(textView)
} else {
app.SetFocus(textArea)
}
}
func executeCommandAndDisplay(cmdText string) {
// Parse the command (split by spaces, but handle quoted arguments)
cmdParts := parseCommand(cmdText)
if len(cmdParts) == 0 {
fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n")
if scrollToEndEnabled {
textView.ScrollToEnd()
}
colorText()
return
}
command := cmdParts[0]
args := []string{}
if len(cmdParts) > 1 {
args = cmdParts[1:]
}
// Create the command execution
cmd := exec.Command(command, args...)
// Execute the command and get output
output, err := cmd.CombinedOutput()
// Add the command being executed to the chat
fmt.Fprintf(textView, "\n[yellow]$ %s[-:-:-]\n", cmdText)
var outputContent string
if err != nil {
// Include both output and error
errorMsg := "Error: " + err.Error()
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", errorMsg)
if len(output) > 0 {
outputStr := string(output)
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputStr)
outputContent = errorMsg + "\n" + outputStr
} else {
outputContent = errorMsg
}
} else {
// Only output if successful
if len(output) > 0 {
outputStr := string(output)
fmt.Fprintf(textView, "[green]%s[-:-:-]\n", outputStr)
outputContent = outputStr
} else {
successMsg := "Command executed successfully (no output)"
fmt.Fprintf(textView, "[green]%s[-:-:-]\n", successMsg)
outputContent = successMsg
}
}
// Combine command and output in a single message for chat history
combinedContent := "$ " + cmdText + "\n\n" + outputContent
combinedMsg := models.RoleMsg{
Role: cfg.ToolRole,
Content: combinedContent,
}
chatBody.Messages = append(chatBody.Messages, combinedMsg)
// Scroll to end and update colors
if scrollToEndEnabled {
textView.ScrollToEnd()
}
colorText()
}
// parseCommand splits command string handling quotes properly
func parseCommand(cmd string) []string {
var args []string
var current string
var inQuotes bool
var quoteChar rune
for _, r := range cmd {
switch r {
case '"', '\'':
if inQuotes {
if r == quoteChar {
inQuotes = false
} else {
current += string(r)
}
} else {
inQuotes = true
quoteChar = r
}
case ' ', '\t':
if inQuotes {
current += string(r)
} else if current != "" {
args = append(args, current)
current = ""
}
default:
current += string(r)
}
}
if current != "" {
args = append(args, current)
}
return args
}
// Global variables for search state
var searchResults []int
var searchResultLengths []int // To store the length of each match in the formatted string
var searchIndex int
var searchText string
var originalTextForSearch string
// performSearch searches for the given term in the textView content and highlights matches
func performSearch(term string) {
searchText = term
if searchText == "" {
searchResults = nil
searchResultLengths = nil
originalTextForSearch = ""
// Re-render text without highlights
textView.SetText(chatToText(cfg.ShowSys))
colorText()
return
}
// Get formatted text and search directly in it to avoid mapping issues
formattedText := textView.GetText(true)
originalTextForSearch = formattedText
searchTermLower := strings.ToLower(searchText)
formattedTextLower := strings.ToLower(formattedText)
// Find all occurrences of the search term in the formatted text directly
formattedSearchResults := []int{}
searchStart := 0
for {
pos := strings.Index(formattedTextLower[searchStart:], searchTermLower)
if pos == -1 {
break
}
absolutePos := searchStart + pos
formattedSearchResults = append(formattedSearchResults, absolutePos)
searchStart = absolutePos + len(searchText)
}
if len(formattedSearchResults) == 0 {
// No matches found
searchResults = nil
searchResultLengths = nil
notification := "Pattern not found: " + term
if err := notifyUser("search", notification); err != nil {
logger.Error("failed to send notification", "error", err)
}
return
}
// Store the formatted text positions and lengths for accurate highlighting
searchResults = formattedSearchResults
// Create lengths array - all matches have the same length as the search term
searchResultLengths = make([]int, len(formattedSearchResults))
for i := range searchResultLengths {
searchResultLengths[i] = len(searchText)
}
searchIndex = 0
highlightCurrentMatch()
}
// highlightCurrentMatch highlights the current search match and scrolls to it
func highlightCurrentMatch() {
if len(searchResults) == 0 || searchIndex >= len(searchResults) {
return
}
// Get the stored formatted text
formattedText := originalTextForSearch
// For tview to properly support highlighting and scrolling, we need to work with its region system
// Instead of just applying highlights, we need to add region tags to the text
highlightedText := addRegionTags(formattedText, searchResults, searchResultLengths, searchIndex, searchText)
// Update the text view with the text that includes region tags
textView.SetText(highlightedText)
// Highlight the current region and scroll to it
// Need to identify which position in the results array corresponds to the current match
// The region ID will be search_<position>_<index>
currentRegion := fmt.Sprintf("search_%d_%d", searchResults[searchIndex], searchIndex)
textView.Highlight(currentRegion).ScrollToHighlight()
// Send notification about which match we're at
notification := fmt.Sprintf("Match %d of %d", searchIndex+1, len(searchResults))
if err := notifyUser("search", notification); err != nil {
logger.Error("failed to send notification", "error", err)
}
}
// showSearchBar shows the search input field as an overlay
func showSearchBar() {
// Create a temporary flex to combine search and main content
updatedFlex := tview.NewFlex().SetDirection(tview.FlexRow).
AddItem(searchField, 3, 0, true). // Search field at top
AddItem(flex, 0, 1, false) // Main flex layout below
// Add the search overlay as a page
pages.AddPage(searchPageName, updatedFlex, true, true)
app.SetFocus(searchField)
}
// hideSearchBar hides the search input field
func hideSearchBar() {
pages.RemovePage(searchPageName)
// Return focus to the text view
app.SetFocus(textView)
// Clear the search field
searchField.SetText("")
}
// Global variables for index overlay functionality
var indexPageName = "indexOverlay"
// showIndexBar shows the index input field as an overlay at the top
func showIndexBar() {
// Create a temporary flex to combine index input and main content
updatedFlex := tview.NewFlex().SetDirection(tview.FlexRow).
AddItem(indexPickWindow, 3, 0, true). // Index field at top
AddItem(flex, 0, 1, false) // Main flex layout below
// Add the index overlay as a page
pages.AddPage(indexPageName, updatedFlex, true, true)
app.SetFocus(indexPickWindow)
}
// hideIndexBar hides the index input field
func hideIndexBar() {
pages.RemovePage(indexPageName)
// Return focus to the text view
app.SetFocus(textView)
// Clear the index field
indexPickWindow.SetText("")
}
// addRegionTags adds region tags to search matches in the text for tview highlighting
func addRegionTags(text string, positions []int, lengths []int, currentIdx int, searchTerm string) string {
if len(positions) == 0 {
return text
}
var result strings.Builder
lastEnd := 0
for i, pos := range positions {
endPos := pos + lengths[i]
// Add text before this match
if pos > lastEnd {
result.WriteString(text[lastEnd:pos])
}
// The matched text, which may contain its own formatting tags
actualText := text[pos:endPos]
// Add region tag and highlighting for this match
// Use a unique region id that includes the match index to avoid conflicts
regionId := fmt.Sprintf("search_%d_%d", pos, i) // position + index to ensure uniqueness
var highlightStart, highlightEnd string
if i == currentIdx {
// Current match - use different highlighting
highlightStart = fmt.Sprintf(`["%s"][yellow:blue:b]`, regionId) // Current match with region and special highlight
highlightEnd = `[-:-:-][""]` // Reset formatting and close region
} else {
// Other matches - use regular highlighting
highlightStart = fmt.Sprintf(`["%s"][gold:red:u]`, regionId) // Other matches with region and highlight
highlightEnd = `[-:-:-][""]` // Reset formatting and close region
}
result.WriteString(highlightStart)
result.WriteString(actualText)
result.WriteString(highlightEnd)
lastEnd = endPos
}
// Add the rest of the text after the last processed match
if lastEnd < len(text) {
result.WriteString(text[lastEnd:])
}
return result.String()
}
// searchNext finds the next occurrence of the search term
func searchNext() {
if len(searchResults) == 0 {
if err := notifyUser("search", "No search results to navigate"); err != nil {
logger.Error("failed to send notification", "error", err)
}
return
}
searchIndex = (searchIndex + 1) % len(searchResults)
highlightCurrentMatch()
}
// searchPrev finds the previous occurrence of the search term
func searchPrev() {
if len(searchResults) == 0 {
if err := notifyUser("search", "No search results to navigate"); err != nil {
logger.Error("failed to send notification", "error", err)
}
return
}
if searchIndex == 0 {
searchIndex = len(searchResults) - 1
} else {
searchIndex--
}
highlightCurrentMatch()
}
func init() { func init() {
tview.Styles = colorschemes["default"] tview.Styles = colorschemes["default"]
app = tview.NewApplication() app = tview.NewApplication()
@@ -496,6 +182,37 @@ func init() {
textArea = tview.NewTextArea(). textArea = tview.NewTextArea().
SetPlaceholder("input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message.") SetPlaceholder("input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message.")
textArea.SetBorder(true).SetTitle("input") textArea.SetBorder(true).SetTitle("input")
// Add input capture for @ completion
textArea.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
if !shellMode {
return event
}
// Handle Tab key for file completion
if event.Key() == tcell.KeyTab {
currentText := textArea.GetText()
row, col, _, _ := textArea.GetCursor()
// Calculate absolute position from row/col
lines := strings.Split(currentText, "\n")
cursorPos := 0
for i := 0; i < row && i < len(lines); i++ {
cursorPos += len(lines[i]) + 1 // +1 for newline
}
cursorPos += col
// Look backwards from cursor to find @
if cursorPos > 0 {
// Find the last @ before cursor
textBeforeCursor := currentText[:cursorPos]
atIndex := strings.LastIndex(textBeforeCursor, "@")
if atIndex >= 0 {
// Extract the partial match text after @
filter := textBeforeCursor[atIndex+1:]
showFileCompletionPopup(filter)
return nil // Consume the Tab event
}
}
}
return event
})
textView = tview.NewTextView(). textView = tview.NewTextView().
SetDynamicColors(true). SetDynamicColors(true).
SetRegions(true). SetRegions(true).
@@ -517,8 +234,8 @@ func init() {
searchResults = nil // Clear search results searchResults = nil // Clear search results
searchResultLengths = nil // Clear search result lengths searchResultLengths = nil // Clear search result lengths
originalTextForSearch = "" originalTextForSearch = ""
textView.SetText(chatToText(cfg.ShowSys)) // Reset text without search regions textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys)) // Reset text without search regions
colorText() // Apply normal chat coloring colorText() // Apply normal chat coloring
} else { } else {
// Original logic if no search is active // Original logic if no search is active
currentSelection := textView.GetHighlights() currentSelection := textView.GetHighlights()
@@ -532,8 +249,7 @@ func init() {
}) })
textView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey { textView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
// Handle vim-like navigation in TextView // Handle vim-like navigation in TextView
switch event.Key() { if event.Key() == tcell.KeyRune {
case tcell.KeyRune:
switch event.Rune() { switch event.Rune() {
case 'j': case 'j':
// For line down // For line down
@@ -594,7 +310,7 @@ func init() {
} }
chatBody.Messages[selectedIndex].Content = editedMsg chatBody.Messages[selectedIndex].Content = editedMsg
// change textarea // change textarea
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
pages.RemovePage(editMsgPage) pages.RemovePage(editMsgPage)
editMode = false editMode = false
return nil return nil
@@ -610,7 +326,6 @@ func init() {
// colorText() // colorText()
// updateStatusLine() // updateStatusLine()
}) })
roleEditWindow = tview.NewInputField(). roleEditWindow = tview.NewInputField().
SetLabel("Enter new role: "). SetLabel("Enter new role: ").
SetPlaceholder("e.g., user, assistant, system, tool"). SetPlaceholder("e.g., user, assistant, system, tool").
@@ -627,7 +342,7 @@ func init() {
} }
if selectedIndex >= 0 && selectedIndex < len(chatBody.Messages) { if selectedIndex >= 0 && selectedIndex < len(chatBody.Messages) {
chatBody.Messages[selectedIndex].Role = newRole chatBody.Messages[selectedIndex].Role = newRole
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
pages.RemovePage(roleEditPage) pages.RemovePage(roleEditPage)
} }
@@ -671,17 +386,18 @@ func init() {
return nil return nil
} }
m := chatBody.Messages[selectedIndex] m := chatBody.Messages[selectedIndex]
if roleEditMode { switch {
case roleEditMode:
hideIndexBar() // Hide overlay first hideIndexBar() // Hide overlay first
// Set the current role as the default text in the input field // Set the current role as the default text in the input field
roleEditWindow.SetText(m.Role) roleEditWindow.SetText(m.Role)
pages.AddPage(roleEditPage, roleEditWindow, true, true) pages.AddPage(roleEditPage, roleEditWindow, true, true)
roleEditMode = false // Reset the flag roleEditMode = false // Reset the flag
} else if editMode { case editMode:
hideIndexBar() // Hide overlay first hideIndexBar() // Hide overlay first
pages.AddPage(editMsgPage, editArea, true, true) pages.AddPage(editMsgPage, editArea, true, true)
editArea.SetText(m.Content, true) editArea.SetText(m.Content, true)
} else { default:
if err := copyToClipboard(m.Content); err != nil { if err := copyToClipboard(m.Content); err != nil {
logger.Error("failed to copy to clipboard", "error", err) logger.Error("failed to copy to clipboard", "error", err)
} }
@@ -739,7 +455,7 @@ func init() {
searchResults = nil searchResults = nil
searchResultLengths = nil searchResultLengths = nil
originalTextForSearch = "" originalTextForSearch = ""
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
return return
} else { } else {
@@ -759,22 +475,19 @@ func init() {
pages.RemovePage(helpPage) pages.RemovePage(helpPage)
}) })
helpView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey { helpView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
switch event.Key() { if event.Key() == tcell.KeyEnter {
case tcell.KeyEnter:
return event return event
default: }
if event.Key() == tcell.KeyRune && event.Rune() == 'x' { if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
pages.RemovePage(helpPage) pages.RemovePage(helpPage)
return nil return nil
}
} }
return nil return nil
}) })
// //
imgView = tview.NewImage() imgView = tview.NewImage()
imgView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey { imgView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
switch event.Key() { if event.Key() == tcell.KeyEnter {
case tcell.KeyEnter:
pages.RemovePage(imgPage) pages.RemovePage(imgPage)
return event return event
} }
@@ -787,7 +500,7 @@ func init() {
// //
textArea.SetMovedFunc(updateStatusLine) textArea.SetMovedFunc(updateStatusLine)
updateStatusLine() updateStatusLine()
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
if scrollToEndEnabled { if scrollToEndEnabled {
textView.ScrollToEnd() textView.ScrollToEnd()
@@ -801,7 +514,7 @@ func init() {
if event.Key() == tcell.KeyRune && event.Rune() == '5' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == '5' && event.Modifiers()&tcell.ModAlt != 0 {
// switch cfg.ShowSys // switch cfg.ShowSys
cfg.ShowSys = !cfg.ShowSys cfg.ShowSys = !cfg.ShowSys
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
} }
if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 {
@@ -828,6 +541,25 @@ func init() {
} }
updateStatusLine() updateStatusLine()
} }
// Handle Alt+7 to toggle injectRole
if event.Key() == tcell.KeyRune && event.Rune() == '7' && event.Modifiers()&tcell.ModAlt != 0 {
injectRole = !injectRole
updateStatusLine()
}
// Handle Alt+T to toggle thinking block visibility
if event.Key() == tcell.KeyRune && event.Rune() == 't' && event.Modifiers()&tcell.ModAlt != 0 {
thinkingCollapsed = !thinkingCollapsed
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText()
status := "expanded"
if thinkingCollapsed {
status = "collapsed"
}
if err := notifyUser("thinking", "Thinking blocks "+status); err != nil {
logger.Error("failed to send notification", "error", err)
}
return nil
}
if event.Key() == tcell.KeyF1 { if event.Key() == tcell.KeyF1 {
// chatList, err := loadHistoryChats() // chatList, err := loadHistoryChats()
chatList, err := store.GetChatByChar(cfg.AssistantRole) chatList, err := store.GetChatByChar(cfg.AssistantRole)
@@ -855,7 +587,7 @@ func init() {
updateStatusLine() updateStatusLine()
return nil return nil
} }
if event.Key() == tcell.KeyF2 { if event.Key() == tcell.KeyF2 && !botRespMode {
// regen last msg // regen last msg
if len(chatBody.Messages) == 0 { if len(chatBody.Messages) == 0 {
if err := notifyUser("info", "no messages to regenerate"); err != nil { if err := notifyUser("info", "no messages to regenerate"); err != nil {
@@ -866,8 +598,12 @@ func init() {
chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1] chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
// there is no case where user msg is regenerated // there is no case where user msg is regenerated
// lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role // lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
go chatRound("", cfg.UserRole, textView, true, false) // go chatRound("", cfg.UserRole, textView, true, false)
if cfg.TTS_ENABLED {
TTSDoneChan <- true
}
chatRoundChan <- &models.ChatRoundReq{Role: cfg.UserRole, Regen: true}
return nil return nil
} }
if event.Key() == tcell.KeyF3 && !botRespMode { if event.Key() == tcell.KeyF3 && !botRespMode {
@@ -888,7 +624,10 @@ func init() {
return nil return nil
} }
chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1] chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
if cfg.TTS_ENABLED {
TTSDoneChan <- true
}
colorText() colorText()
return nil return nil
} }
@@ -1012,7 +751,7 @@ func init() {
return nil return nil
} }
if event.Key() == tcell.KeyCtrlN { if event.Key() == tcell.KeyCtrlN {
startNewChat() startNewChat(true)
return nil return nil
} }
if event.Key() == tcell.KeyCtrlO { if event.Key() == tcell.KeyCtrlO {
@@ -1022,64 +761,21 @@ func init() {
return nil return nil
} }
if event.Key() == tcell.KeyCtrlL { if event.Key() == tcell.KeyCtrlL {
// Check if the current API is an OpenRouter API // Show model selection popup instead of rotating models
if strings.Contains(cfg.CurrentAPI, "openrouter.ai/api/v1/") { showModelSelectionPopup()
// Rotate through OpenRouter free models
if len(ORFreeModels) > 0 {
currentORModelIndex = (currentORModelIndex + 1) % len(ORFreeModels)
chatBody.Model = ORFreeModels[currentORModelIndex]
cfg.CurrentModel = chatBody.Model
}
updateStatusLine()
} else {
localModelsMu.RLock()
if len(LocalModels) > 0 {
currentLocalModelIndex = (currentLocalModelIndex + 1) % len(LocalModels)
chatBody.Model = LocalModels[currentLocalModelIndex]
cfg.CurrentModel = chatBody.Model
}
localModelsMu.RUnlock()
updateStatusLine()
// // For non-OpenRouter APIs, use the old logic
// go func() {
// fetchLCPModelName() // blocks
// updateStatusLine()
// }()
}
return nil return nil
} }
if event.Key() == tcell.KeyCtrlT { if event.Key() == tcell.KeyCtrlT {
// clear context // clear context
// remove tools and thinking // remove tools and thinking
removeThinking(chatBody) removeThinking(chatBody)
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText() colorText()
return nil return nil
} }
if event.Key() == tcell.KeyCtrlV { if event.Key() == tcell.KeyCtrlV {
// switch between API links using index-based rotation // Show API link selection popup instead of rotating APIs
if len(cfg.ApiLinks) == 0 { showAPILinkSelectionPopup()
// No API links to rotate through
return nil
}
// Find current API in the list to get the current index
currentIndex := -1
for i, api := range cfg.ApiLinks {
if api == cfg.CurrentAPI {
currentIndex = i
break
}
}
// If current API is not in the list, start from beginning
// Otherwise, advance to next API in the list (with wrap-around)
if currentIndex == -1 {
currentAPIIndex = 0
} else {
currentAPIIndex = (currentIndex + 1) % len(cfg.ApiLinks)
}
cfg.CurrentAPI = cfg.ApiLinks[currentAPIIndex]
choseChunkParser()
updateStatusLine()
return nil return nil
} }
if event.Key() == tcell.KeyCtrlS { if event.Key() == tcell.KeyCtrlS {
@@ -1159,65 +855,37 @@ func init() {
} }
} }
// I need keybind for tts to shut up // I need keybind for tts to shut up
if event.Key() == tcell.KeyCtrlA { if event.Key() == tcell.KeyCtrlA && cfg.TTS_ENABLED {
// textArea.SetText("pressed ctrl+A", true) TTSDoneChan <- true
if cfg.TTS_ENABLED { }
// audioStream.TextChan <- chunk if event.Key() == tcell.KeyRune && event.Rune() == '0' && event.Modifiers()&tcell.ModAlt != 0 && cfg.TTS_ENABLED {
if len(chatBody.Messages) > 0 {
// Stop any currently playing TTS first
TTSDoneChan <- true TTSDoneChan <- true
lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
cleanedText := models.CleanText(lastMsg.Content)
if cleanedText != "" {
go orator.Speak(cleanedText)
}
} }
return nil
} }
if event.Key() == tcell.KeyCtrlW { if event.Key() == tcell.KeyCtrlW {
// INFO: continue bot/text message // INFO: continue bot/text message
// without new role // without new role
lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
go chatRound("", lastRole, textView, false, true) // go chatRound("", lastRole, textView, false, true)
chatRoundChan <- &models.ChatRoundReq{Role: lastRole, Resume: true}
return nil return nil
} }
if event.Key() == tcell.KeyCtrlQ { if event.Key() == tcell.KeyCtrlQ {
persona := cfg.UserRole // Show user role selection popup instead of cycling through roles
if cfg.WriteNextMsgAs != "" { showUserRoleSelectionPopup()
persona = cfg.WriteNextMsgAs
}
roles := listRolesWithUser()
logger.Info("list roles", "roles", roles)
for i, role := range roles {
if strings.EqualFold(role, persona) {
if i == len(roles)-1 {
cfg.WriteNextMsgAs = roles[0] // reached last, get first
break
}
cfg.WriteNextMsgAs = roles[i+1] // get next role
logger.Info("picked role", "roles", roles, "index", i+1)
break
}
}
updateStatusLine()
return nil return nil
} }
if event.Key() == tcell.KeyCtrlX { if event.Key() == tcell.KeyCtrlX {
persona := cfg.AssistantRole // Show bot role selection popup instead of cycling through roles
if cfg.WriteNextMsgAsCompletionAgent != "" { showBotRoleSelectionPopup()
persona = cfg.WriteNextMsgAsCompletionAgent
}
roles := chatBody.ListRoles()
if len(roles) == 0 {
logger.Warn("empty roles in chat")
}
if !strInSlice(cfg.AssistantRole, roles) {
roles = append(roles, cfg.AssistantRole)
}
for i, role := range roles {
if strings.EqualFold(role, persona) {
if i == len(roles)-1 {
cfg.WriteNextMsgAsCompletionAgent = roles[0] // reached last, get first
break
}
cfg.WriteNextMsgAsCompletionAgent = roles[i+1] // get next role
logger.Info("picked role", "roles", roles, "index", i+1)
break
}
}
updateStatusLine()
return nil return nil
} }
if event.Key() == tcell.KeyCtrlG { if event.Key() == tcell.KeyCtrlG {
@@ -1295,7 +963,6 @@ func init() {
// cannot send msg in editMode or botRespMode // cannot send msg in editMode or botRespMode
if event.Key() == tcell.KeyEscape && !editMode && !botRespMode { if event.Key() == tcell.KeyEscape && !editMode && !botRespMode {
msgText := textArea.GetText() msgText := textArea.GetText()
// TODO: add shellmode command -> output to the chat history, or at least have an option
if shellMode && msgText != "" { if shellMode && msgText != "" {
// In shell mode, execute command instead of sending to LLM // In shell mode, execute command instead of sending to LLM
executeCommandAndDisplay(msgText) executeCommandAndDisplay(msgText)
@@ -1335,7 +1002,8 @@ func init() {
} }
colorText() colorText()
} }
go chatRound(msgText, persona, textView, false, false) // go chatRound(msgText, persona, textView, false, false)
chatRoundChan <- &models.ChatRoundReq{Role: persona, UserMsg: msgText}
// Also clear any image attachment after sending the message // Also clear any image attachment after sending the message
go func() { go func() {
// Wait a short moment for the message to be processed, then clear the image attachment // Wait a short moment for the message to be processed, then clear the image attachment
@@ -1351,7 +1019,6 @@ func init() {
app.SetFocus(focusSwitcher[currentF]) app.SetFocus(focusSwitcher[currentF])
return nil return nil
} }
if isASCII(string(event.Rune())) && !botRespMode { if isASCII(string(event.Rune())) && !botRespMode {
return event return event
} }