Fix (race): mutex chatbody
This commit is contained in:
104
bot.go
104
bot.go
@@ -37,7 +37,7 @@ var (
|
||||
chunkChan = make(chan string, 10)
|
||||
openAIToolChan = make(chan string, 10)
|
||||
streamDone = make(chan bool, 1)
|
||||
chatBody *models.ChatBody
|
||||
chatBody *models.SafeChatBody
|
||||
store storage.FullRepo
|
||||
defaultFirstMsg = "Hello! What can I do for you?"
|
||||
defaultStarter = []models.RoleMsg{}
|
||||
@@ -262,13 +262,13 @@ func warmUpModel() {
|
||||
return
|
||||
}
|
||||
// Check if model is already loaded
|
||||
loaded, err := isModelLoaded(chatBody.Model)
|
||||
loaded, err := isModelLoaded(chatBody.GetModel())
|
||||
if err != nil {
|
||||
logger.Debug("failed to check model status", "model", chatBody.Model, "error", err)
|
||||
logger.Debug("failed to check model status", "model", chatBody.GetModel(), "error", err)
|
||||
// Continue with warmup attempt anyway
|
||||
}
|
||||
if loaded {
|
||||
showToast("model already loaded", "Model "+chatBody.Model+" is already loaded.")
|
||||
showToast("model already loaded", "Model "+chatBody.GetModel()+" is already loaded.")
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
@@ -277,7 +277,7 @@ func warmUpModel() {
|
||||
switch {
|
||||
case strings.HasSuffix(cfg.CurrentAPI, "/completion"):
|
||||
// Old completion endpoint
|
||||
req := models.NewLCPReq(".", chatBody.Model, nil, map[string]float32{
|
||||
req := models.NewLCPReq(".", chatBody.GetModel(), nil, map[string]float32{
|
||||
"temperature": 0.8,
|
||||
"dry_multiplier": 0.0,
|
||||
"min_p": 0.05,
|
||||
@@ -289,7 +289,7 @@ func warmUpModel() {
|
||||
// OpenAI-compatible chat endpoint
|
||||
req := models.OpenAIReq{
|
||||
ChatBody: &models.ChatBody{
|
||||
Model: chatBody.Model,
|
||||
Model: chatBody.GetModel(),
|
||||
Messages: []models.RoleMsg{
|
||||
{Role: "system", Content: "."},
|
||||
},
|
||||
@@ -313,7 +313,7 @@ func warmUpModel() {
|
||||
}
|
||||
resp.Body.Close()
|
||||
// Start monitoring for model load completion
|
||||
monitorModelLoad(chatBody.Model)
|
||||
monitorModelLoad(chatBody.GetModel())
|
||||
}()
|
||||
}
|
||||
|
||||
@@ -418,7 +418,9 @@ func fetchLCPModelsWithStatus() (*models.LCPModels, error) {
|
||||
if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
localModelsMu.Lock()
|
||||
localModelsData = data
|
||||
localModelsMu.Unlock()
|
||||
return data, nil
|
||||
}
|
||||
|
||||
@@ -821,10 +823,10 @@ func chatRound(r *models.ChatRoundReq) error {
|
||||
}
|
||||
go sendMsgToLLM(reader)
|
||||
logger.Debug("looking at vars in chatRound", "msg", r.UserMsg, "regen", r.Regen, "resume", r.Resume)
|
||||
msgIdx := len(chatBody.Messages)
|
||||
msgIdx := chatBody.GetMessageCount()
|
||||
if !r.Resume {
|
||||
// Add empty message to chatBody immediately so it persists during Alt+T toggle
|
||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{
|
||||
chatBody.AppendMessage(models.RoleMsg{
|
||||
Role: botPersona, Content: "",
|
||||
})
|
||||
nl := "\n\n"
|
||||
@@ -836,7 +838,7 @@ func chatRound(r *models.ChatRoundReq) error {
|
||||
}
|
||||
fmt.Fprintf(textView, "%s[-:-:b](%d) %s[-:-:-]\n", nl, msgIdx, roleToIcon(botPersona))
|
||||
} else {
|
||||
msgIdx = len(chatBody.Messages) - 1
|
||||
msgIdx = chatBody.GetMessageCount() - 1
|
||||
}
|
||||
respText := strings.Builder{}
|
||||
toolResp := strings.Builder{}
|
||||
@@ -893,7 +895,10 @@ out:
|
||||
fmt.Fprint(textView, chunk)
|
||||
respText.WriteString(chunk)
|
||||
// Update the message in chatBody.Messages so it persists during Alt+T
|
||||
chatBody.Messages[msgIdx].Content = respText.String()
|
||||
chatBody.UpdateMessageFunc(msgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||
msg.Content = respText.String()
|
||||
return msg
|
||||
})
|
||||
if scrollToEndEnabled {
|
||||
textView.ScrollToEnd()
|
||||
}
|
||||
@@ -936,29 +941,32 @@ out:
|
||||
}
|
||||
botRespMode = false
|
||||
if r.Resume {
|
||||
chatBody.Messages[len(chatBody.Messages)-1].Content += respText.String()
|
||||
updatedMsg := chatBody.Messages[len(chatBody.Messages)-1]
|
||||
processedMsg := processMessageTag(&updatedMsg)
|
||||
chatBody.Messages[len(chatBody.Messages)-1] = *processedMsg
|
||||
if msgStats != nil && chatBody.Messages[len(chatBody.Messages)-1].Role != cfg.ToolRole {
|
||||
chatBody.Messages[len(chatBody.Messages)-1].Stats = msgStats
|
||||
chatBody.UpdateMessageFunc(chatBody.GetMessageCount()-1, func(msg models.RoleMsg) models.RoleMsg {
|
||||
msg.Content += respText.String()
|
||||
processedMsg := processMessageTag(&msg)
|
||||
if msgStats != nil && processedMsg.Role != cfg.ToolRole {
|
||||
processedMsg.Stats = msgStats
|
||||
}
|
||||
return *processedMsg
|
||||
})
|
||||
} else {
|
||||
chatBody.Messages[msgIdx].Content = respText.String()
|
||||
processedMsg := processMessageTag(&chatBody.Messages[msgIdx])
|
||||
chatBody.Messages[msgIdx] = *processedMsg
|
||||
if msgStats != nil && chatBody.Messages[msgIdx].Role != cfg.ToolRole {
|
||||
chatBody.Messages[msgIdx].Stats = msgStats
|
||||
chatBody.UpdateMessageFunc(msgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||
msg.Content = respText.String()
|
||||
processedMsg := processMessageTag(&msg)
|
||||
if msgStats != nil && processedMsg.Role != cfg.ToolRole {
|
||||
processedMsg.Stats = msgStats
|
||||
}
|
||||
stopTTSIfNotForUser(&chatBody.Messages[msgIdx])
|
||||
return *processedMsg
|
||||
})
|
||||
stopTTSIfNotForUser(&chatBody.GetMessages()[msgIdx])
|
||||
}
|
||||
cleanChatBody()
|
||||
refreshChatDisplay()
|
||||
updateStatusLine()
|
||||
// bot msg is done;
|
||||
// now check it for func call
|
||||
// logChat(activeChatName, chatBody.Messages)
|
||||
if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
|
||||
// logChat(activeChatName, chatBody.GetMessages())
|
||||
if err := updateStorageChat(activeChatName, chatBody.GetMessages()); err != nil {
|
||||
logger.Warn("failed to update storage", "error", err, "name", activeChatName)
|
||||
}
|
||||
// Strip think blocks before parsing for tool calls
|
||||
@@ -973,8 +981,8 @@ out:
|
||||
// If so, trigger those characters to respond if that char is not controlled by user
|
||||
// perhaps we should have narrator role to determine which char is next to act
|
||||
if cfg.AutoTurn {
|
||||
lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
|
||||
if len(lastMsg.KnownTo) > 0 {
|
||||
lastMsg, ok := chatBody.GetLastMessage()
|
||||
if ok && len(lastMsg.KnownTo) > 0 {
|
||||
triggerPrivateMessageResponses(&lastMsg)
|
||||
}
|
||||
}
|
||||
@@ -983,13 +991,15 @@ out:
|
||||
|
||||
// cleanChatBody removes messages with null or empty content to prevent API issues
|
||||
func cleanChatBody() {
|
||||
if chatBody == nil || chatBody.Messages == nil {
|
||||
if chatBody == nil || chatBody.GetMessageCount() == 0 {
|
||||
return
|
||||
}
|
||||
// Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false)
|
||||
// /completion msg where part meant for user and other part tool call
|
||||
// chatBody.Messages = cleanToolCalls(chatBody.Messages)
|
||||
chatBody.Messages = consolidateAssistantMessages(chatBody.Messages)
|
||||
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||
cb.Messages = consolidateAssistantMessages(cb.Messages)
|
||||
})
|
||||
}
|
||||
|
||||
// convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings.
|
||||
@@ -1089,7 +1099,7 @@ func findCall(msg, toolCall string) bool {
|
||||
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
|
||||
ToolCallID: lastToolCall.ID, // Use the stored tool call ID
|
||||
}
|
||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
||||
chatBody.AppendMessage(toolResponseMsg)
|
||||
// Clear the stored tool call ID after using it (no longer needed)
|
||||
// Trigger the assistant to continue processing with the error message
|
||||
crr := &models.ChatRoundReq{
|
||||
@@ -1126,7 +1136,7 @@ func findCall(msg, toolCall string) bool {
|
||||
Role: cfg.ToolRole,
|
||||
Content: "Error processing tool call: no valid JSON found. Please check the JSON format.",
|
||||
}
|
||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
||||
chatBody.AppendMessage(toolResponseMsg)
|
||||
crr := &models.ChatRoundReq{
|
||||
Role: cfg.AssistantRole,
|
||||
}
|
||||
@@ -1143,8 +1153,8 @@ func findCall(msg, toolCall string) bool {
|
||||
Role: cfg.ToolRole,
|
||||
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
|
||||
}
|
||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
||||
logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", len(chatBody.Messages))
|
||||
chatBody.AppendMessage(toolResponseMsg)
|
||||
logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", chatBody.GetMessageCount())
|
||||
// Trigger the assistant to continue processing with the error message
|
||||
// chatRound("", cfg.AssistantRole, tv, false, false)
|
||||
crr := &models.ChatRoundReq{
|
||||
@@ -1162,17 +1172,23 @@ func findCall(msg, toolCall string) bool {
|
||||
// we got here => last msg recognized as a tool call (correct or not)
|
||||
// Use the tool call ID from streaming response (lastToolCall.ID)
|
||||
// Don't generate random ID - the ID should match between assistant message and tool response
|
||||
lastMsgIdx := len(chatBody.Messages) - 1
|
||||
lastMsgIdx := chatBody.GetMessageCount() - 1
|
||||
if lastToolCall.ID != "" {
|
||||
chatBody.Messages[lastMsgIdx].ToolCallID = lastToolCall.ID
|
||||
chatBody.UpdateMessageFunc(lastMsgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||
msg.ToolCallID = lastToolCall.ID
|
||||
return msg
|
||||
})
|
||||
}
|
||||
// Store tool call info in the assistant message
|
||||
// Convert Args map to JSON string for storage
|
||||
chatBody.Messages[lastMsgIdx].ToolCall = &models.ToolCall{
|
||||
chatBody.UpdateMessageFunc(lastMsgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||
msg.ToolCall = &models.ToolCall{
|
||||
ID: lastToolCall.ID,
|
||||
Name: lastToolCall.Name,
|
||||
Args: mapToString(lastToolCall.Args),
|
||||
}
|
||||
return msg
|
||||
})
|
||||
// call a func
|
||||
_, ok := fnMap[fc.Name]
|
||||
if !ok {
|
||||
@@ -1183,8 +1199,8 @@ func findCall(msg, toolCall string) bool {
|
||||
Content: m,
|
||||
ToolCallID: lastToolCall.ID, // Use the stored tool call ID
|
||||
}
|
||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
||||
logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages))
|
||||
chatBody.AppendMessage(toolResponseMsg)
|
||||
logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", chatBody.GetMessageCount())
|
||||
// Clear the stored tool call ID after using it
|
||||
lastToolCall.ID = ""
|
||||
// Trigger the assistant to continue processing with the new tool response
|
||||
@@ -1255,9 +1271,9 @@ func findCall(msg, toolCall string) bool {
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
|
||||
"\n\n", len(chatBody.Messages), cfg.ToolRole, toolResponseMsg.GetText())
|
||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
||||
logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages))
|
||||
"\n\n", chatBody.GetMessageCount(), cfg.ToolRole, toolResponseMsg.GetText())
|
||||
chatBody.AppendMessage(toolResponseMsg)
|
||||
logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", chatBody.GetMessageCount())
|
||||
// Clear the stored tool call ID after using it
|
||||
lastToolCall.ID = ""
|
||||
// Trigger the assistant to continue processing with the new tool response
|
||||
@@ -1497,7 +1513,7 @@ func init() {
|
||||
// load cards
|
||||
basicCard.Role = cfg.AssistantRole
|
||||
logLevel.Set(slog.LevelInfo)
|
||||
logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel}))
|
||||
logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel, AddSource: true}))
|
||||
store = storage.NewProviderSQL(cfg.DBPATH, logger)
|
||||
if store == nil {
|
||||
cancel()
|
||||
@@ -1521,11 +1537,11 @@ func init() {
|
||||
}
|
||||
lastToolCall = &models.FuncCall{}
|
||||
lastChat := loadOldChatOrGetNew()
|
||||
chatBody = &models.ChatBody{
|
||||
chatBody = models.NewSafeChatBody(&models.ChatBody{
|
||||
Model: "modelname",
|
||||
Stream: true,
|
||||
Messages: lastChat,
|
||||
}
|
||||
})
|
||||
choseChunkParser()
|
||||
httpClient = createClient(time.Second * 90)
|
||||
if cfg.TTS_ENABLED {
|
||||
|
||||
34
helpfuncs.go
34
helpfuncs.go
@@ -43,7 +43,7 @@ func updateCachedModelColor() {
|
||||
return
|
||||
}
|
||||
// Check if model is loaded
|
||||
loaded, err := isModelLoaded(chatBody.Model)
|
||||
loaded, err := isModelLoaded(chatBody.GetModel())
|
||||
if err != nil {
|
||||
// On error, assume not loaded (red)
|
||||
cachedModelColor = "red"
|
||||
@@ -103,7 +103,7 @@ func refreshChatDisplay() {
|
||||
viewingAs = cfg.WriteNextMsgAs
|
||||
}
|
||||
// Filter messages for this character
|
||||
filteredMessages := filterMessagesForCharacter(chatBody.Messages, viewingAs)
|
||||
filteredMessages := filterMessagesForCharacter(chatBody.GetMessages(), viewingAs)
|
||||
displayText := chatToText(filteredMessages, cfg.ShowSys)
|
||||
textView.SetText(displayText)
|
||||
colorText()
|
||||
@@ -217,8 +217,8 @@ func startNewChat(keepSysP bool) {
|
||||
logger.Warn("no such sys msg", "name", cfg.AssistantRole)
|
||||
}
|
||||
// set chat body
|
||||
chatBody.Messages = chatBody.Messages[:2]
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
chatBody.TruncateMessages(2)
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
newChat := &models.Chat{
|
||||
ID: id + 1,
|
||||
Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole),
|
||||
@@ -370,7 +370,7 @@ func makeStatusLine() string {
|
||||
// Get model color based on load status for local llama.cpp models
|
||||
modelColor := getModelColor()
|
||||
statusLine := fmt.Sprintf(statusLineTempl, activeChatName,
|
||||
boolColors[cfg.ToolUse], modelColor, chatBody.Model, boolColors[cfg.SkipLLMResp],
|
||||
boolColors[cfg.ToolUse], modelColor, chatBody.GetModel(), boolColors[cfg.SkipLLMResp],
|
||||
cfg.CurrentAPI, persona, botPersona)
|
||||
if cfg.STT_ENABLED {
|
||||
recordingS := fmt.Sprintf(" | [%s:-:b]voice recording[-:-:-] (ctrl+r)",
|
||||
@@ -396,11 +396,11 @@ func makeStatusLine() string {
|
||||
}
|
||||
|
||||
func getContextTokens() int {
|
||||
if chatBody == nil || chatBody.Messages == nil {
|
||||
if chatBody == nil {
|
||||
return 0
|
||||
}
|
||||
total := 0
|
||||
messages := chatBody.Messages
|
||||
messages := chatBody.GetMessages()
|
||||
for i := range messages {
|
||||
msg := &messages[i]
|
||||
if msg.Stats != nil && msg.Stats.Tokens > 0 {
|
||||
@@ -415,10 +415,10 @@ func getContextTokens() int {
|
||||
const deepseekContext = 128000
|
||||
|
||||
func getMaxContextTokens() int {
|
||||
if chatBody == nil || chatBody.Model == "" {
|
||||
if chatBody == nil || chatBody.GetModel() == "" {
|
||||
return 0
|
||||
}
|
||||
modelName := chatBody.Model
|
||||
modelName := chatBody.GetModel()
|
||||
switch {
|
||||
case strings.Contains(cfg.CurrentAPI, "openrouter"):
|
||||
if orModelsData != nil {
|
||||
@@ -490,7 +490,7 @@ func listChatRoles() []string {
|
||||
|
||||
func deepseekModelValidator() error {
|
||||
if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI {
|
||||
if chatBody.Model != "deepseek-chat" && chatBody.Model != "deepseek-reasoner" {
|
||||
if chatBody.GetModel() != "deepseek-chat" && chatBody.GetModel() != "deepseek-reasoner" {
|
||||
showToast("bad request", "wrong deepseek model name")
|
||||
return nil
|
||||
}
|
||||
@@ -567,13 +567,13 @@ func executeCommandAndDisplay(cmdText string) {
|
||||
outputContent := workingDir
|
||||
// Add the command being executed to the chat
|
||||
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
||||
chatBody.GetMessageCount(), cfg.ToolRole, cmdText)
|
||||
fmt.Fprintf(textView, "%s\n", outputContent)
|
||||
combinedMsg := models.RoleMsg{
|
||||
Role: cfg.ToolRole,
|
||||
Content: "$ " + cmdText + "\n\n" + outputContent,
|
||||
}
|
||||
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
||||
chatBody.AppendMessage(combinedMsg)
|
||||
if scrollToEndEnabled {
|
||||
textView.ScrollToEnd()
|
||||
}
|
||||
@@ -582,13 +582,13 @@ func executeCommandAndDisplay(cmdText string) {
|
||||
} else {
|
||||
outputContent := "cd: " + newDir + ": No such file or directory"
|
||||
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
||||
chatBody.GetMessageCount(), cfg.ToolRole, cmdText)
|
||||
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputContent)
|
||||
combinedMsg := models.RoleMsg{
|
||||
Role: cfg.ToolRole,
|
||||
Content: "$ " + cmdText + "\n\n" + outputContent,
|
||||
}
|
||||
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
||||
chatBody.AppendMessage(combinedMsg)
|
||||
if scrollToEndEnabled {
|
||||
textView.ScrollToEnd()
|
||||
}
|
||||
@@ -604,7 +604,7 @@ func executeCommandAndDisplay(cmdText string) {
|
||||
output, err := cmd.CombinedOutput()
|
||||
// Add the command being executed to the chat
|
||||
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
||||
chatBody.GetMessageCount(), cfg.ToolRole, cmdText)
|
||||
var outputContent string
|
||||
if err != nil {
|
||||
// Include both output and error
|
||||
@@ -635,7 +635,7 @@ func executeCommandAndDisplay(cmdText string) {
|
||||
Role: cfg.ToolRole,
|
||||
Content: combinedContent,
|
||||
}
|
||||
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
||||
chatBody.AppendMessage(combinedMsg)
|
||||
// Scroll to end and update colors
|
||||
if scrollToEndEnabled {
|
||||
textView.ScrollToEnd()
|
||||
@@ -665,7 +665,7 @@ func performSearch(term string) {
|
||||
searchResultLengths = nil
|
||||
originalTextForSearch = ""
|
||||
// Re-render text without highlights
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
return
|
||||
}
|
||||
|
||||
55
llm.go
55
llm.go
@@ -13,8 +13,9 @@ var lastImg string // for ctrl+j
|
||||
|
||||
// containsToolSysMsg checks if the toolSysMsg already exists in the chat body
|
||||
func containsToolSysMsg() bool {
|
||||
for i := range chatBody.Messages {
|
||||
if chatBody.Messages[i].Role == cfg.ToolRole && chatBody.Messages[i].Content == toolSysMsg {
|
||||
messages := chatBody.GetMessages()
|
||||
for i := range messages {
|
||||
if messages[i].Role == cfg.ToolRole && messages[i].Content == toolSysMsg {
|
||||
return true
|
||||
}
|
||||
}
|
||||
@@ -135,13 +136,13 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
||||
newMsg = models.RoleMsg{Role: role, Content: msg}
|
||||
}
|
||||
newMsg = *processMessageTag(&newMsg)
|
||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||
chatBody.AppendMessage(newMsg)
|
||||
}
|
||||
// sending description of the tools and how to use them
|
||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||
}
|
||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||
// Build prompt and extract images inline as we process each message
|
||||
messages := make([]string, len(filteredMessages))
|
||||
for i := range filteredMessages {
|
||||
@@ -183,7 +184,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
||||
}
|
||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
|
||||
payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
|
||||
payload := models.NewLCPReq(prompt, chatBody.GetModel(), multimodalData,
|
||||
defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles()))
|
||||
data, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
@@ -289,17 +290,17 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
|
||||
newMsg = models.NewRoleMsg(role, msg)
|
||||
}
|
||||
newMsg = *processMessageTag(&newMsg)
|
||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||
chatBody.AppendMessage(newMsg)
|
||||
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
|
||||
"content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
|
||||
"content_len", len(newMsg.Content), "message_count_after_add", chatBody.GetMessageCount())
|
||||
}
|
||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||
// openai /v1/chat does not support custom roles; needs to be user, assistant, system
|
||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||
bodyCopy := &models.ChatBody{
|
||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||
Model: chatBody.Model,
|
||||
Stream: chatBody.Stream,
|
||||
Model: chatBody.GetModel(),
|
||||
Stream: chatBody.GetStream(),
|
||||
}
|
||||
for i := range filteredMessages {
|
||||
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||
@@ -375,13 +376,13 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
||||
if msg != "" { // otherwise let the bot to continue
|
||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
||||
newMsg = *processMessageTag(&newMsg)
|
||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||
chatBody.AppendMessage(newMsg)
|
||||
}
|
||||
// sending description of the tools and how to use them
|
||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||
}
|
||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||
messages := make([]string, len(filteredMessages))
|
||||
for i := range filteredMessages {
|
||||
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
||||
@@ -394,7 +395,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
||||
}
|
||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||
"msg", msg, "resume", resume, "prompt", prompt)
|
||||
payload := models.NewDSCompletionReq(prompt, chatBody.Model,
|
||||
payload := models.NewDSCompletionReq(prompt, chatBody.GetModel(),
|
||||
defaultLCPProps["temp"],
|
||||
chatBody.MakeStopSliceExcluding("", listChatRoles()))
|
||||
data, err := json.Marshal(payload)
|
||||
@@ -448,15 +449,15 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
||||
if msg != "" { // otherwise let the bot continue
|
||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
||||
newMsg = *processMessageTag(&newMsg)
|
||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||
chatBody.AppendMessage(newMsg)
|
||||
}
|
||||
// Create copy of chat body with standardized user role
|
||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||
bodyCopy := &models.ChatBody{
|
||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||
Model: chatBody.Model,
|
||||
Stream: chatBody.Stream,
|
||||
Model: chatBody.GetModel(),
|
||||
Stream: chatBody.GetStream(),
|
||||
}
|
||||
for i := range filteredMessages {
|
||||
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||
@@ -527,13 +528,13 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
||||
if msg != "" { // otherwise let the bot to continue
|
||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
||||
newMsg = *processMessageTag(&newMsg)
|
||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||
chatBody.AppendMessage(newMsg)
|
||||
}
|
||||
// sending description of the tools and how to use them
|
||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||
}
|
||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||
messages := make([]string, len(filteredMessages))
|
||||
for i := range filteredMessages {
|
||||
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
||||
@@ -547,7 +548,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
||||
stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
|
||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
|
||||
payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt,
|
||||
payload := models.NewOpenRouterCompletionReq(chatBody.GetModel(), prompt,
|
||||
defaultLCPProps, stopSlice)
|
||||
data, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
@@ -633,15 +634,15 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
||||
newMsg = models.NewRoleMsg(role, msg)
|
||||
}
|
||||
newMsg = *processMessageTag(&newMsg)
|
||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||
chatBody.AppendMessage(newMsg)
|
||||
}
|
||||
// Create copy of chat body with standardized user role
|
||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||
bodyCopy := &models.ChatBody{
|
||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||
Model: chatBody.Model,
|
||||
Stream: chatBody.Stream,
|
||||
Model: chatBody.GetModel(),
|
||||
Stream: chatBody.GetStream(),
|
||||
}
|
||||
for i := range filteredMessages {
|
||||
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||
|
||||
251
models/models.go
251
models/models.go
@@ -6,6 +6,7 @@ import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
type FuncCall struct {
|
||||
@@ -639,3 +640,253 @@ type MultimodalToolResp struct {
|
||||
Type string `json:"type"`
|
||||
Parts []map[string]string `json:"parts"`
|
||||
}
|
||||
|
||||
// SafeChatBody is a thread-safe wrapper around ChatBody using RWMutex.
|
||||
// This allows safe concurrent access to chat state from multiple goroutines.
|
||||
type SafeChatBody struct {
|
||||
mu sync.RWMutex
|
||||
ChatBody
|
||||
}
|
||||
|
||||
// NewSafeChatBody creates a new SafeChatBody from an existing ChatBody.
|
||||
// If cb is nil, creates an empty ChatBody.
|
||||
func NewSafeChatBody(cb *ChatBody) *SafeChatBody {
|
||||
if cb == nil {
|
||||
return &SafeChatBody{
|
||||
ChatBody: ChatBody{
|
||||
Messages: []RoleMsg{},
|
||||
},
|
||||
}
|
||||
}
|
||||
return &SafeChatBody{
|
||||
ChatBody: *cb,
|
||||
}
|
||||
}
|
||||
|
||||
// GetModel returns the model name (thread-safe read).
|
||||
func (s *SafeChatBody) GetModel() string {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
return s.Model
|
||||
}
|
||||
|
||||
// SetModel sets the model name (thread-safe write).
|
||||
func (s *SafeChatBody) SetModel(model string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.Model = model
|
||||
}
|
||||
|
||||
// GetStream returns the stream flag (thread-safe read).
|
||||
func (s *SafeChatBody) GetStream() bool {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
return s.Stream
|
||||
}
|
||||
|
||||
// SetStream sets the stream flag (thread-safe write).
|
||||
func (s *SafeChatBody) SetStream(stream bool) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.Stream = stream
|
||||
}
|
||||
|
||||
// GetMessages returns a copy of all messages (thread-safe read).
|
||||
// Returns a copy to prevent race conditions after the lock is released.
|
||||
func (s *SafeChatBody) GetMessages() []RoleMsg {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
// Return a copy to prevent external modification
|
||||
messagesCopy := make([]RoleMsg, len(s.Messages))
|
||||
copy(messagesCopy, s.Messages)
|
||||
return messagesCopy
|
||||
}
|
||||
|
||||
// SetMessages replaces all messages (thread-safe write).
|
||||
func (s *SafeChatBody) SetMessages(messages []RoleMsg) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.Messages = messages
|
||||
}
|
||||
|
||||
// AppendMessage adds a message to the end (thread-safe write).
|
||||
func (s *SafeChatBody) AppendMessage(msg RoleMsg) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.Messages = append(s.Messages, msg)
|
||||
}
|
||||
|
||||
// GetMessageAt returns a message at a specific index (thread-safe read).
|
||||
// Returns the message and a boolean indicating if the index was valid.
|
||||
func (s *SafeChatBody) GetMessageAt(index int) (RoleMsg, bool) {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
if index < 0 || index >= len(s.Messages) {
|
||||
return RoleMsg{}, false
|
||||
}
|
||||
return s.Messages[index], true
|
||||
}
|
||||
|
||||
// SetMessageAt updates a message at a specific index (thread-safe write).
|
||||
// Returns false if index is out of bounds.
|
||||
func (s *SafeChatBody) SetMessageAt(index int, msg RoleMsg) bool {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if index < 0 || index >= len(s.Messages) {
|
||||
return false
|
||||
}
|
||||
s.Messages[index] = msg
|
||||
return true
|
||||
}
|
||||
|
||||
// GetLastMessage returns the last message (thread-safe read).
|
||||
// Returns the message and a boolean indicating if the chat has messages.
|
||||
func (s *SafeChatBody) GetLastMessage() (RoleMsg, bool) {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
if len(s.Messages) == 0 {
|
||||
return RoleMsg{}, false
|
||||
}
|
||||
return s.Messages[len(s.Messages)-1], true
|
||||
}
|
||||
|
||||
// GetMessageCount returns the number of messages (thread-safe read).
|
||||
func (s *SafeChatBody) GetMessageCount() int {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
return len(s.Messages)
|
||||
}
|
||||
|
||||
// RemoveLastMessage removes the last message (thread-safe write).
|
||||
// Returns false if there are no messages.
|
||||
func (s *SafeChatBody) RemoveLastMessage() bool {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if len(s.Messages) == 0 {
|
||||
return false
|
||||
}
|
||||
s.Messages = s.Messages[:len(s.Messages)-1]
|
||||
return true
|
||||
}
|
||||
|
||||
// TruncateMessages keeps only the first n messages (thread-safe write).
|
||||
func (s *SafeChatBody) TruncateMessages(n int) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if n < len(s.Messages) {
|
||||
s.Messages = s.Messages[:n]
|
||||
}
|
||||
}
|
||||
|
||||
// ClearMessages removes all messages (thread-safe write).
|
||||
func (s *SafeChatBody) ClearMessages() {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.Messages = []RoleMsg{}
|
||||
}
|
||||
|
||||
// Rename renames all occurrences of oldname to newname in messages (thread-safe read-modify-write).
|
||||
func (s *SafeChatBody) Rename(oldname, newname string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
for i := range s.Messages {
|
||||
s.Messages[i].Content = strings.ReplaceAll(s.Messages[i].Content, oldname, newname)
|
||||
s.Messages[i].Role = strings.ReplaceAll(s.Messages[i].Role, oldname, newname)
|
||||
}
|
||||
}
|
||||
|
||||
// ListRoles returns all unique roles in messages (thread-safe read).
|
||||
func (s *SafeChatBody) ListRoles() []string {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
namesMap := make(map[string]struct{})
|
||||
for i := range s.Messages {
|
||||
namesMap[s.Messages[i].Role] = struct{}{}
|
||||
}
|
||||
resp := make([]string, len(namesMap))
|
||||
i := 0
|
||||
for k := range namesMap {
|
||||
resp[i] = k
|
||||
i++
|
||||
}
|
||||
return resp
|
||||
}
|
||||
|
||||
// MakeStopSlice returns stop strings for all roles (thread-safe read).
|
||||
func (s *SafeChatBody) MakeStopSlice() []string {
|
||||
return s.MakeStopSliceExcluding("", s.ListRoles())
|
||||
}
|
||||
|
||||
// MakeStopSliceExcluding returns stop strings excluding a specific role (thread-safe read).
// For each role in roleList other than excludeRole it emits several
// "role:"-style prefixes intended to be used as LLM stop sequences.
//
// NOTE(review): the read lock is acquired although nothing from s is read
// here — only the roleList parameter is used. Presumably kept for
// symmetry with the other methods; confirm before removing.
// NOTE(review): several of the appended entries appear to be duplicates
// of role+": "; they may originally have differed by invisible whitespace
// (e.g. a non-breaking space) — verify against the intended stop set.
func (s *SafeChatBody) MakeStopSliceExcluding(excludeRole string, roleList []string) []string {
	s.mu.RLock()
	defer s.mu.RUnlock()
	ss := []string{}
	for _, role := range roleList {
		if role == excludeRole {
			// the excluded role gets no stop strings
			continue
		}
		ss = append(ss,
			role+":\n",
			role+":",
			role+": ",
			role+": ",
			role+": \n",
			role+": ",
		)
	}
	return ss
}

// UpdateMessageFunc updates a message at index using a provided function.
// The function receives the current message and returns the updated message.
// This is atomic and thread-safe (read-modify-write under single lock).
// Returns false if index is out of bounds.
//
// NOTE(review): updater runs while the write lock is held. It must not
// call any other SafeChatBody method (sync.RWMutex is not reentrant, so
// that would deadlock) and should be quick, since it blocks all other
// readers and writers.
func (s *SafeChatBody) UpdateMessageFunc(index int, updater func(RoleMsg) RoleMsg) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	if index < 0 || index >= len(s.Messages) {
		return false
	}
	s.Messages[index] = updater(s.Messages[index])
	return true
}

// AppendMessageFunc appends a new message created by a provided function.
// The function receives the current message count and returns the new message.
// This is atomic and thread-safe.
//
// NOTE(review): creator runs while the write lock is held. It must not
// call any other SafeChatBody method (sync.RWMutex is not reentrant, so
// that would deadlock) and should be quick, since it blocks all other
// readers and writers.
func (s *SafeChatBody) AppendMessageFunc(creator func(count int) RoleMsg) {
	s.mu.Lock()
	defer s.mu.Unlock()
	msg := creator(len(s.Messages))
	s.Messages = append(s.Messages, msg)
}

// GetMessagesForLLM returns a filtered copy of messages for sending to LLM.
|
||||
// This is thread-safe and returns a copy safe for external modification.
|
||||
func (s *SafeChatBody) GetMessagesForLLM(filterFunc func([]RoleMsg) []RoleMsg) []RoleMsg {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
if filterFunc == nil {
|
||||
messagesCopy := make([]RoleMsg, len(s.Messages))
|
||||
copy(messagesCopy, s.Messages)
|
||||
return messagesCopy
|
||||
}
|
||||
return filterFunc(s.Messages)
|
||||
}
|
||||
|
||||
// WithLock executes a function while holding the write lock.
// Use this for complex operations that need to be atomic.
//
// fn receives a pointer to the embedded ChatBody and may read and mutate
// it freely. fn must NOT call any SafeChatBody method (sync.RWMutex is
// not reentrant — that would deadlock) and must not retain the pointer,
// or any slice taken from it, after returning.
func (s *SafeChatBody) WithLock(fn func(*ChatBody)) {
	s.mu.Lock()
	defer s.mu.Unlock()
	fn(&s.ChatBody)
}

// WithRLock executes a function while holding the read lock.
// Use this for complex read-only operations.
//
// fn must treat the ChatBody strictly as read-only: only the read lock is
// held, so any write here would race with concurrent readers and writers.
// fn must NOT call any SafeChatBody method (deadlock risk — the RWMutex
// is not reentrant) and must not retain the pointer after returning.
func (s *SafeChatBody) WithRLock(fn func(*ChatBody)) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	fn(&s.ChatBody)
}
|
||||
|
||||
14
popups.go
14
popups.go
@@ -50,7 +50,7 @@ func showModelSelectionPopup() {
|
||||
// Find the current model index to set as selected
|
||||
currentModelIndex := -1
|
||||
for i, model := range modelList {
|
||||
if strings.TrimPrefix(model, models.LoadedMark) == chatBody.Model {
|
||||
if strings.TrimPrefix(model, models.LoadedMark) == chatBody.GetModel() {
|
||||
currentModelIndex = i
|
||||
}
|
||||
modelListWidget.AddItem(model, "", 0, nil)
|
||||
@@ -61,8 +61,8 @@ func showModelSelectionPopup() {
|
||||
}
|
||||
modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||
modelName := strings.TrimPrefix(mainText, models.LoadedMark)
|
||||
chatBody.Model = modelName
|
||||
cfg.CurrentModel = chatBody.Model
|
||||
chatBody.SetModel(modelName)
|
||||
cfg.CurrentModel = chatBody.GetModel()
|
||||
pages.RemovePage("modelSelectionPopup")
|
||||
app.SetFocus(textArea)
|
||||
updateCachedModelColor()
|
||||
@@ -156,9 +156,9 @@ func showAPILinkSelectionPopup() {
|
||||
}
|
||||
newModelList := getModelListForAPI(cfg.CurrentAPI)
|
||||
// Ensure chatBody.Model is in the new list; if not, set to first available model
|
||||
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) {
|
||||
chatBody.Model = strings.TrimPrefix(newModelList[0], models.LoadedMark)
|
||||
cfg.CurrentModel = chatBody.Model
|
||||
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.GetModel()) {
|
||||
chatBody.SetModel(strings.TrimPrefix(newModelList[0], models.LoadedMark))
|
||||
cfg.CurrentModel = chatBody.GetModel()
|
||||
updateToolCapabilities()
|
||||
}
|
||||
pages.RemovePage("apiLinkSelectionPopup")
|
||||
@@ -229,7 +229,7 @@ func showUserRoleSelectionPopup() {
|
||||
// Update the user role in config
|
||||
cfg.WriteNextMsgAs = mainText
|
||||
// role got switch, update textview with character specific context for user
|
||||
filtered := filterMessagesForCharacter(chatBody.Messages, mainText)
|
||||
filtered := filterMessagesForCharacter(chatBody.GetMessages(), mainText)
|
||||
textView.SetText(chatToText(filtered, cfg.ShowSys))
|
||||
// Remove the popup page
|
||||
pages.RemovePage("userRoleSelectionPopup")
|
||||
|
||||
@@ -29,7 +29,7 @@ func historyToSJSON(msgs []models.RoleMsg) (string, error) {
|
||||
}
|
||||
|
||||
func exportChat() error {
|
||||
data, err := json.MarshalIndent(chatBody.Messages, "", " ")
|
||||
data, err := json.MarshalIndent(chatBody.GetMessages(), "", " ")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -54,7 +54,7 @@ func importChat(filename string) error {
|
||||
if _, ok := chatMap[activeChatName]; !ok {
|
||||
addNewChat(activeChatName)
|
||||
}
|
||||
chatBody.Messages = messages
|
||||
chatBody.SetMessages(messages)
|
||||
cfg.AssistantRole = messages[1].Role
|
||||
if cfg.AssistantRole == cfg.UserRole {
|
||||
cfg.AssistantRole = messages[2].Role
|
||||
|
||||
30
tables.go
30
tables.go
@@ -128,8 +128,8 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
|
||||
pages.RemovePage(historyPage)
|
||||
return
|
||||
}
|
||||
chatBody.Messages = history
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
chatBody.SetMessages(history)
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
activeChatName = selectedChat
|
||||
pages.RemovePage(historyPage)
|
||||
return
|
||||
@@ -149,8 +149,8 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
|
||||
}
|
||||
showToast("chat deleted", selectedChat+" was deleted")
|
||||
// load last chat
|
||||
chatBody.Messages = loadOldChatOrGetNew()
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
chatBody.SetMessages(loadOldChatOrGetNew())
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
pages.RemovePage(historyPage)
|
||||
return
|
||||
case "update card":
|
||||
@@ -163,16 +163,24 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
|
||||
showToast("error", "no such card: "+agentName)
|
||||
return
|
||||
}
|
||||
cc.SysPrompt = chatBody.Messages[0].Content
|
||||
cc.FirstMsg = chatBody.Messages[1].Content
|
||||
if msg0, ok := chatBody.GetMessageAt(0); ok {
|
||||
cc.SysPrompt = msg0.Content
|
||||
}
|
||||
if msg1, ok := chatBody.GetMessageAt(1); ok {
|
||||
cc.FirstMsg = msg1.Content
|
||||
}
|
||||
if err := pngmeta.WriteToPng(cc.ToSpec(cfg.UserRole), cc.FilePath, cc.FilePath); err != nil {
|
||||
logger.Error("failed to write charcard", "error", err)
|
||||
}
|
||||
return
|
||||
case "move sysprompt onto 1st msg":
|
||||
chatBody.Messages[1].Content = chatBody.Messages[0].Content + chatBody.Messages[1].Content
|
||||
chatBody.Messages[0].Content = rpDefenitionSysMsg
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||
if len(cb.Messages) >= 2 {
|
||||
cb.Messages[1].Content = cb.Messages[0].Content + cb.Messages[1].Content
|
||||
cb.Messages[0].Content = rpDefenitionSysMsg
|
||||
}
|
||||
})
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
activeChatName = selectedChat
|
||||
pages.RemovePage(historyPage)
|
||||
return
|
||||
@@ -563,7 +571,7 @@ func makeAgentTable(agentList []string) *tview.Table {
|
||||
return
|
||||
}
|
||||
// replace textview
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
updateStatusLine()
|
||||
// sysModal.ClearButtons()
|
||||
@@ -732,7 +740,7 @@ func makeImportChatTable(filenames []string) *tview.Table {
|
||||
colorText()
|
||||
updateStatusLine()
|
||||
// redraw the text in text area
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
pages.RemovePage(historyPage)
|
||||
app.SetFocus(textArea)
|
||||
return
|
||||
|
||||
4
tools.go
4
tools.go
@@ -1215,11 +1215,11 @@ func isCommandAllowed(command string, args ...string) bool {
|
||||
}
|
||||
|
||||
func summarizeChat(args map[string]string) []byte {
|
||||
if len(chatBody.Messages) == 0 {
|
||||
if chatBody.GetMessageCount() == 0 {
|
||||
return []byte("No chat history to summarize.")
|
||||
}
|
||||
// Format chat history for the agent
|
||||
chatText := chatToText(chatBody.Messages, true) // include system and tool messages
|
||||
chatText := chatToText(chatBody.GetMessages(), true) // include system and tool messages
|
||||
return []byte(chatText)
|
||||
}
|
||||
|
||||
|
||||
56
tui.go
56
tui.go
@@ -355,7 +355,7 @@ func init() {
|
||||
searchResults = nil // Clear search results
|
||||
searchResultLengths = nil // Clear search result lengths
|
||||
originalTextForSearch = ""
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys)) // Reset text without search regions
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) // Reset text without search regions
|
||||
colorText() // Apply normal chat coloring
|
||||
} else {
|
||||
// Original logic if no search is active
|
||||
@@ -436,9 +436,11 @@ func init() {
|
||||
pages.RemovePage(editMsgPage)
|
||||
return nil
|
||||
}
|
||||
chatBody.Messages[selectedIndex].SetText(editedMsg)
|
||||
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||
cb.Messages[selectedIndex].SetText(editedMsg)
|
||||
})
|
||||
// change textarea
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
pages.RemovePage(editMsgPage)
|
||||
editMode = false
|
||||
return nil
|
||||
@@ -466,9 +468,11 @@ func init() {
|
||||
pages.RemovePage(roleEditPage)
|
||||
return
|
||||
}
|
||||
if selectedIndex >= 0 && selectedIndex < len(chatBody.Messages) {
|
||||
chatBody.Messages[selectedIndex].Role = newRole
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
if selectedIndex >= 0 && selectedIndex < chatBody.GetMessageCount() {
|
||||
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||
cb.Messages[selectedIndex].Role = newRole
|
||||
})
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
pages.RemovePage(roleEditPage)
|
||||
}
|
||||
@@ -497,7 +501,7 @@ func init() {
|
||||
return nil
|
||||
}
|
||||
selectedIndex = siInt
|
||||
if len(chatBody.Messages)-1 < selectedIndex || selectedIndex < 0 {
|
||||
if chatBody.GetMessageCount()-1 < selectedIndex || selectedIndex < 0 {
|
||||
msg := "chosen index is out of bounds, will copy user input"
|
||||
logger.Warn(msg, "index", selectedIndex)
|
||||
showToast("error", msg)
|
||||
@@ -507,7 +511,7 @@ func init() {
|
||||
hideIndexBar() // Hide overlay instead of removing page directly
|
||||
return nil
|
||||
}
|
||||
m := chatBody.Messages[selectedIndex]
|
||||
m := chatBody.GetMessages()[selectedIndex]
|
||||
switch {
|
||||
case roleEditMode:
|
||||
hideIndexBar() // Hide overlay first
|
||||
@@ -574,7 +578,7 @@ func init() {
|
||||
searchResults = nil
|
||||
searchResultLengths = nil
|
||||
originalTextForSearch = ""
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
return
|
||||
} else {
|
||||
@@ -632,7 +636,7 @@ func init() {
|
||||
//
|
||||
textArea.SetMovedFunc(updateStatusLine)
|
||||
updateStatusLine()
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
if scrollToEndEnabled {
|
||||
textView.ScrollToEnd()
|
||||
@@ -646,7 +650,7 @@ func init() {
|
||||
if event.Key() == tcell.KeyRune && event.Rune() == '5' && event.Modifiers()&tcell.ModAlt != 0 {
|
||||
// switch cfg.ShowSys
|
||||
cfg.ShowSys = !cfg.ShowSys
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
}
|
||||
if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 {
|
||||
@@ -679,7 +683,7 @@ func init() {
|
||||
// Handle Alt+T to toggle thinking block visibility
|
||||
if event.Key() == tcell.KeyRune && event.Rune() == 't' && event.Modifiers()&tcell.ModAlt != 0 {
|
||||
thinkingCollapsed = !thinkingCollapsed
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
status := "expanded"
|
||||
if thinkingCollapsed {
|
||||
@@ -691,7 +695,7 @@ func init() {
|
||||
// Handle Ctrl+T to toggle tool call/response visibility
|
||||
if event.Key() == tcell.KeyCtrlT {
|
||||
toolCollapsed = !toolCollapsed
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
colorText()
|
||||
status := "expanded"
|
||||
if toolCollapsed {
|
||||
@@ -734,14 +738,14 @@ func init() {
|
||||
}
|
||||
if event.Key() == tcell.KeyF2 && !botRespMode {
|
||||
// regen last msg
|
||||
if len(chatBody.Messages) == 0 {
|
||||
if chatBody.GetMessageCount() == 0 {
|
||||
showToast("info", "no messages to regenerate")
|
||||
return nil
|
||||
}
|
||||
chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
|
||||
chatBody.TruncateMessages(chatBody.GetMessageCount() - 1)
|
||||
// there is no case where user msg is regenerated
|
||||
// lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
// lastRole := chatBody.GetMessages()[chatBody.GetMessageCount()-1].Role
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
// go chatRound("", cfg.UserRole, textView, true, false)
|
||||
if cfg.TTS_ENABLED {
|
||||
TTSDoneChan <- true
|
||||
@@ -760,12 +764,12 @@ func init() {
|
||||
colorText()
|
||||
return nil
|
||||
}
|
||||
if len(chatBody.Messages) == 0 {
|
||||
if chatBody.GetMessageCount() == 0 {
|
||||
showToast("info", "no messages to delete")
|
||||
return nil
|
||||
}
|
||||
chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
|
||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||
chatBody.TruncateMessages(chatBody.GetMessageCount() - 1)
|
||||
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||
if cfg.TTS_ENABLED {
|
||||
TTSDoneChan <- true
|
||||
}
|
||||
@@ -813,7 +817,7 @@ func init() {
|
||||
if event.Key() == tcell.KeyF7 {
|
||||
// copy msg to clipboard
|
||||
editMode = false
|
||||
m := chatBody.Messages[len(chatBody.Messages)-1]
|
||||
m := chatBody.GetMessages()[chatBody.GetMessageCount()-1]
|
||||
msgText := m.GetText()
|
||||
if err := copyToClipboard(msgText); err != nil {
|
||||
logger.Error("failed to copy to clipboard", "error", err)
|
||||
@@ -997,10 +1001,10 @@ func init() {
|
||||
TTSDoneChan <- true
|
||||
}
|
||||
if event.Key() == tcell.KeyRune && event.Rune() == '0' && event.Modifiers()&tcell.ModAlt != 0 && cfg.TTS_ENABLED {
|
||||
if len(chatBody.Messages) > 0 {
|
||||
if chatBody.GetMessageCount() > 0 {
|
||||
// Stop any currently playing TTS first
|
||||
TTSDoneChan <- true
|
||||
lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
|
||||
lastMsg := chatBody.GetMessages()[chatBody.GetMessageCount()-1]
|
||||
cleanedText := models.CleanText(lastMsg.GetText())
|
||||
if cleanedText != "" {
|
||||
// nolint: errcheck
|
||||
@@ -1012,7 +1016,7 @@ func init() {
|
||||
if event.Key() == tcell.KeyCtrlW {
|
||||
// INFO: continue bot/text message
|
||||
// without new role
|
||||
lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
|
||||
lastRole := chatBody.GetMessages()[chatBody.GetMessageCount()-1].Role
|
||||
// go chatRound("", lastRole, textView, false, true)
|
||||
chatRoundChan <- &models.ChatRoundReq{Role: lastRole, Resume: true}
|
||||
return nil
|
||||
@@ -1098,7 +1102,7 @@ func init() {
|
||||
if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '9' {
|
||||
// Warm up (load) the currently selected model
|
||||
go warmUpModel()
|
||||
showToast("model warmup", "loading model: "+chatBody.Model)
|
||||
showToast("model warmup", "loading model: "+chatBody.GetModel())
|
||||
return nil
|
||||
}
|
||||
// cannot send msg in editMode or botRespMode
|
||||
@@ -1137,7 +1141,7 @@ func init() {
|
||||
}
|
||||
// add user icon before user msg
|
||||
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
|
||||
nl, len(chatBody.Messages), persona, msgText)
|
||||
nl, chatBody.GetMessageCount(), persona, msgText)
|
||||
textArea.SetText("", true)
|
||||
if scrollToEndEnabled {
|
||||
textView.ScrollToEnd()
|
||||
|
||||
Reference in New Issue
Block a user