Feat: Alt+3 starts a new chat seeded with a summary of the previous one

This commit is contained in:
Grail Finder
2025-12-26 22:05:19 +03:00
parent c0ec82b579
commit c43af62dc4
2 changed files with 76 additions and 14 deletions

69
bot.go
View File

@@ -69,6 +69,7 @@ var (
"meta-llama/llama-3.3-70b-instruct:free", "meta-llama/llama-3.3-70b-instruct:free",
} }
LocalModels = []string{} LocalModels = []string{}
lastSummary string
) )
// cleanNullMessages removes messages with null or empty content to prevent API issues // cleanNullMessages removes messages with null or empty content to prevent API issues
@@ -626,8 +627,22 @@ func checkGame(role string, tv *tview.TextView) {
} }
} }
func chatRound(userMsg, role string, tv *tview.TextView, regen, resume bool) { func chatRound(userMsg, role string, tv *tview.TextView, regen, resume, summaryMode bool) {
botRespMode = true botRespMode = true
if summaryMode {
// Save original messages
originalMessages := chatBody.Messages
defer func() { chatBody.Messages = originalMessages }()
// Build summary prompt messages
summaryMessages := []models.RoleMsg{}
// Add system instruction
summaryMessages = append(summaryMessages, models.RoleMsg{Role: "system", Content: "Please provide a concise summary of the following conversation. Focus on key points, decisions, and actions. Provide only the summary, no additional commentary."})
// Append all original messages (excluding system? keep them)
summaryMessages = append(summaryMessages, originalMessages...)
// Add a user message to trigger summary
summaryMessages = append(summaryMessages, models.RoleMsg{Role: cfg.UserRole, Content: "Summarize the conversation."})
chatBody.Messages = summaryMessages
}
botPersona := cfg.AssistantRole botPersona := cfg.AssistantRole
if cfg.WriteNextMsgAsCompletionAgent != "" { if cfg.WriteNextMsgAsCompletionAgent != "" {
botPersona = cfg.WriteNextMsgAsCompletionAgent botPersona = cfg.WriteNextMsgAsCompletionAgent
@@ -657,7 +672,7 @@ func chatRound(userMsg, role string, tv *tview.TextView, regen, resume bool) {
} }
go sendMsgToLLM(reader) go sendMsgToLLM(reader)
logger.Debug("looking at vars in chatRound", "msg", userMsg, "regen", regen, "resume", resume) logger.Debug("looking at vars in chatRound", "msg", userMsg, "regen", regen, "resume", resume)
if !resume { if !summaryMode && !resume {
fmt.Fprintf(tv, "\n[-:-:b](%d) ", len(chatBody.Messages)) fmt.Fprintf(tv, "\n[-:-:b](%d) ", len(chatBody.Messages))
fmt.Fprint(tv, roleToIcon(botPersona)) fmt.Fprint(tv, roleToIcon(botPersona))
fmt.Fprint(tv, "[-:-:-]\n") fmt.Fprint(tv, "[-:-:-]\n")
@@ -704,6 +719,9 @@ out:
Role: botPersona, Content: respText.String(), Role: botPersona, Content: respText.String(),
}) })
} }
if summaryMode {
lastSummary = respText.String()
}
logger.Debug("chatRound: before cleanChatBody", "messages_before_clean", len(chatBody.Messages)) logger.Debug("chatRound: before cleanChatBody", "messages_before_clean", len(chatBody.Messages))
for i, msg := range chatBody.Messages { for i, msg := range chatBody.Messages {
logger.Debug("chatRound: before cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID) logger.Debug("chatRound: before cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
@@ -714,16 +732,20 @@ out:
for i, msg := range chatBody.Messages { for i, msg := range chatBody.Messages {
logger.Debug("chatRound: after cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID) logger.Debug("chatRound: after cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
} }
if !summaryMode {
colorText() colorText()
updateStatusLine() updateStatusLine()
}
// bot msg is done; // bot msg is done;
// now check it for func call // now check it for func call
// logChat(activeChatName, chatBody.Messages) // logChat(activeChatName, chatBody.Messages)
if !summaryMode {
if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil { if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
logger.Warn("failed to update storage", "error", err, "name", activeChatName) logger.Warn("failed to update storage", "error", err, "name", activeChatName)
} }
findCall(respText.String(), toolResp.String(), tv) findCall(respText.String(), toolResp.String(), tv)
} }
}
// cleanChatBody removes messages with null or empty content to prevent API issues // cleanChatBody removes messages with null or empty content to prevent API issues
func cleanChatBody() { func cleanChatBody() {
@@ -825,7 +847,7 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
chatBody.Messages = append(chatBody.Messages, toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Clear the stored tool call ID after using it (no longer needed) // Clear the stored tool call ID after using it (no longer needed)
// Trigger the assistant to continue processing with the error message // Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false) chatRound("", cfg.AssistantRole, tv, false, false, false)
return return
} }
lastToolCall.Args = openAIToolMap lastToolCall.Args = openAIToolMap
@@ -858,7 +880,7 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
chatBody.Messages = append(chatBody.Messages, toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", len(chatBody.Messages)) logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", len(chatBody.Messages))
// Trigger the assistant to continue processing with the error message // Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false) chatRound("", cfg.AssistantRole, tv, false, false, false)
return return
} }
// Update lastToolCall with parsed function call // Update lastToolCall with parsed function call
@@ -891,7 +913,7 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
lastToolCall.ID = "" lastToolCall.ID = ""
// Trigger the assistant to continue processing with the new tool response // Trigger the assistant to continue processing with the new tool response
// by calling chatRound with empty content to continue the assistant's response // by calling chatRound with empty content to continue the assistant's response
chatRound("", cfg.AssistantRole, tv, false, false) chatRound("", cfg.AssistantRole, tv, false, false, false)
return return
} }
resp := callToolWithAgent(fc.Name, fc.Args) resp := callToolWithAgent(fc.Name, fc.Args)
@@ -911,7 +933,7 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
lastToolCall.ID = "" lastToolCall.ID = ""
// Trigger the assistant to continue processing with the new tool response // Trigger the assistant to continue processing with the new tool response
// by calling chatRound with empty content to continue the assistant's response // by calling chatRound with empty content to continue the assistant's response
chatRound("", cfg.AssistantRole, tv, false, false) chatRound("", cfg.AssistantRole, tv, false, false, false)
} }
func chatToTextSlice(showSys bool) []string { func chatToTextSlice(showSys bool) []string {
@@ -1033,6 +1055,41 @@ func refreshLocalModelsIfEmpty() {
localModelsMu.Unlock() localModelsMu.Unlock()
} }
// summarizeAndStartNewChat asks the model for a summary of the current
// conversation, then starts a fresh chat seeded with that summary injected
// as a tool-role message. It notifies the user at each stage and returns
// early if there is no history or the summary comes back empty.
// NOTE(review): invoked via `go summarizeAndStartNewChat()` from the Alt+3
// key handler; unlike some other handlers it does not check botRespMode
// first — confirm a concurrent bot response cannot corrupt chatBody.
func summarizeAndStartNewChat() {
	// Nothing to summarize for an empty conversation.
	if len(chatBody.Messages) == 0 {
		notifyUser("info", "No chat history to summarize")
		return
	}
	// Create a dummy TextView for the summary request (won't be displayed)
	dummyTV := tview.NewTextView()
	// Call chatRound with summaryMode true to generate summary
	notifyUser("info", "Summarizing chat history...")
	// lastSummary is a package-level var that chatRound fills in when
	// summaryMode is true; clear it so a stale value from a previous
	// summarization is never mistaken for this run's result.
	lastSummary = ""
	// NOTE(review): this relies on chatRound completing synchronously
	// (its summaryMode branch restores chatBody.Messages via defer) —
	// confirm it does not hand off work that outlives this call.
	chatRound("", cfg.UserRole, dummyTV, false, false, true)
	summary := lastSummary
	// Empty result means the round produced no text; bail out without
	// destroying the current chat.
	if summary == "" {
		notifyUser("error", "Failed to generate summary")
		return
	}
	// Start a new chat
	startNewChat()
	// Inject summary as a tool call response
	toolMsg := models.RoleMsg{
		Role:       cfg.ToolRole,
		Content:    summary,
		ToolCallID: "",
	}
	chatBody.Messages = append(chatBody.Messages, toolMsg)
	// Update UI
	textView.SetText(chatToText(cfg.ShowSys))
	colorText()
	// Update storage; a failure here is logged but not fatal — the new
	// chat is already usable in memory.
	if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
		logger.Warn("failed to update storage after injecting summary", "error", err)
	}
	notifyUser("info", "Chat summarized and new chat started with summary as tool response")
}
func init() { func init() {
var err error var err error
cfg, err = config.LoadConfig("config.toml") cfg, err = config.LoadConfig("config.toml")

11
tui.go
View File

@@ -91,6 +91,7 @@ var (
[yellow]Alt+1[white]: toggle shell mode (execute commands locally) [yellow]Alt+1[white]: toggle shell mode (execute commands locally)
[yellow]Alt+4[white]: edit msg role [yellow]Alt+4[white]: edit msg role
[yellow]Alt+5[white]: toggle system and tool messages display [yellow]Alt+5[white]: toggle system and tool messages display
[yellow]Alt+3[white]: summarize chat history and start new chat with summary as tool response
[yellow]Alt+6[white]: toggle status line visibility [yellow]Alt+6[white]: toggle status line visibility
[yellow]Alt+9[white]: warm up (load) selected llama.cpp model [yellow]Alt+9[white]: warm up (load) selected llama.cpp model
@@ -779,6 +780,10 @@ func init() {
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(cfg.ShowSys))
colorText() colorText()
} }
if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 {
go summarizeAndStartNewChat()
return nil
}
if event.Key() == tcell.KeyRune && event.Rune() == '6' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == '6' && event.Modifiers()&tcell.ModAlt != 0 {
// toggle status line visibility // toggle status line visibility
if name, _ := pages.GetFrontPage(); name != "main" { if name, _ := pages.GetFrontPage(); name != "main" {
@@ -826,7 +831,7 @@ func init() {
// there is no case where user msg is regenerated // there is no case where user msg is regenerated
// lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role // lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
textView.SetText(chatToText(cfg.ShowSys)) textView.SetText(chatToText(cfg.ShowSys))
go chatRound("", cfg.UserRole, textView, true, false) go chatRound("", cfg.UserRole, textView, true, false, false)
return nil return nil
} }
if event.Key() == tcell.KeyF3 && !botRespMode { if event.Key() == tcell.KeyF3 && !botRespMode {
@@ -1129,7 +1134,7 @@ func init() {
// INFO: continue bot/text message // INFO: continue bot/text message
// without new role // without new role
lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
go chatRound("", lastRole, textView, false, true) go chatRound("", lastRole, textView, false, true, false)
return nil return nil
} }
if event.Key() == tcell.KeyCtrlQ { if event.Key() == tcell.KeyCtrlQ {
@@ -1289,7 +1294,7 @@ func init() {
textView.ScrollToEnd() textView.ScrollToEnd()
colorText() colorText()
} }
go chatRound(msgText, persona, textView, false, false) go chatRound(msgText, persona, textView, false, false, false)
// Also clear any image attachment after sending the message // Also clear any image attachment after sending the message
go func() { go func() {
// Wait a short moment for the message to be processed, then clear the image attachment // Wait a short moment for the message to be processed, then clear the image attachment