Enhance: tool filter

This commit is contained in:
Grail Finder
2026-02-27 18:45:59 +03:00
parent c855c30ae2
commit 1fcab8365e
5 changed files with 186 additions and 60 deletions

71
bot.go
View File

@@ -777,7 +777,7 @@ func showSpinner() {
botPersona = cfg.WriteNextMsgAsCompletionAgent botPersona = cfg.WriteNextMsgAsCompletionAgent
} }
for botRespMode || toolRunningMode { for botRespMode || toolRunningMode {
time.Sleep(100 * time.Millisecond) time.Sleep(400 * time.Millisecond)
spin := i % len(spinners) spin := i % len(spinners)
app.QueueUpdateDraw(func() { app.QueueUpdateDraw(func() {
switch { switch {
@@ -1096,12 +1096,9 @@ func findCall(msg, toolCall string) bool {
} }
lastToolCall.Args = openAIToolMap lastToolCall.Args = openAIToolMap
fc = lastToolCall fc = lastToolCall
// Set lastToolCall.ID from parsed tool call ID if available // NOTE: We do NOT override lastToolCall.ID from arguments.
if len(openAIToolMap) > 0 { // The ID should come from the streaming response (chunk.ToolID) set earlier.
if id, exists := openAIToolMap["id"]; exists { // Some tools like todo_create have "id" in their arguments which is NOT the tool call ID.
lastToolCall.ID = id
}
}
} else { } else {
jsStr := toolCallRE.FindString(msg) jsStr := toolCallRE.FindString(msg)
if jsStr == "" { // no tool call case if jsStr == "" { // no tool call case
@@ -1138,14 +1135,21 @@ func findCall(msg, toolCall string) bool {
lastToolCall.Args = fc.Args lastToolCall.Args = fc.Args
} }
// we got here => last msg recognized as a tool call (correct or not) // we got here => last msg recognized as a tool call (correct or not)
// make sure it has ToolCallID // Use the tool call ID from streaming response (lastToolCall.ID)
if chatBody.Messages[len(chatBody.Messages)-1].ToolCallID == "" { // Don't generate random ID - the ID should match between assistant message and tool response
// Tool call IDs should be alphanumeric strings with length 9! lastMsgIdx := len(chatBody.Messages) - 1
chatBody.Messages[len(chatBody.Messages)-1].ToolCallID = randString(9) if lastToolCall.ID != "" {
chatBody.Messages[lastMsgIdx].ToolCallID = lastToolCall.ID
} }
// Ensure lastToolCall.ID is set, fallback to assistant message's ToolCallID // Store tool call info in the assistant message
if lastToolCall.ID == "" { // Convert Args map to JSON string for storage
lastToolCall.ID = chatBody.Messages[len(chatBody.Messages)-1].ToolCallID argsJSON, _ := json.Marshal(lastToolCall.Args)
chatBody.Messages[lastMsgIdx].ToolCalls = []models.ToolCall{
{
ID: lastToolCall.ID,
Name: lastToolCall.Name,
Args: string(argsJSON),
},
} }
// call a func // call a func
_, ok := fnMap[fc.Name] _, ok := fnMap[fc.Name]
@@ -1175,15 +1179,18 @@ func findCall(msg, toolCall string) bool {
toolRunningMode = true toolRunningMode = true
resp := callToolWithAgent(fc.Name, fc.Args) resp := callToolWithAgent(fc.Name, fc.Args)
toolRunningMode = false toolRunningMode = false
toolMsg := string(resp) // Remove the "tool response: " prefix and %+v formatting toolMsg := string(resp)
logger.Info("llm used a tool call", "tool_name", fc.Name, "too_args", fc.Args, "id", fc.ID, "tool_resp", toolMsg) logger.Info("llm used a tool call", "tool_name", fc.Name, "too_args", fc.Args, "id", fc.ID, "tool_resp", toolMsg)
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n", fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
"\n\n", len(chatBody.Messages), cfg.ToolRole, toolMsg) "\n\n", len(chatBody.Messages), cfg.ToolRole, toolMsg)
// Create tool response message with the proper tool_call_id // Create tool response message with the proper tool_call_id
// Mark shell commands as always visible
isShellCommand := fc.Name == "execute_command"
toolResponseMsg := models.RoleMsg{ toolResponseMsg := models.RoleMsg{
Role: cfg.ToolRole, Role: cfg.ToolRole,
Content: toolMsg, Content: toolMsg,
ToolCallID: lastToolCall.ID, // Use the stored tool call ID ToolCallID: lastToolCall.ID,
IsShellCommand: isShellCommand,
} }
chatBody.Messages = append(chatBody.Messages, toolResponseMsg) chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages)) logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages))
@@ -1201,8 +1208,36 @@ func findCall(msg, toolCall string) bool {
func chatToTextSlice(messages []models.RoleMsg, showSys bool) []string { func chatToTextSlice(messages []models.RoleMsg, showSys bool) []string {
resp := make([]string, len(messages)) resp := make([]string, len(messages))
for i, msg := range messages { for i, msg := range messages {
// INFO: skips system msg and tool msg // Handle tool call indicators (assistant messages with tool call but empty content)
if !showSys && (msg.Role == cfg.ToolRole || msg.Role == "system") { if (msg.Role == cfg.AssistantRole || msg.Role == "assistant") && msg.ToolCallID != "" && msg.Content == "" && len(msg.ToolCalls) > 0 {
// This is a tool call indicator - show collapsed
if toolCollapsed {
toolName := msg.ToolCalls[0].Name
resp[i] = fmt.Sprintf("[yellow::i][tool call: %s (press Ctrl+T to expand)][-:-:-]", toolName)
} else {
// Show full tool call info
toolName := msg.ToolCalls[0].Name
resp[i] = fmt.Sprintf("[yellow::i][tool call: %s][-:-:-]\nargs: %s", toolName, msg.ToolCalls[0].Args)
}
continue
}
// Handle tool responses
if msg.Role == cfg.ToolRole || msg.Role == "tool" {
// Always show shell commands
if msg.IsShellCommand {
resp[i] = msg.ToText(i)
continue
}
// Hide non-shell tool responses when collapsed
if toolCollapsed {
continue
}
// When expanded, show tool responses
resp[i] = msg.ToText(i)
continue
}
// INFO: skips system msg when showSys is false
if !showSys && msg.Role == "system" {
continue continue
} }
resp[i] = msg.ToText(i) resp[i] = msg.ToText(i)

69
llm.go
View File

@@ -282,21 +282,38 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
"content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages)) "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
} }
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Filter out tool call indicators (assistant messages with ToolCallID but empty content)
var filteredForLLM []models.RoleMsg
for _, msg := range filteredMessages {
isToolCallIndicator := msg.Role != "system" && msg.ToolCallID != "" && msg.Content == "" && len(msg.ToolCalls) > 0
if isToolCallIndicator {
continue
}
filteredForLLM = append(filteredForLLM, msg)
}
// openai /v1/chat does not support custom roles; needs to be user, assistant, system // openai /v1/chat does not support custom roles; needs to be user, assistant, system
// Add persona suffix to the last user message to indicate who the assistant should reply as // Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)), Messages: make([]models.RoleMsg, len(filteredForLLM)),
Model: chatBody.Model, Model: chatBody.Model,
Stream: chatBody.Stream, Stream: chatBody.Stream,
} }
for i, msg := range filteredMessages { for i, msg := range filteredForLLM {
strippedMsg := *stripThinkingFromMsg(&msg) strippedMsg := *stripThinkingFromMsg(&msg)
if strippedMsg.Role == cfg.UserRole { if strippedMsg.Role == cfg.UserRole {
bodyCopy.Messages[i] = strippedMsg bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user" bodyCopy.Messages[i].Role = "user"
} else if strippedMsg.Role == cfg.AssistantRole {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "assistant"
} else if strippedMsg.Role == cfg.ToolRole {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "tool"
} else { } else {
bodyCopy.Messages[i] = strippedMsg bodyCopy.Messages[i] = strippedMsg
} }
// Clear ToolCalls - they're stored in chat history for display but not sent to LLM
bodyCopy.Messages[i].ToolCalls = nil
} }
// Clean null/empty messages to prevent API issues // Clean null/empty messages to prevent API issues
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages) bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
@@ -423,20 +440,37 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
} }
// Create copy of chat body with standardized user role // Create copy of chat body with standardized user role
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Filter out tool call indicators (assistant messages with ToolCallID but empty content)
var filteredForLLM []models.RoleMsg
for _, msg := range filteredMessages {
isToolCallIndicator := msg.Role != "system" && msg.ToolCallID != "" && msg.Content == "" && len(msg.ToolCalls) > 0
if isToolCallIndicator {
continue
}
filteredForLLM = append(filteredForLLM, msg)
}
// Add persona suffix to the last user message to indicate who the assistant should reply as // Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)), Messages: make([]models.RoleMsg, len(filteredForLLM)),
Model: chatBody.Model, Model: chatBody.Model,
Stream: chatBody.Stream, Stream: chatBody.Stream,
} }
for i, msg := range filteredMessages { for i, msg := range filteredForLLM {
strippedMsg := *stripThinkingFromMsg(&msg) strippedMsg := *stripThinkingFromMsg(&msg)
if strippedMsg.Role == cfg.UserRole || i == 1 { if strippedMsg.Role == cfg.UserRole || i == 1 {
bodyCopy.Messages[i] = strippedMsg bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user" bodyCopy.Messages[i].Role = "user"
} else if strippedMsg.Role == cfg.AssistantRole {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "assistant"
} else if strippedMsg.Role == cfg.ToolRole {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "tool"
} else { } else {
bodyCopy.Messages[i] = strippedMsg bodyCopy.Messages[i] = strippedMsg
} }
// Clear ToolCalls - they're stored in chat history for display but not sent to LLM
bodyCopy.Messages[i].ToolCalls = nil
} }
// Clean null/empty messages to prevent API issues // Clean null/empty messages to prevent API issues
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages) bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
@@ -587,20 +621,37 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
} }
// Create copy of chat body with standardized user role // Create copy of chat body with standardized user role
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Filter out tool call indicators (assistant messages with ToolCallID but empty content)
var filteredForLLM []models.RoleMsg
for _, msg := range filteredMessages {
isToolCallIndicator := msg.Role != "system" && msg.ToolCallID != "" && msg.Content == "" && len(msg.ToolCalls) > 0
if isToolCallIndicator {
continue
}
filteredForLLM = append(filteredForLLM, msg)
}
// Add persona suffix to the last user message to indicate who the assistant should reply as // Add persona suffix to the last user message to indicate who the assistant should reply as
bodyCopy := &models.ChatBody{ bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)), Messages: make([]models.RoleMsg, len(filteredForLLM)),
Model: chatBody.Model, Model: chatBody.Model,
Stream: chatBody.Stream, Stream: chatBody.Stream,
} }
for i, msg := range filteredMessages { for i, msg := range filteredForLLM {
strippedMsg := *stripThinkingFromMsg(&msg) strippedMsg := *stripThinkingFromMsg(&msg)
bodyCopy.Messages[i] = strippedMsg if strippedMsg.Role == cfg.UserRole {
// Standardize role if it's a user role
if bodyCopy.Messages[i].Role == cfg.UserRole {
bodyCopy.Messages[i] = strippedMsg bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user" bodyCopy.Messages[i].Role = "user"
} else if strippedMsg.Role == cfg.AssistantRole {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "assistant"
} else if strippedMsg.Role == cfg.ToolRole {
bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "tool"
} else {
bodyCopy.Messages[i] = strippedMsg
} }
// Clear ToolCalls - they're stored in chat history for display but not sent to LLM
bodyCopy.Messages[i].ToolCalls = nil
} }
// Clean null/empty messages to prevent API issues // Clean null/empty messages to prevent API issues
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages) bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)

View File

@@ -16,6 +16,7 @@ var (
shellHistory []string shellHistory []string
shellHistoryPos int = -1 shellHistoryPos int = -1
thinkingCollapsed = false thinkingCollapsed = false
toolCollapsed = false
statusLineTempl = "help (F12) | chat: [orange:-:b]%s[-:-:-] (F1) | [%s:-:b]tool use[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | [%s:-:b]skip LLM resp[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x)" statusLineTempl = "help (F12) | chat: [orange:-:b]%s[-:-:-] (F1) | [%s:-:b]tool use[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | [%s:-:b]skip LLM resp[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x)"
focusSwitcher = map[tview.Primitive]tview.Primitive{} focusSwitcher = map[tview.Primitive]tview.Primitive{}
) )

View File

@@ -27,6 +27,12 @@ type FuncCall struct {
Args map[string]string `json:"args"` Args map[string]string `json:"args"`
} }
type ToolCall struct {
ID string `json:"id,omitempty"`
Name string `json:"name"`
Args string `json:"arguments"`
}
type LLMResp struct { type LLMResp struct {
Choices []struct { Choices []struct {
FinishReason string `json:"finish_reason"` FinishReason string `json:"finish_reason"`
@@ -109,6 +115,8 @@ type RoleMsg struct {
Content string `json:"-"` Content string `json:"-"`
ContentParts []any `json:"-"` ContentParts []any `json:"-"`
ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages
ToolCalls []ToolCall `json:"tool_calls,omitempty"` // For assistant messages with tool calls
IsShellCommand bool `json:"is_shell_command,omitempty"` // True for shell command outputs (always shown)
KnownTo []string `json:"known_to,omitempty"` KnownTo []string `json:"known_to,omitempty"`
Stats *ResponseStats `json:"stats"` Stats *ResponseStats `json:"stats"`
hasContentParts bool // Flag to indicate which content type to marshal hasContentParts bool // Flag to indicate which content type to marshal
@@ -124,12 +132,16 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
Role string `json:"role"` Role string `json:"role"`
Content []any `json:"content"` Content []any `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
IsShellCommand bool `json:"is_shell_command,omitempty"`
KnownTo []string `json:"known_to,omitempty"` KnownTo []string `json:"known_to,omitempty"`
Stats *ResponseStats `json:"stats,omitempty"` Stats *ResponseStats `json:"stats,omitempty"`
}{ }{
Role: m.Role, Role: m.Role,
Content: m.ContentParts, Content: m.ContentParts,
ToolCallID: m.ToolCallID, ToolCallID: m.ToolCallID,
ToolCalls: m.ToolCalls,
IsShellCommand: m.IsShellCommand,
KnownTo: m.KnownTo, KnownTo: m.KnownTo,
Stats: m.Stats, Stats: m.Stats,
} }
@@ -140,12 +152,16 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
IsShellCommand bool `json:"is_shell_command,omitempty"`
KnownTo []string `json:"known_to,omitempty"` KnownTo []string `json:"known_to,omitempty"`
Stats *ResponseStats `json:"stats,omitempty"` Stats *ResponseStats `json:"stats,omitempty"`
}{ }{
Role: m.Role, Role: m.Role,
Content: m.Content, Content: m.Content,
ToolCallID: m.ToolCallID, ToolCallID: m.ToolCallID,
ToolCalls: m.ToolCalls,
IsShellCommand: m.IsShellCommand,
KnownTo: m.KnownTo, KnownTo: m.KnownTo,
Stats: m.Stats, Stats: m.Stats,
} }
@@ -160,6 +176,8 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
Role string `json:"role"` Role string `json:"role"`
Content []any `json:"content"` Content []any `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
IsShellCommand bool `json:"is_shell_command,omitempty"`
KnownTo []string `json:"known_to,omitempty"` KnownTo []string `json:"known_to,omitempty"`
Stats *ResponseStats `json:"stats,omitempty"` Stats *ResponseStats `json:"stats,omitempty"`
} }
@@ -167,6 +185,8 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
m.Role = structured.Role m.Role = structured.Role
m.ContentParts = structured.Content m.ContentParts = structured.Content
m.ToolCallID = structured.ToolCallID m.ToolCallID = structured.ToolCallID
m.ToolCalls = structured.ToolCalls
m.IsShellCommand = structured.IsShellCommand
m.KnownTo = structured.KnownTo m.KnownTo = structured.KnownTo
m.Stats = structured.Stats m.Stats = structured.Stats
m.hasContentParts = true m.hasContentParts = true
@@ -178,6 +198,8 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"` ToolCallID string `json:"tool_call_id,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
IsShellCommand bool `json:"is_shell_command,omitempty"`
KnownTo []string `json:"known_to,omitempty"` KnownTo []string `json:"known_to,omitempty"`
Stats *ResponseStats `json:"stats,omitempty"` Stats *ResponseStats `json:"stats,omitempty"`
} }
@@ -187,6 +209,8 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
m.Role = simple.Role m.Role = simple.Role
m.Content = simple.Content m.Content = simple.Content
m.ToolCallID = simple.ToolCallID m.ToolCallID = simple.ToolCallID
m.ToolCalls = simple.ToolCalls
m.IsShellCommand = simple.IsShellCommand
m.KnownTo = simple.KnownTo m.KnownTo = simple.KnownTo
m.Stats = simple.Stats m.Stats = simple.Stats
m.hasContentParts = false m.hasContentParts = false

15
tui.go
View File

@@ -99,6 +99,7 @@ var (
[yellow]Alt+8[white]: show char img or last picked img [yellow]Alt+8[white]: show char img or last picked img
[yellow]Alt+9[white]: warm up (load) selected llama.cpp model [yellow]Alt+9[white]: warm up (load) selected llama.cpp model
[yellow]Alt+t[white]: toggle thinking blocks visibility (collapse/expand <think> blocks) [yellow]Alt+t[white]: toggle thinking blocks visibility (collapse/expand <think> blocks)
[yellow]Ctrl+t[white]: toggle tool call/response visibility (collapse/expand tool calls and non-shell tool responses)
[yellow]Alt+i[white]: show colorscheme selection popup [yellow]Alt+i[white]: show colorscheme selection popup
=== scrolling chat window (some keys similar to vim) === === scrolling chat window (some keys similar to vim) ===
@@ -563,6 +564,20 @@ func init() {
} }
return nil return nil
} }
// Handle Ctrl+T to toggle tool call/response visibility
if event.Key() == tcell.KeyRune && event.Rune() == 't' && event.Modifiers()&tcell.ModCtrl != 0 {
toolCollapsed = !toolCollapsed
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
colorText()
status := "expanded"
if toolCollapsed {
status = "collapsed"
}
if err := notifyUser("tools", "Tool calls/responses "+status); err != nil {
logger.Error("failed to send notification", "error", err)
}
return nil
}
if event.Key() == tcell.KeyRune && event.Rune() == 'i' && event.Modifiers()&tcell.ModAlt != 0 { if event.Key() == tcell.KeyRune && event.Rune() == 'i' && event.Modifiers()&tcell.ModAlt != 0 {
if isFullScreenPageActive() { if isFullScreenPageActive() {
return event return event