Feat: failed tool call is sent to the LLM

This commit is contained in:
Grail Finder
2025-11-27 20:54:51 +03:00
parent 77774f2356
commit 2ff954f7c9
2 changed files with 38 additions and 14 deletions

32
bot.go
View File

@@ -11,6 +11,7 @@ import (
"gf-lt/models" "gf-lt/models"
"gf-lt/rag" "gf-lt/rag"
"gf-lt/storage" "gf-lt/storage"
"html"
"io" "io"
"log/slog" "log/slog"
"net" "net"
@@ -473,10 +474,23 @@ out:
func findCall(msg, toolCall string, tv *tview.TextView) { func findCall(msg, toolCall string, tv *tview.TextView) {
fc := &models.FuncCall{} fc := &models.FuncCall{}
if toolCall != "" { if toolCall != "" {
// HTML-decode the tool call string to handle encoded characters like &lt; -> <
decodedToolCall := html.UnescapeString(toolCall)
openAIToolMap := make(map[string]string) openAIToolMap := make(map[string]string)
// respect tool call // respect tool call
if err := json.Unmarshal([]byte(toolCall), &openAIToolMap); err != nil { if err := json.Unmarshal([]byte(decodedToolCall), &openAIToolMap); err != nil {
logger.Error("failed to unmarshal openai tool call", "call", toolCall, "error", err) logger.Error("failed to unmarshal openai tool call", "call", decodedToolCall, "error", err)
// Send error response to LLM so it can retry or handle the error
toolResponseMsg := models.RoleMsg{
Role: cfg.ToolRole,
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
ToolCallID: lastToolCallID, // Use the stored tool call ID
}
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Clear the stored tool call ID after using it
lastToolCallID = ""
// Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false)
return return
} }
lastToolCall.Args = openAIToolMap lastToolCall.Args = openAIToolMap
@@ -489,8 +503,18 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
prefix := "__tool_call__\n" prefix := "__tool_call__\n"
suffix := "\n__tool_call__" suffix := "\n__tool_call__"
jsStr = strings.TrimSuffix(strings.TrimPrefix(jsStr, prefix), suffix) jsStr = strings.TrimSuffix(strings.TrimPrefix(jsStr, prefix), suffix)
if err := json.Unmarshal([]byte(jsStr), &fc); err != nil { // HTML-decode the JSON string to handle encoded characters like &lt; -> <
logger.Error("failed to unmarshal tool call", "error", err, "json_string", jsStr) decodedJsStr := html.UnescapeString(jsStr)
if err := json.Unmarshal([]byte(decodedJsStr), &fc); err != nil {
logger.Error("failed to unmarshal tool call", "error", err, "json_string", decodedJsStr)
// Send error response to LLM so it can retry or handle the error
toolResponseMsg := models.RoleMsg{
Role: cfg.ToolRole,
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
}
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false)
return return
} }
} }

View File

@@ -228,10 +228,10 @@ func fileRead(args map[string]string) []byte {
return jsonResult return jsonResult
} }
func fileUpdate(args map[string]string) []byte { func fileWrite(args map[string]string) []byte {
path, ok := args["path"] path, ok := args["path"]
if !ok || path == "" { if !ok || path == "" {
msg := "path not provided to file_update tool" msg := "path not provided to file_write tool"
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
@@ -249,7 +249,7 @@ func fileUpdate(args map[string]string) []byte {
switch mode { switch mode {
case "overwrite": case "overwrite":
if err := writeStringToFile(path, content); err != nil { if err := writeStringToFile(path, content); err != nil {
msg := "failed to update file; error: " + err.Error() msg := "failed to write to file; error: " + err.Error()
logger.Error(msg) logger.Error(msg)
return []byte(msg) return []byte(msg)
} }
@@ -265,7 +265,7 @@ func fileUpdate(args map[string]string) []byte {
return []byte(msg) return []byte(msg)
} }
msg := "file updated successfully at " + path msg := "file written successfully at " + path
return []byte(msg) return []byte(msg)
} }
@@ -533,7 +533,7 @@ var fnMap = map[string]fnSig{
"websearch": websearch, "websearch": websearch,
"file_create": fileCreate, "file_create": fileCreate,
"file_read": fileRead, "file_read": fileRead,
"file_update": fileUpdate, "file_write": fileWrite,
"file_delete": fileDelete, "file_delete": fileDelete,
"file_move": fileMove, "file_move": fileMove,
"file_copy": fileCopy, "file_copy": fileCopy,
@@ -661,19 +661,19 @@ var baseTools = []models.Tool{
}, },
}, },
// file_update // file_write
models.Tool{ models.Tool{
Type: "function", Type: "function",
Function: models.ToolFunc{ Function: models.ToolFunc{
Name: "file_update", Name: "file_write",
Description: "Update a file with new content. Use when you want to modify an existing file (overwrite or append).", Description: "Write content to a file. Use when you want to create or modify a file (overwrite or append).",
Parameters: models.ToolFuncParams{ Parameters: models.ToolFuncParams{
Type: "object", Type: "object",
Required: []string{"path", "content"}, Required: []string{"path", "content"},
Properties: map[string]models.ToolArgProps{ Properties: map[string]models.ToolArgProps{
"path": models.ToolArgProps{ "path": models.ToolArgProps{
Type: "string", Type: "string",
Description: "path of the file to update", Description: "path of the file to write to",
}, },
"content": models.ToolArgProps{ "content": models.ToolArgProps{
Type: "string", Type: "string",
@@ -681,7 +681,7 @@ var baseTools = []models.Tool{
}, },
"mode": models.ToolArgProps{ "mode": models.ToolArgProps{
Type: "string", Type: "string",
Description: "update mode: 'overwrite' to replace entire file content, 'append' to add to the end (defaults to 'overwrite')", Description: "write mode: 'overwrite' to replace entire file content, 'append' to add to the end (defaults to 'overwrite')",
}, },
}, },
}, },