Merge branch 'enha/or-tools'

This commit is contained in:
Grail Finder
2025-11-28 14:35:13 +03:00
8 changed files with 893 additions and 92 deletions

82
bot.go
View File

@@ -11,6 +11,7 @@ import (
"gf-lt/models" "gf-lt/models"
"gf-lt/rag" "gf-lt/rag"
"gf-lt/storage" "gf-lt/storage"
"html"
"io" "io"
"log/slog" "log/slog"
"net" "net"
@@ -44,6 +45,7 @@ var (
ragger *rag.RAG ragger *rag.RAG
chunkParser ChunkParser chunkParser ChunkParser
lastToolCall *models.FuncCall lastToolCall *models.FuncCall
lastToolCallID string // Store the ID of the most recent tool call
//nolint:unused // TTS_ENABLED conditionally uses this //nolint:unused // TTS_ENABLED conditionally uses this
orator extra.Orator orator extra.Orator
asr extra.STT asr extra.STT
@@ -189,8 +191,7 @@ func sendMsgToLLM(body io.Reader) {
} }
} else { } else {
// Log the request body for debugging // Log the request body for debugging
logger.Info("sending request to API", "api", cfg.CurrentAPI, "body", string(bodyBytes)) logger.Debug("sending request to API", "api", cfg.CurrentAPI, "body", string(bodyBytes))
// Create request with the captured body // Create request with the captured body
req, err = http.NewRequest("POST", cfg.CurrentAPI, bytes.NewReader(bodyBytes)) req, err = http.NewRequest("POST", cfg.CurrentAPI, bytes.NewReader(bodyBytes))
if err != nil { if err != nil {
@@ -238,6 +239,9 @@ func sendMsgToLLM(body io.Reader) {
logger.Error("error reading response body", "error", err, "line", string(line), logger.Error("error reading response body", "error", err, "line", string(line),
"user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI) "user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
// if err.Error() != "EOF" { // if err.Error() != "EOF" {
if err := notifyUser("API error", err.Error()); err != nil {
logger.Error("failed to notify", "error", err)
}
streamDone <- true streamDone <- true
break break
// } // }
@@ -267,11 +271,12 @@ func sendMsgToLLM(body io.Reader) {
break break
} }
// Handle error messages in response content // Handle error messages in response content
if string(line) != "" && strings.Contains(strings.ToLower(string(line)), "error") { // example needed, since llm could use the word error in the normal msg
logger.Error("API error response detected", "line", line, "url", cfg.CurrentAPI) // if string(line) != "" && strings.Contains(strings.ToLower(string(line)), "error") {
streamDone <- true // logger.Error("API error response detected", "line", line, "url", cfg.CurrentAPI)
break // streamDone <- true
} // break
// }
if chunk.Finished { if chunk.Finished {
if chunk.Chunk != "" { if chunk.Chunk != "" {
logger.Warn("text inside of finish llmchunk", "chunk", chunk, "counter", counter) logger.Warn("text inside of finish llmchunk", "chunk", chunk, "counter", counter)
@@ -290,6 +295,8 @@ func sendMsgToLLM(body io.Reader) {
openAIToolChan <- chunk.ToolChunk openAIToolChan <- chunk.ToolChunk
if chunk.FuncName != "" { if chunk.FuncName != "" {
lastToolCall.Name = chunk.FuncName lastToolCall.Name = chunk.FuncName
// Store the tool call ID for the response
lastToolCallID = chunk.ToolID
} }
interrupt: interrupt:
if interruptResp { // read bytes, so it would not get into beginning of the next req if interruptResp { // read bytes, so it would not get into beginning of the next req
@@ -467,10 +474,23 @@ out:
func findCall(msg, toolCall string, tv *tview.TextView) { func findCall(msg, toolCall string, tv *tview.TextView) {
fc := &models.FuncCall{} fc := &models.FuncCall{}
if toolCall != "" { if toolCall != "" {
// HTML-decode the tool call string to handle encoded characters like &lt; -> <=
decodedToolCall := html.UnescapeString(toolCall)
openAIToolMap := make(map[string]string) openAIToolMap := make(map[string]string)
// respect tool call // respect tool call
if err := json.Unmarshal([]byte(toolCall), &openAIToolMap); err != nil { if err := json.Unmarshal([]byte(decodedToolCall), &openAIToolMap); err != nil {
logger.Error("failed to unmarshal openai tool call", "call", toolCall, "error", err) logger.Error("failed to unmarshal openai tool call", "call", decodedToolCall, "error", err)
// Send error response to LLM so it can retry or handle the error
toolResponseMsg := models.RoleMsg{
Role: cfg.ToolRole,
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
ToolCallID: lastToolCallID, // Use the stored tool call ID
}
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Clear the stored tool call ID after using it
lastToolCallID = ""
// Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false)
return return
} }
lastToolCall.Args = openAIToolMap lastToolCall.Args = openAIToolMap
@@ -483,8 +503,18 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
prefix := "__tool_call__\n" prefix := "__tool_call__\n"
suffix := "\n__tool_call__" suffix := "\n__tool_call__"
jsStr = strings.TrimSuffix(strings.TrimPrefix(jsStr, prefix), suffix) jsStr = strings.TrimSuffix(strings.TrimPrefix(jsStr, prefix), suffix)
if err := json.Unmarshal([]byte(jsStr), &fc); err != nil { // HTML-decode the JSON string to handle encoded characters like &lt; -> <=
logger.Error("failed to unmarshal tool call", "error", err, "json_string", jsStr) decodedJsStr := html.UnescapeString(jsStr)
if err := json.Unmarshal([]byte(decodedJsStr), &fc); err != nil {
logger.Error("failed to unmarshal tool call", "error", err, "json_string", decodedJsStr)
// Send error response to LLM so it can retry or handle the error
toolResponseMsg := models.RoleMsg{
Role: cfg.ToolRole,
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
}
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Trigger the assistant to continue processing with the error message
chatRound("", cfg.AssistantRole, tv, false, false)
return return
} }
} }
@@ -492,14 +522,38 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
f, ok := fnMap[fc.Name] f, ok := fnMap[fc.Name]
if !ok { if !ok {
m := fc.Name + " is not implemented" m := fc.Name + " is not implemented"
chatRound(m, cfg.ToolRole, tv, false, false) // Create tool response message with the proper tool_call_id
toolResponseMsg := models.RoleMsg{
Role: cfg.ToolRole,
Content: m,
ToolCallID: lastToolCallID, // Use the stored tool call ID
}
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Clear the stored tool call ID after using it
lastToolCallID = ""
// Trigger the assistant to continue processing with the new tool response
// by calling chatRound with empty content to continue the assistant's response
chatRound("", cfg.AssistantRole, tv, false, false)
return return
} }
resp := f(fc.Args) resp := f(fc.Args)
toolMsg := fmt.Sprintf("tool response: %+v", string(resp)) toolMsg := string(resp) // Remove the "tool response: " prefix and %+v formatting
logger.Info("llm used tool call", "tool_resp", toolMsg, "tool_attrs", fc)
fmt.Fprintf(tv, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n", fmt.Fprintf(tv, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
"\n", len(chatBody.Messages), cfg.ToolRole, toolMsg) "\n", len(chatBody.Messages), cfg.ToolRole, toolMsg)
chatRound(toolMsg, cfg.ToolRole, tv, false, false) // Create tool response message with the proper tool_call_id
toolResponseMsg := models.RoleMsg{
Role: cfg.ToolRole,
Content: toolMsg,
ToolCallID: lastToolCallID, // Use the stored tool call ID
}
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
// Clear the stored tool call ID after using it
lastToolCallID = ""
// Trigger the assistant to continue processing with the new tool response
// by calling chatRound with empty content to continue the assistant's response
chatRound("", cfg.AssistantRole, tv, false, false)
} }
func chatToTextSlice(showSys bool) []string { func chatToTextSlice(showSys bool) []string {

View File

@@ -209,8 +209,17 @@ func makeStatusLine() string {
} else { } else {
imageInfo = "" imageInfo = ""
} }
// Add shell mode status to status line
var shellModeInfo string
if shellMode {
shellModeInfo = " | [green:-:b]SHELL MODE[-:-:-]"
} else {
shellModeInfo = ""
}
statusLine := fmt.Sprintf(indexLineCompletion, botRespMode, cfg.AssistantRole, activeChatName, statusLine := fmt.Sprintf(indexLineCompletion, botRespMode, cfg.AssistantRole, activeChatName,
cfg.ToolUse, chatBody.Model, cfg.SkipLLMResp, cfg.CurrentAPI, cfg.ThinkUse, logLevel.Level(), cfg.ToolUse, chatBody.Model, cfg.SkipLLMResp, cfg.CurrentAPI, cfg.ThinkUse, logLevel.Level(),
isRecording, persona, botPersona, injectRole) isRecording, persona, botPersona, injectRole)
return statusLine + imageInfo return statusLine + imageInfo + shellModeInfo
} }

36
llm.go
View File

@@ -76,7 +76,6 @@ type OpenRouterChat struct {
Model string Model string
} }
func (lcp LlamaCPPeer) GetToken() string { func (lcp LlamaCPPeer) GetToken() string {
return "" return ""
} }
@@ -161,11 +160,14 @@ func (op OpenAIer) ParseChunk(data []byte) (*models.TextChunk, error) {
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content, Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
} }
if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 { if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
resp.ToolChunk = llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Arguments toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
fname := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Name resp.ToolChunk = toolCall.Function.Arguments
fname := toolCall.Function.Name
if fname != "" { if fname != "" {
resp.FuncName = fname resp.FuncName = fname
} }
// Capture the tool call ID if available
resp.ToolID = toolCall.ID
} }
if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" { if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
if resp.Chunk != "" { if resp.Chunk != "" {
@@ -469,6 +471,22 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
resp := &models.TextChunk{ resp := &models.TextChunk{
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content, Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
} }
// Handle tool calls similar to OpenAIer
if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
resp.ToolChunk = toolCall.Function.Arguments
fname := toolCall.Function.Name
if fname != "" {
resp.FuncName = fname
}
// Capture the tool call ID if available
resp.ToolID = toolCall.ID
}
if resp.ToolChunk != "" {
resp.ToolResp = true
}
if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" { if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
if resp.Chunk != "" { if resp.Chunk != "" {
logger.Error("text inside of finish llmchunk", "chunk", llmchunk) logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
@@ -484,16 +502,9 @@ func (or OpenRouterChat) GetToken() string {
func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("formmsg open router completion", "link", cfg.CurrentAPI) logger.Debug("formmsg open router completion", "link", cfg.CurrentAPI)
// Capture the image attachment path at the beginning to avoid race conditions // Capture the image attachment path at the beginning to avoid race conditions
// with API rotation that might clear the global variable // with API rotation that might clear the global variable
localImageAttachmentPath := imageAttachmentPath localImageAttachmentPath := imageAttachmentPath
if cfg.ToolUse && !resume {
// prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
// add to chat body
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
}
if msg != "" { // otherwise let the bot continue if msg != "" { // otherwise let the bot continue
var newMsg models.RoleMsg var newMsg models.RoleMsg
// Check if we have an image to add to this message // Check if we have an image to add to this message
@@ -536,7 +547,6 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
Model: chatBody.Model, Model: chatBody.Model,
Stream: chatBody.Stream, Stream: chatBody.Stream,
} }
for i, msg := range chatBody.Messages { for i, msg := range chatBody.Messages {
bodyCopy.Messages[i] = msg bodyCopy.Messages[i] = msg
// Standardize role if it's a user role // Standardize role if it's a user role
@@ -544,8 +554,10 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
bodyCopy.Messages[i].Role = "user" bodyCopy.Messages[i].Role = "user"
} }
} }
orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps) orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps)
if cfg.ToolUse && !resume && role != cfg.ToolRole {
orBody.Tools = baseTools // set tools to use
}
data, err := json.Marshal(orBody) data, err := json.Marshal(orBody)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)

View File

@@ -14,6 +14,8 @@ var (
injectRole = true injectRole = true
selectedIndex = int(-1) selectedIndex = int(-1)
currentAPIIndex = 0 // Index to track current API in ApiLinks slice currentAPIIndex = 0 // Index to track current API in ApiLinks slice
currentORModelIndex = 0 // Index to track current OpenRouter model in ORFreeModels slice
shellMode = false
// indexLine = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | ThinkUse: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q)" // indexLine = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | ThinkUse: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q)"
indexLineCompletion = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | Insert <think>: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | Bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role_inject [orange:-:b]%v[-:-:-]" indexLineCompletion = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | Insert <think>: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | Bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role_inject [orange:-:b]%v[-:-:-]"
focusSwitcher = map[tview.Primitive]tview.Primitive{} focusSwitcher = map[tview.Primitive]tview.Primitive{}

View File

@@ -9,6 +9,7 @@ import (
) )
type FuncCall struct { type FuncCall struct {
ID string `json:"id,omitempty"`
Name string `json:"name"` Name string `json:"name"`
Args map[string]string `json:"args"` Args map[string]string `json:"args"`
} }
@@ -39,6 +40,7 @@ type ToolDeltaFunc struct {
} }
type ToolDeltaResp struct { type ToolDeltaResp struct {
ID string `json:"id,omitempty"`
Index int `json:"index"` Index int `json:"index"`
Function ToolDeltaFunc `json:"function"` Function ToolDeltaFunc `json:"function"`
} }
@@ -70,6 +72,7 @@ type TextChunk struct {
Finished bool Finished bool
ToolResp bool ToolResp bool
FuncName string FuncName string
ToolID string
} }
type TextContentPart struct { type TextContentPart struct {
@@ -89,6 +92,7 @@ type RoleMsg struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"-"` Content string `json:"-"`
ContentParts []interface{} `json:"-"` ContentParts []interface{} `json:"-"`
ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages
hasContentParts bool // Flag to indicate which content type to marshal hasContentParts bool // Flag to indicate which content type to marshal
} }
@@ -99,9 +103,11 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
aux := struct { aux := struct {
Role string `json:"role"` Role string `json:"role"`
Content []interface{} `json:"content"` Content []interface{} `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"`
}{ }{
Role: m.Role, Role: m.Role,
Content: m.ContentParts, Content: m.ContentParts,
ToolCallID: m.ToolCallID,
} }
return json.Marshal(aux) return json.Marshal(aux)
} else { } else {
@@ -109,9 +115,11 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
aux := struct { aux := struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"`
}{ }{
Role: m.Role, Role: m.Role,
Content: m.Content, Content: m.Content,
ToolCallID: m.ToolCallID,
} }
return json.Marshal(aux) return json.Marshal(aux)
} }
@@ -123,10 +131,12 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
var structured struct { var structured struct {
Role string `json:"role"` Role string `json:"role"`
Content []interface{} `json:"content"` Content []interface{} `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"`
} }
if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 { if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 {
m.Role = structured.Role m.Role = structured.Role
m.ContentParts = structured.Content m.ContentParts = structured.Content
m.ToolCallID = structured.ToolCallID
m.hasContentParts = true m.hasContentParts = true
return nil return nil
} }
@@ -135,12 +145,14 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
var simple struct { var simple struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
ToolCallID string `json:"tool_call_id,omitempty"`
} }
if err := json.Unmarshal(data, &simple); err != nil { if err := json.Unmarshal(data, &simple); err != nil {
return err return err
} }
m.Role = simple.Role m.Role = simple.Role
m.Content = simple.Content m.Content = simple.Content
m.ToolCallID = simple.ToolCallID
m.hasContentParts = false m.hasContentParts = false
return nil return nil
} }

View File

@@ -31,6 +31,7 @@ type OpenRouterChatReq struct {
Temperature float32 `json:"temperature"` Temperature float32 `json:"temperature"`
MinP float32 `json:"min_p"` MinP float32 `json:"min_p"`
NPredict int32 `json:"max_tokens"` NPredict int32 `json:"max_tokens"`
Tools []Tool `json:"tools"`
} }
func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq { func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
@@ -60,6 +61,7 @@ type OpenRouterChatRespNonStream struct {
Content string `json:"content"` Content string `json:"content"`
Refusal any `json:"refusal"` Refusal any `json:"refusal"`
Reasoning any `json:"reasoning"` Reasoning any `json:"reasoning"`
ToolCalls []ToolDeltaResp `json:"tool_calls"`
} `json:"message"` } `json:"message"`
} `json:"choices"` } `json:"choices"`
Usage struct { Usage struct {
@@ -80,6 +82,7 @@ type OpenRouterChatResp struct {
Delta struct { Delta struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
ToolCalls []ToolDeltaResp `json:"tool_calls"`
} `json:"delta"` } `json:"delta"`
FinishReason string `json:"finish_reason"` FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"` NativeFinishReason string `json:"native_finish_reason"`

586
tools.go
View File

@@ -6,6 +6,9 @@ import (
"fmt" "fmt"
"gf-lt/extra" "gf-lt/extra"
"gf-lt/models" "gf-lt/models"
"io"
"os"
"os/exec"
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
@@ -36,13 +39,58 @@ Your current tools:
}, },
{ {
"name":"memorise", "name":"memorise",
"args": ["topic", "info"], "args": ["topic", "data"],
"when_to_use": "when asked to memorise something" "when_to_use": "when asked to memorise information under a topic"
}, },
{ {
"name":"recall_topics", "name":"recall_topics",
"args": [], "args": [],
"when_to_use": "to see what topics are saved in memory" "when_to_use": "to see what topics are saved in memory"
},
{
"name":"websearch",
"args": ["query", "limit"],
"when_to_use": "when asked to search the web for information; limit is optional (default 3)"
},
{
"name":"file_create",
"args": ["path", "content"],
"when_to_use": "when asked to create a new file with optional content"
},
{
"name":"file_read",
"args": ["path"],
"when_to_use": "when asked to read the content of a file"
},
{
"name":"file_write",
"args": ["path", "content", "mode"],
"when_to_use": "when asked to write content to a file; mode is optional (overwrite or append, default: overwrite)"
},
{
"name":"file_delete",
"args": ["path"],
"when_to_use": "when asked to delete a file"
},
{
"name":"file_move",
"args": ["src", "dst"],
"when_to_use": "when asked to move a file from source to destination"
},
{
"name":"file_copy",
"args": ["src", "dst"],
"when_to_use": "when asked to copy a file from source to destination"
},
{
"name":"file_list",
"args": ["path"],
"when_to_use": "when asked to list files in a directory; path is optional (default: current directory)"
},
{
"name":"execute_command",
"args": ["command", "args"],
"when_to_use": "when asked to execute a system command; args is optional"
} }
] ]
</tools> </tools>
@@ -171,6 +219,356 @@ func recallTopics(args map[string]string) []byte {
return []byte(joinedS) return []byte(joinedS)
} }
// File Manipulation Tools
func fileCreate(args map[string]string) []byte {
path, ok := args["path"]
if !ok || path == "" {
msg := "path not provided to file_create tool"
logger.Error(msg)
return []byte(msg)
}
content, ok := args["content"]
if !ok {
content = ""
}
if err := writeStringToFile(path, content); err != nil {
msg := "failed to create file; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
msg := "file created successfully at " + path
return []byte(msg)
}
func fileRead(args map[string]string) []byte {
path, ok := args["path"]
if !ok || path == "" {
msg := "path not provided to file_read tool"
logger.Error(msg)
return []byte(msg)
}
content, err := readStringFromFile(path)
if err != nil {
msg := "failed to read file; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
result := map[string]string{
"content": content,
"path": path,
}
jsonResult, err := json.Marshal(result)
if err != nil {
msg := "failed to marshal result; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
return jsonResult
}
func fileWrite(args map[string]string) []byte {
path, ok := args["path"]
if !ok || path == "" {
msg := "path not provided to file_write tool"
logger.Error(msg)
return []byte(msg)
}
content, ok := args["content"]
if !ok {
content = ""
}
mode, ok := args["mode"]
if !ok || mode == "" {
mode = "overwrite"
}
switch mode {
case "overwrite":
if err := writeStringToFile(path, content); err != nil {
msg := "failed to write to file; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
case "append":
if err := appendStringToFile(path, content); err != nil {
msg := "failed to append to file; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
default:
msg := "invalid mode; use 'overwrite' or 'append'"
logger.Error(msg)
return []byte(msg)
}
msg := "file written successfully at " + path
return []byte(msg)
}
func fileDelete(args map[string]string) []byte {
path, ok := args["path"]
if !ok || path == "" {
msg := "path not provided to file_delete tool"
logger.Error(msg)
return []byte(msg)
}
if err := removeFile(path); err != nil {
msg := "failed to delete file; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
msg := "file deleted successfully at " + path
return []byte(msg)
}
func fileMove(args map[string]string) []byte {
src, ok := args["src"]
if !ok || src == "" {
msg := "source path not provided to file_move tool"
logger.Error(msg)
return []byte(msg)
}
dst, ok := args["dst"]
if !ok || dst == "" {
msg := "destination path not provided to file_move tool"
logger.Error(msg)
return []byte(msg)
}
if err := moveFile(src, dst); err != nil {
msg := "failed to move file; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
msg := fmt.Sprintf("file moved successfully from %s to %s", src, dst)
return []byte(msg)
}
func fileCopy(args map[string]string) []byte {
src, ok := args["src"]
if !ok || src == "" {
msg := "source path not provided to file_copy tool"
logger.Error(msg)
return []byte(msg)
}
dst, ok := args["dst"]
if !ok || dst == "" {
msg := "destination path not provided to file_copy tool"
logger.Error(msg)
return []byte(msg)
}
if err := copyFile(src, dst); err != nil {
msg := "failed to copy file; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
msg := fmt.Sprintf("file copied successfully from %s to %s", src, dst)
return []byte(msg)
}
func fileList(args map[string]string) []byte {
path, ok := args["path"]
if !ok || path == "" {
path = "." // default to current directory
}
files, err := listDirectory(path)
if err != nil {
msg := "failed to list directory; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
result := map[string]interface{}{
"directory": path,
"files": files,
}
jsonResult, err := json.Marshal(result)
if err != nil {
msg := "failed to marshal result; error: " + err.Error()
logger.Error(msg)
return []byte(msg)
}
return jsonResult
}
// Helper functions for file operations
func readStringFromFile(filename string) (string, error) {
data, err := os.ReadFile(filename)
if err != nil {
return "", err
}
return string(data), nil
}
func writeStringToFile(filename string, data string) error {
return os.WriteFile(filename, []byte(data), 0644)
}
func appendStringToFile(filename string, data string) error {
file, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
return err
}
defer file.Close()
_, err = file.WriteString(data)
return err
}
func removeFile(filename string) error {
return os.Remove(filename)
}
func moveFile(src, dst string) error {
// First try with os.Rename (works within same filesystem)
if err := os.Rename(src, dst); err == nil {
return nil
}
// If that fails (e.g., cross-filesystem), copy and delete
return copyAndRemove(src, dst)
}
func copyFile(src, dst string) error {
srcFile, err := os.Open(src)
if err != nil {
return err
}
defer srcFile.Close()
dstFile, err := os.Create(dst)
if err != nil {
return err
}
defer dstFile.Close()
_, err = io.Copy(dstFile, srcFile)
return err
}
func copyAndRemove(src, dst string) error {
// Copy the file
if err := copyFile(src, dst); err != nil {
return err
}
// Remove the source file
return os.Remove(src)
}
func listDirectory(path string) ([]string, error) {
entries, err := os.ReadDir(path)
if err != nil {
return nil, err
}
var files []string
for _, entry := range entries {
if entry.IsDir() {
files = append(files, entry.Name()+"/") // Add "/" to indicate directory
} else {
files = append(files, entry.Name())
}
}
return files, nil
}
// Command Execution Tool
func executeCommand(args map[string]string) []byte {
command, ok := args["command"]
if !ok || command == "" {
msg := "command not provided to execute_command tool"
logger.Error(msg)
return []byte(msg)
}
if !isCommandAllowed(command) {
msg := fmt.Sprintf("command '%s' is not allowed", command)
logger.Error(msg)
return []byte(msg)
}
// Get arguments - handle both single arg and multiple args
var cmdArgs []string
if args["args"] != "" {
// If args is provided as a single string, split by spaces
cmdArgs = strings.Fields(args["args"])
} else {
// If individual args are provided, collect them
argNum := 1
for {
argKey := fmt.Sprintf("arg%d", argNum)
if argValue, exists := args[argKey]; exists && argValue != "" {
cmdArgs = append(cmdArgs, argValue)
} else {
break
}
argNum++
}
}
// Execute with timeout for safety
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
cmd := exec.CommandContext(ctx, command, cmdArgs...)
output, err := cmd.CombinedOutput()
if err != nil {
msg := fmt.Sprintf("command '%s' failed; error: %v; output: %s", command, err, string(output))
logger.Error(msg)
return []byte(msg)
}
return output
}
// Helper functions for command execution
func isCommandAllowed(command string) bool {
allowedCommands := map[string]bool{
"grep": true,
"sed": true,
"awk": true,
"find": true,
"cat": true,
"head": true,
"tail": true,
"sort": true,
"uniq": true,
"wc": true,
"ls": true,
"echo": true,
"cut": true,
"tr": true,
"cp": true,
"mv": true,
"rm": true,
"mkdir": true,
"rmdir": true,
"pwd": true,
}
return allowedCommands[command]
}
type fnSig func(map[string]string) []byte type fnSig func(map[string]string) []byte
var fnMap = map[string]fnSig{ var fnMap = map[string]fnSig{
@@ -178,6 +576,14 @@ var fnMap = map[string]fnSig{
"recall_topics": recallTopics, "recall_topics": recallTopics,
"memorise": memorise, "memorise": memorise,
"websearch": websearch, "websearch": websearch,
"file_create": fileCreate,
"file_read": fileRead,
"file_write": fileWrite,
"file_delete": fileDelete,
"file_move": fileMove,
"file_copy": fileCopy,
"file_list": fileList,
"execute_command": executeCommand,
} }
// openai style def // openai style def
@@ -257,4 +663,180 @@ var baseTools = []models.Tool{
}, },
}, },
}, },
// file_create: tool schema letting the LLM create a new file at "path".
// "content" is optional; the handler presumably defaults it to an empty
// string (per the description) — TODO confirm against fileCreate.
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "file_create",
		Description: "Create a new file with specified content. Use when you need to create a new file.",
		Parameters: models.ToolFuncParams{
			Type: "object",
			// Only "path" is mandatory; "content" may be omitted.
			Required: []string{"path"},
			Properties: map[string]models.ToolArgProps{
				"path": models.ToolArgProps{
					Type: "string",
					Description: "path where the file should be created",
				},
				"content": models.ToolArgProps{
					Type: "string",
					Description: "content to write to the file (optional, defaults to empty string)",
				},
			},
		},
	},
},
// file_read: tool schema for reading a file's content back to the LLM.
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "file_read",
		Description: "Read the content of a file. Use when you need to see the content of a file.",
		Parameters: models.ToolFuncParams{
			Type: "object",
			Required: []string{"path"},
			Properties: map[string]models.ToolArgProps{
				"path": models.ToolArgProps{
					Type: "string",
					Description: "path of the file to read",
				},
			},
		},
	},
},
// file_write: tool schema for writing content to a file; "mode"
// selects overwrite vs append (defaulting to overwrite, per the
// description — TODO confirm against fileWrite).
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "file_write",
		Description: "Write content to a file. Use when you want to create or modify a file (overwrite or append).",
		Parameters: models.ToolFuncParams{
			Type: "object",
			Required: []string{"path", "content"},
			Properties: map[string]models.ToolArgProps{
				"path": models.ToolArgProps{
					Type: "string",
					Description: "path of the file to write to",
				},
				"content": models.ToolArgProps{
					Type: "string",
					Description: "content to write to the file",
				},
				"mode": models.ToolArgProps{
					Type: "string",
					Description: "write mode: 'overwrite' to replace entire file content, 'append' to add to the end (defaults to 'overwrite')",
				},
			},
		},
	},
},
// file_delete: tool schema for removing a file.
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "file_delete",
		Description: "Delete a file. Use when you need to remove a file.",
		Parameters: models.ToolFuncParams{
			Type: "object",
			Required: []string{"path"},
			Properties: map[string]models.ToolArgProps{
				"path": models.ToolArgProps{
					Type: "string",
					Description: "path of the file to delete",
				},
			},
		},
	},
},
// file_move: tool schema for relocating a file from "src" to "dst".
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "file_move",
		Description: "Move a file from one location to another. Use when you need to relocate a file.",
		Parameters: models.ToolFuncParams{
			Type: "object",
			Required: []string{"src", "dst"},
			Properties: map[string]models.ToolArgProps{
				"src": models.ToolArgProps{
					Type: "string",
					Description: "source path of the file to move",
				},
				"dst": models.ToolArgProps{
					Type: "string",
					Description: "destination path where the file should be moved",
				},
			},
		},
	},
},
// file_copy: tool schema for duplicating a file from "src" to "dst".
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "file_copy",
		Description: "Copy a file from one location to another. Use when you need to duplicate a file.",
		Parameters: models.ToolFuncParams{
			Type: "object",
			Required: []string{"src", "dst"},
			Properties: map[string]models.ToolArgProps{
				"src": models.ToolArgProps{
					Type: "string",
					Description: "source path of the file to copy",
				},
				"dst": models.ToolArgProps{
					Type: "string",
					Description: "destination path where the file should be copied",
				},
			},
		},
	},
},
// file_list: tool schema for listing a directory. Note "path" is
// optional (Required is empty); the handler presumably defaults to
// the current directory — TODO confirm against fileList.
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "file_list",
		Description: "List files and directories in a directory. Use when you need to see what files are in a directory.",
		Parameters: models.ToolFuncParams{
			Type: "object",
			Required: []string{},
			Properties: map[string]models.ToolArgProps{
				"path": models.ToolArgProps{
					Type: "string",
					Description: "path of the directory to list (optional, defaults to current directory)",
				},
			},
		},
	},
},
// execute_command: tool schema letting the LLM run a shell command.
// The description advertises a whitelist of utilities; enforcement
// presumably happens in the executeCommand handler — TODO confirm.
models.Tool{
	Type: "function",
	Function: models.ToolFunc{
		Name: "execute_command",
		Description: "Execute a shell command safely. Use when you need to run system commands like grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd",
		Parameters: models.ToolFuncParams{
			Type: "object",
			Required: []string{"command"},
			Properties: map[string]models.ToolArgProps{
				"command": models.ToolArgProps{
					Type: "string",
					// Fix: the parenthesis in this description was never
					// closed, which reads as a truncated sentence to the LLM.
					Description: "command to execute (only commands from whitelist are allowed: grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd)",
				},
				"args": models.ToolArgProps{
					Type: "string",
					Description: "command arguments as a single string (e.g., '-la {path}')",
				},
			},
		},
	},
},
} }

131
tui.go
View File

@@ -8,6 +8,7 @@ import (
_ "image/jpeg" _ "image/jpeg"
_ "image/png" _ "image/png"
"os" "os"
"os/exec"
"path" "path"
"slices" "slices"
"strconv" "strconv"
@@ -71,7 +72,7 @@ var (
[yellow]Ctrl+v[white]: switch between /completion and /chat api (if provided in config) [yellow]Ctrl+v[white]: switch between /completion and /chat api (if provided in config)
[yellow]Ctrl+r[white]: start/stop recording from your microphone (needs stt server) [yellow]Ctrl+r[white]: start/stop recording from your microphone (needs stt server)
[yellow]Ctrl+t[white]: remove thinking (<think>) and tool messages from context (delete from chat) [yellow]Ctrl+t[white]: remove thinking (<think>) and tool messages from context (delete from chat)
[yellow]Ctrl+l[white]: update connected model name (llamacpp) [yellow]Ctrl+l[white]: rotate through free OpenRouter models (if openrouter api) or update connected model name (llamacpp)
[yellow]Ctrl+k[white]: switch tool use (recommend tool use to llm after user msg) [yellow]Ctrl+k[white]: switch tool use (recommend tool use to llm after user msg)
[yellow]Ctrl+j[white]: if chat agent is char.png will show the image; then any key to return [yellow]Ctrl+j[white]: if chat agent is char.png will show the image; then any key to return
[yellow]Ctrl+a[white]: interrupt tts (needs tts server) [yellow]Ctrl+a[white]: interrupt tts (needs tts server)
@@ -80,7 +81,13 @@ var (
[yellow]Ctrl+q[white]: cycle through mentioned chars in chat, to pick persona to send next msg as [yellow]Ctrl+q[white]: cycle through mentioned chars in chat, to pick persona to send next msg as
[yellow]Ctrl+x[white]: cycle through mentioned chars in chat, to pick persona to send next msg as (for llm) [yellow]Ctrl+x[white]: cycle through mentioned chars in chat, to pick persona to send next msg as (for llm)
[yellow]Alt+5[white]: toggle fullscreen for input/chat window [yellow]Alt+5[white]: toggle fullscreen for input/chat window
[yellow]Alt+1[white]: toggle shell mode (execute commands locally)
=== scrolling chat window (some keys similar to vim) ===
[yellow]arrows up/down and j/k[white]: scroll up and down
[yellow]gg/G[white]: jump to the beginning / end of the chat
=== status line ===
%s %s
Press Enter to go back Press Enter to go back
@@ -204,6 +211,102 @@ func makePropsForm(props map[string]float32) *tview.Form {
return form return form
} }
// toggleShellMode flips the global shell-mode flag and refreshes the
// UI to match: the input placeholder tells the user which mode is
// active, and the status line is redrawn. In shell mode, input is
// executed locally instead of being sent to the LLM.
func toggleShellMode() {
	shellMode = !shellMode
	placeholder := "input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message. Alt+1 to exit shell mode"
	if shellMode {
		placeholder = "SHELL MODE: Enter command and press <Esc> to execute"
	}
	textArea.SetPlaceholder(placeholder)
	updateStatusLine()
}
// executeCommandAndDisplay runs cmdText as a local command and writes
// the invocation plus its combined stdout/stderr into the chat view,
// color-coded by outcome: yellow for the echoed command line, green
// for success, red for failure. Used by shell mode (Alt+1).
func executeCommandAndDisplay(cmdText string) {
	fields := parseCommand(cmdText)
	if len(fields) == 0 {
		fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n")
		textView.ScrollToEnd()
		colorText()
		return
	}
	// fields[0] is the binary; the rest (possibly empty) are its args.
	out, runErr := exec.Command(fields[0], fields[1:]...).CombinedOutput()
	// Echo the command itself before its result.
	fmt.Fprintf(textView, "\n[yellow]$ %s[-:-:-]\n", cmdText)
	switch {
	case runErr != nil:
		// Show the error, then any partial output the command produced.
		fmt.Fprintf(textView, "[red]Error: %s[-:-:-]\n", runErr.Error())
		if len(out) > 0 {
			fmt.Fprintf(textView, "[red]%s[-:-:-]\n", string(out))
		}
	case len(out) > 0:
		fmt.Fprintf(textView, "[green]%s[-:-:-]\n", string(out))
	default:
		fmt.Fprintf(textView, "[green]Command executed successfully (no output)[-:-:-]\n")
	}
	textView.ScrollToEnd()
	colorText()
}
// parseCommand splits a command string into whitespace-separated
// arguments while honoring single and double quotes, so quoted
// substrings containing spaces stay within one argument. Quote
// characters themselves are stripped; a quote of one kind inside the
// other kind is kept literally. An unterminated quote is tolerated:
// the remainder of the string becomes part of the final argument.
//
// Fix over the previous version: an explicitly quoted empty argument
// ("" or '') is now preserved as an empty string instead of being
// silently dropped, matching shell semantics for e.g. `echo ""`.
func parseCommand(cmd string) []string {
	var args []string
	var current string
	var inQuotes bool
	var quoted bool // current token saw a completed quote pair
	var quoteChar rune
	// flush appends the accumulated token; a token is emitted when it
	// has content OR when it was explicitly quoted (empty-arg case).
	flush := func() {
		if current != "" || quoted {
			args = append(args, current)
		}
		current = ""
		quoted = false
	}
	for _, r := range cmd {
		switch r {
		case '"', '\'':
			if inQuotes {
				if r == quoteChar {
					inQuotes = false
					quoted = true
				} else {
					// The other quote kind is literal inside quotes.
					current += string(r)
				}
			} else {
				inQuotes = true
				quoteChar = r
			}
		case ' ', '\t':
			if inQuotes {
				current += string(r)
			} else {
				flush()
			}
		default:
			current += string(r)
		}
	}
	flush()
	return args
}
func init() { func init() {
tview.Styles = colorschemes["default"] tview.Styles = colorschemes["default"]
app = tview.NewApplication() app = tview.NewApplication()
@@ -575,10 +678,21 @@ func init() {
return nil return nil
} }
if event.Key() == tcell.KeyCtrlL { if event.Key() == tcell.KeyCtrlL {
// Check if the current API is an OpenRouter API
if strings.Contains(cfg.CurrentAPI, "openrouter.ai/api/v1/") {
// Rotate through OpenRouter free models
if len(ORFreeModels) > 0 {
currentORModelIndex = (currentORModelIndex + 1) % len(ORFreeModels)
chatBody.Model = ORFreeModels[currentORModelIndex]
}
updateStatusLine()
} else {
// For non-OpenRouter APIs, use the old logic
go func() { go func() {
fetchLCPModelName() // blocks fetchLCPModelName() // blocks
updateStatusLine() updateStatusLine()
}() }()
}
return nil return nil
} }
if event.Key() == tcell.KeyCtrlT { if event.Key() == tcell.KeyCtrlT {
@@ -812,10 +926,22 @@ func init() {
pages.AddPage(RAGLoadedPage, chatLoadedRAGTable, true, true) pages.AddPage(RAGLoadedPage, chatLoadedRAGTable, true, true)
return nil return nil
} }
if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '1' {
// Toggle shell mode: when enabled, commands are executed locally instead of sent to LLM
toggleShellMode()
return nil
}
// cannot send msg in editMode or botRespMode // cannot send msg in editMode or botRespMode
if event.Key() == tcell.KeyEscape && !editMode && !botRespMode { if event.Key() == tcell.KeyEscape && !editMode && !botRespMode {
// read all text into buffer
msgText := textArea.GetText() msgText := textArea.GetText()
if shellMode && msgText != "" {
// In shell mode, execute command instead of sending to LLM
executeCommandAndDisplay(msgText)
textArea.SetText("", true) // Clear the input area
return nil
} else if !shellMode {
// Normal mode - send to LLM
nl := "\n" nl := "\n"
prevText := textView.GetText(true) prevText := textView.GetText(true)
persona := cfg.UserRole persona := cfg.UserRole
@@ -852,6 +978,7 @@ func init() {
// But clears it for the next message // But clears it for the next message
ClearImageAttachment() ClearImageAttachment()
}() }()
}
return nil return nil
} }
if event.Key() == tcell.KeyPgUp || event.Key() == tcell.KeyPgDn { if event.Key() == tcell.KeyPgUp || event.Key() == tcell.KeyPgDn {