Fix: tool use message

This commit is contained in:
Grail Finder
2025-02-16 12:27:21 +03:00
parent c9f5b17f1f
commit 58d632ed19
2 changed files with 16 additions and 12 deletions

26
llm.go
View File

@@ -28,8 +28,8 @@ type LlamaCPPeer struct {
 type OpenAIer struct {
 }
-func (lcp LlamaCPPeer) FormMsg(msg, role string, cont bool) (io.Reader, error) {
-	if msg != "" { // otherwise let the bot continue
+func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+	if msg != "" { // otherwise let the bot to continue
 		newMsg := models.RoleMsg{Role: role, Content: msg}
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 		// if rag
@@ -43,16 +43,19 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string, cont bool) (io.Reader, error) {
 			chatBody.Messages = append(chatBody.Messages, ragMsg)
 		}
 	}
+	if cfg.ToolUse && !resume {
+		// prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
+		// add to chat body
+		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
+	}
 	messages := make([]string, len(chatBody.Messages))
 	for i, m := range chatBody.Messages {
 		messages[i] = m.ToPrompt()
 	}
 	prompt := strings.Join(messages, "\n")
 	// strings builder?
-	if cfg.ToolUse && msg != "" && !cont {
-		prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
-	}
-	if !cont {
+	// if cfg.ToolUse && msg != "" && !resume {
+	if !resume {
 		botMsgStart := "\n" + cfg.AssistantRole + ":\n"
 		prompt += botMsgStart
 	}
@@ -60,6 +63,7 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string, cont bool) (io.Reader, error) {
 	if cfg.ThinkUse && !cfg.ToolUse {
 		prompt += "<think>"
 	}
+	logger.Info("checking prompt for llamacpp", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt)
 	payload := models.NewLCPReq(prompt, cfg, defaultLCPProps)
 	data, err := json.Marshal(payload)
 	if err != nil {
@@ -101,6 +105,11 @@ func (op OpenAIer) ParseChunk(data []byte) (string, bool, error) {
 }
 func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+	if cfg.ToolUse && !resume {
+		// prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
+		// add to chat body
+		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
+	}
 	if msg != "" { // otherwise let the bot continue
 		newMsg := models.RoleMsg{Role: role, Content: msg}
 		chatBody.Messages = append(chatBody.Messages, newMsg)
@@ -114,11 +123,6 @@ func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
 		ragMsg := models.RoleMsg{Role: cfg.ToolRole, Content: ragResp}
 		chatBody.Messages = append(chatBody.Messages, ragMsg)
 	}
-	if cfg.ToolUse {
-		toolMsg := models.RoleMsg{Role: cfg.ToolRole,
-			Content: toolSysMsg}
-		chatBody.Messages = append(chatBody.Messages, toolMsg)
-	}
 	}
 	data, err := json.Marshal(chatBody)
 	if err != nil {

View File

@@ -12,7 +12,7 @@ var (
 	botRespMode   = false
 	editMode      = false
 	selectedIndex = int(-1)
-	indexLine     = "F12 to show keys help | bot resp mode: %v (F6) | char: %s (ctrl+s) | chat: %s (F1) | RAGEnabled: %v (F11) | toolUseAdviced: %v (ctrl+k) | model: %s (ctrl+l)\nAPI_URL: %s (ctrl+v) | ThinkUse: %v (ctrl+p) | Log Level: %v (ctrl+p)"
+	indexLine     = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | RAGEnabled: [orange:-:b]%v[-:-:-] (F11) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | ThinkUse: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p)"
 	focusSwitcher = map[tview.Primitive]tview.Primitive{}
 )