package main

import (
	"bufio"
	"bytes"
	"elefant/models"
	"elefant/storage"
	"encoding/json"
	"fmt"
	"io"
	"log/slog"
	"net/http"
	"os"
	"path"
	"strings"
	"time"

	"github.com/rivo/tview"
)

var httpClient = http.Client{
	Timeout: time.Second * 20,
}

var (
	logger        *slog.Logger
	APIURL        = "http://localhost:8080/v1/chat/completions"
	DB            = map[string]map[string]any{}
	userRole      = "user"
	assistantRole = "assistant"
	toolRole      = "tool"
	assistantIcon = "<🤖>: "
	userIcon      = "<user>: "
	historyDir    = "./history/"
	// TODO: pass as a cli arg
	showSystemMsgs  bool
	chatFileLoaded  string
	chunkChan       = make(chan string, 10)
	streamDone      = make(chan bool, 1)
	chatBody        *models.ChatBody
	store           storage.ChatHistory
	defaultFirstMsg = "Hello! What can I do for you?"
	defaultStarter  = []models.MessagesStory{
		{Role: "system", Content: systemMsg},
		{Role: assistantRole, Content: defaultFirstMsg},
	}
	interruptResp = false
	systemMsg     = `You're a helpful assistant.
# Tools
You can make function calls if needed.
Your current tools:
<tools>
{
"name":"get_id",
"args": "username"
}
</tools>
To make a function call, return a json object within __tool_call__ tags.
Example:
__tool_call__
{
"name":"get_id",
"args": "Adam"
}
__tool_call__
When making a function call, avoid typing anything else. The 'tool' user will respond with the results of the call.
After that you are free to respond to the user.
`
)

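// Tool calling: systemMsg instructs the model to wrap a JSON call in
// __tool_call__ markers; findCall detects such a reply and dispatches it
// through fnMap below.
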
// predefined funcs
func getUserDetails(id ...string) map[string]any {
	// db query
	// return DB[id[0]]
	return map[string]any{
		"username":   "fm11",
		"id":         24983,
		"reputation": 911,
		"balance":    214.73,
	}
}

type fnSig func(...string) map[string]any

var fnMap = map[string]fnSig{
	"get_id": getUserDetails,
}

// ====

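// getUserInput prints the prompt and blocks until the user enters a line on stdin.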
func getUserInput(userPrompt string) string {
	// fmt.Printf("<🤖>: %s\n<user>:", botMsg)
	fmt.Print(userPrompt) // Print, not Printf: the prompt is not a format string
	reader := bufio.NewReader(os.Stdin)
	line, err := reader.ReadString('\n')
	if err != nil {
		panic(err) // think about it
	}
	// fmt.Printf("read line: %s-\n", line)
	return line
}

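// formMsg appends newMsg to the chat history (unless it is empty, which lets
// the bot continue its previous turn) and returns the JSON-encoded request body.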
func formMsg(chatBody *models.ChatBody, newMsg, role string) io.Reader {
	if newMsg != "" { // otherwise let the bot continue
		newMsg := models.MessagesStory{Role: role, Content: newMsg}
		chatBody.Messages = append(chatBody.Messages, newMsg)
	}
	data, err := json.Marshal(chatBody)
	if err != nil {
		panic(err)
	}
	return bytes.NewReader(data)
}

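// sendMsgToLLM posts the chat body to the llama.cpp completions endpoint and
// reads the streamed response line by line, stripping the "data: " prefix from
// each event, forwarding text deltas to chunkChan and signalling streamDone
// when the model reports finish_reason "stop" (or after 2000 chunks).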
// func sendMsgToLLM(body io.Reader) (*models.LLMRespChunk, error) {
func sendMsgToLLM(body io.Reader) (any, error) {
	resp, err := httpClient.Post(APIURL, "application/json", body)
	if err != nil {
		logger.Error("llamacpp api", "error", err)
		return nil, err
	}
	defer resp.Body.Close()
	llmResp := []models.LLMRespChunk{}
	// chunkChan <- assistantIcon
	reader := bufio.NewReader(resp.Body)
	counter := 0
	for {
		if interruptResp {
			interruptResp = false
			logger.Info("interrupted bot response")
			break
		}
		llmchunk := models.LLMRespChunk{}
		if counter > 2000 {
			streamDone <- true
			break
		}
		line, err := reader.ReadBytes('\n')
		if err != nil {
			streamDone <- true
			panic(err)
		}
		// logger.Info("linecheck", "line", string(line), "len", len(line), "counter", counter)
		if len(line) <= 1 {
			continue // skip \n
		}
		// each event line starts with "data: "; strip that prefix
		line = line[6:]
		if err := json.Unmarshal(line, &llmchunk); err != nil {
			logger.Error("failed to decode", "error", err, "line", string(line))
			streamDone <- true
			return nil, err
		}
		llmResp = append(llmResp, llmchunk)
		logger.Info("streamview", "chunk", llmchunk)
		// if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason != "chat.completion.chunk" {
		if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
			streamDone <- true
			// last chunk
			break
		}
		counter++
		// bot sends way too many \n
		answerText := strings.ReplaceAll(llmchunk.Choices[0].Delta.Content, "\n\n", "\n")
		chunkChan <- answerText
	}
	return llmResp, nil
}

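// chatRound sends one message to the model, streams the reply into the tview
// TextView chunk by chunk, appends the full reply to the history, persists it,
// and finally checks the reply for a tool call.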
func chatRound(userMsg, role string, tv *tview.TextView) {
	botRespMode = true
	reader := formMsg(chatBody, userMsg, role)
	go sendMsgToLLM(reader)
	fmt.Fprintf(tv, "(%d) ", len(chatBody.Messages))
	fmt.Fprint(tv, assistantIcon)
	respText := strings.Builder{}
out:
	for {
		select {
		case chunk := <-chunkChan:
			// fmt.Printf(chunk)
			fmt.Fprint(tv, chunk) // Fprint: the chunk is not a format string
			respText.WriteString(chunk)
			tv.ScrollToEnd()
		case <-streamDone:
			break out
		}
	}
	botRespMode = false
	chatBody.Messages = append(chatBody.Messages, models.MessagesStory{
		Role: assistantRole, Content: respText.String(),
	})
	// bot msg is done;
	// now check it for func call
	logChat(chatFileLoaded, chatBody.Messages)
	findCall(respText.String(), tv)
}

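// logChat writes the current message history to fname as indented JSON.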
func logChat(fname string, msgs []models.MessagesStory) {
	data, err := json.MarshalIndent(msgs, "", " ")
	if err != nil {
		logger.Error("failed to marshal", "error", err)
	}
	if err := os.WriteFile(fname, data, 0666); err != nil {
		logger.Error("failed to write log", "error", err)
	}
}

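// findCall checks whether the model reply is wrapped in __tool_call__ markers;
// if so it decodes the JSON payload, runs the matching function from fnMap and
// feeds the result back into the conversation as a 'tool' message.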
func findCall(msg string, tv *tview.TextView) {
	prefix := "__tool_call__\n"
	suffix := "\n__tool_call__"
	fc := models.FuncCall{}
	if !strings.HasPrefix(msg, prefix) ||
		!strings.HasSuffix(msg, suffix) {
		return
	}
	jsStr := strings.TrimSuffix(strings.TrimPrefix(msg, prefix), suffix)
	if err := json.Unmarshal([]byte(jsStr), &fc); err != nil {
		logger.Error("failed to unmarshal tool call", "error", err)
		return
		// panic(err)
	}
	// call a func
	f, ok := fnMap[fc.Name]
	if !ok {
		m := fmt.Sprintf("%s is not implemented", fc.Name)
		chatRound(m, toolRole, tv)
		return
	}
	resp := f(fc.Args)
	toolMsg := fmt.Sprintf("tool response: %+v", resp)
	// reader := formMsg(chatBody, toolMsg, toolRole)
	// sendMsgToLLM()
	chatRound(toolMsg, toolRole, tv)
	// return func result to the llm
}

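// listHistoryFiles returns the paths of all entries in dir.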
func listHistoryFiles(dir string) ([]string, error) {
	files, err := os.ReadDir(dir)
	if err != nil {
		logger.Error("failed to readdir", "error", err)
		return nil, err
	}
	resp := make([]string, len(files))
	for i, f := range files {
		resp[i] = path.Join(dir, f.Name())
	}
	return resp, nil
}

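// findLatestChat returns the history file in dir with the newest modification time.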
func findLatestChat(dir string) string {
	files, err := listHistoryFiles(dir)
	if err != nil {
		panic(err)
	}
	var (
		latestF    string
		newestTime int64
	)
	logger.Info("filelist", "list", files)
	for _, fn := range files {
		fi, err := os.Stat(fn)
		if err != nil {
			logger.Error("failed to get stat", "error", err, "name", fn)
			panic(err)
		}
		currTime := fi.ModTime().Unix()
		if currTime > newestTime {
			newestTime = currTime
			latestF = fn
		}
	}
	return latestF
}

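// readHistoryChat reads a chat history JSON file into messages and records fn
// as the currently loaded chat file.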
func readHistoryChat(fn string) ([]models.MessagesStory, error) {
	content, err := os.ReadFile(fn)
	if err != nil {
		logger.Error("failed to read file", "error", err, "name", fn)
		return nil, err
	}
	resp := []models.MessagesStory{}
	if err := json.Unmarshal(content, &resp); err != nil {
		logger.Error("failed to unmarshal", "error", err, "name", fn)
		return nil, err
	}
	chatFileLoaded = fn
	return resp, nil
}

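// loadOldChatOrGetNew loads the given history file (or the latest one when none
// is passed); if that fails it falls back to the default starter conversation.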
func loadOldChatOrGetNew(fns ...string) []models.MessagesStory {
	// find last chat
	fn := findLatestChat(historyDir)
	if len(fns) > 0 {
		fn = fns[0]
	}
	logger.Info("reading history from file", "filename", fn)
	history, err := readHistoryChat(fn)
	if err != nil {
		logger.Warn("failed to load history chat", "error", err)
		return defaultStarter
	}
	return history
}

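// chatToTextSlice renders every message as display text; when showSys is false,
// system and tool messages are skipped and their slots stay empty.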
func chatToTextSlice(showSys bool) []string {
	resp := make([]string, len(chatBody.Messages))
	for i, msg := range chatBody.Messages {
		if !showSys && (msg.Role != assistantRole && msg.Role != userRole) {
			continue
		}
		resp[i] = msg.ToText(i)
	}
	return resp
}

func chatToText(showSys bool) string {
	s := chatToTextSlice(showSys)
	return strings.Join(s, "")
}

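// textToMsg converts a rendered line back into a message by matching the
// assistant or user icon prefix; anything else yields an empty message.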
func textToMsg(rawMsg string) models.MessagesStory {
	msg := models.MessagesStory{}
	// system and tool?
	if strings.HasPrefix(rawMsg, assistantIcon) {
		msg.Role = assistantRole
		msg.Content = strings.TrimPrefix(rawMsg, assistantIcon)
		return msg
	}
	if strings.HasPrefix(rawMsg, userIcon) {
		msg.Role = userRole
		msg.Content = strings.TrimPrefix(rawMsg, userIcon)
		return msg
	}
	return msg
}

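// textSliceToChat converts rendered lines back into a message history.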
func textSliceToChat(chat []string) []models.MessagesStory {
	resp := make([]models.MessagesStory, len(chat))
	for i, rawMsg := range chat {
		msg := textToMsg(rawMsg)
		resp[i] = msg
	}
	return resp
}

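// init sets up file logging to log.txt, makes sure historyDir exists, restores
// the most recent chat into chatBody and initializes the chat history store.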
func init() {
	file, err := os.OpenFile("log.txt", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		panic(err)
	}
	// create history dir if it does not exist
	if err := os.MkdirAll(historyDir, os.ModePerm); err != nil {
		panic(err)
	}
	// defer file.Close()
	logger = slog.New(slog.NewTextHandler(file, nil))
	logger.Info("test msg")
	// https://github.com/coreydaley/ggerganov-llama.cpp/blob/master/examples/server/README.md
	lastChat := loadOldChatOrGetNew()
	logger.Info("loaded history", "chat", lastChat)
	chatBody = &models.ChatBody{
		Model:    "modl_name",
		Stream:   true,
		Messages: lastChat,
	}
	store = storage.NewProviderSQL("test.db")
}