Fix: specv2 cards wrapper case

This commit is contained in:
Grail Finder
2025-01-16 17:53:35 +03:00
parent db678b5215
commit c41ff09b2f
6 changed files with 53 additions and 8 deletions

View File

@@ -27,9 +27,7 @@
- RAG support|implementation; + - RAG support|implementation; +
- delete chat option; + - delete chat option; +
- RAG file loading status/progress; + - RAG file loading status/progress; +
- change temp, min-p and other params from tui;
- fullscreen textarea option (bothersome to implement);
- consider adding use /completion of llamacpp, since openai endpoint clearly has template|format issues;
- separate messages that are stored and chat and send to the bot, i.e. option to omit tool calls (there might be a point where they are no longer needed in ctx); - separate messages that are stored and chat and send to the bot, i.e. option to omit tool calls (there might be a point where they are no longer needed in ctx);
- char card is the sys message, but how about giving tools to char that does not have it? - char card is the sys message, but how about giving tools to char that does not have it?
- it is a bit clumsy to mix chats in db and chars from the external files, maybe load external files in db on startup? - it is a bit clumsy to mix chats in db and chars from the external files, maybe load external files in db on startup?
@@ -37,6 +35,11 @@
- server mode: no tui but api calls with the func calling, rag, other middleware; - server mode: no tui but api calls with the func calling, rag, other middleware;
- boolean flag to use/not use tools. I see it as a msg from a tool to an llm "Hey, it might be good idea to use me!"; - boolean flag to use/not use tools. I see it as a msg from a tool to an llm "Hey, it might be good idea to use me!";
- connection to a model status; - connection to a model status;
- ===== /llamacpp specific (it has a different body -> interface instead of global var)
- edit syscards / create new ones;
- consider adding use /completion of llamacpp, since openai endpoint clearly has template|format issues;
- change temp, min-p and other params from tui;
- DRY;
### FIX:
- bot responding (or hanging) blocks everything; + - bot responding (or hanging) blocks everything; +
@@ -60,4 +63,3 @@
- number of sentences in a batch should depend on number of words there. + - number of sentences in a batch should depend on number of words there. +
- F1 can load any chat, by loading chat of other agent it does not switch agents, if that chat is continued, it will rewrite agent in db; (either allow only chats from current agent OR switch agent on chat loading); + - F1 can load any chat, by loading chat of other agent it does not switch agents, if that chat is continued, it will rewrite agent in db; (either allow only chats from current agent OR switch agent on chat loading); +
- after chat is deleted: load undeleted chat; + - after chat is deleted: load undeleted chat; +
- syscards sometimes store data inside of chub key;

25
bot.go
View File

@@ -77,6 +77,31 @@ func fetchModelName() {
logger.Warn("failed to decode resp", "link", api, "error", err) logger.Warn("failed to decode resp", "link", api, "error", err)
return return
} }
if resp.StatusCode != 200 {
currentModel = "none"
return
}
currentModel = path.Base(llmModel.Data[0].ID)
updateStatusLine()
}
func fetchProps() {
api := "http://localhost:8080/props"
resp, err := httpClient.Get(api)
if err != nil {
logger.Warn("failed to get model", "link", api, "error", err)
return
}
defer resp.Body.Close()
llmModel := models.LLMModels{}
if err := json.NewDecoder(resp.Body).Decode(&llmModel); err != nil {
logger.Warn("failed to decode resp", "link", api, "error", err)
return
}
if resp.StatusCode != 200 {
currentModel = "none"
return
}
currentModel = path.Base(llmModel.Data[0].ID) currentModel = path.Base(llmModel.Data[0].ID)
updateStatusLine() updateStatusLine()
} }

View File

@@ -22,6 +22,10 @@ type CharCardSpec struct {
Tags []any `json:"tags"` Tags []any `json:"tags"`
} }
// Spec2Wrapper represents a character card in the spec v2 layout, where
// the CharCardSpec fields are nested under a top-level "data" key instead
// of sitting at the root of the JSON object.
type Spec2Wrapper struct {
Data CharCardSpec `json:"data"`
}
func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard { func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName) fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName) sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)

View File

@@ -78,6 +78,7 @@ type ChatBody struct {
Model string `json:"model"` Model string `json:"model"`
Stream bool `json:"stream"` Stream bool `json:"stream"`
Messages []RoleMsg `json:"messages"` Messages []RoleMsg `json:"messages"`
DRYMultiplier float32 `json:"frequency_penalty"`
} }
type ChatToolsBody struct { type ChatToolsBody struct {

View File

@@ -6,6 +6,7 @@ import (
"encoding/base64" "encoding/base64"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt"
"io" "io"
"os" "os"
"path" "path"
@@ -30,6 +31,13 @@ func (c PngEmbed) GetDecodedValue() (*models.CharCardSpec, error) {
if err := json.Unmarshal(data, &card); err != nil { if err := json.Unmarshal(data, &card); err != nil {
return nil, err return nil, err
} }
specWrap := &models.Spec2Wrapper{}
if card.Name == "" {
if err := json.Unmarshal(data, &specWrap); err != nil {
return nil, err
}
return &specWrap.Data, nil
}
return card, nil return card, nil
} }
@@ -81,6 +89,9 @@ func ReadCard(fname, uname string) (*models.CharCard, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
if charSpec.Name == "" {
return nil, fmt.Errorf("failed to find role; fname %s\n", fname)
}
return charSpec.Simplify(uname, fname), nil return charSpec.Simplify(uname, fname), nil
} }
@@ -110,7 +121,9 @@ func ReadDirCards(dirname, uname string) ([]*models.CharCard, error) {
fpath := path.Join(dirname, f.Name()) fpath := path.Join(dirname, f.Name())
cc, err := ReadCard(fpath, uname) cc, err := ReadCard(fpath, uname)
if err != nil { if err != nil {
return nil, err // better to log and continue // logger.Warn("failed to load card", "error", err)
continue
// return nil, err // better to log and continue
} }
resp = append(resp, cc) resp = append(resp, cc)
} }

2
tui.go
View File

@@ -196,12 +196,12 @@ func init() {
SetFieldWidth(4). SetFieldWidth(4).
SetAcceptanceFunc(tview.InputFieldInteger). SetAcceptanceFunc(tview.InputFieldInteger).
SetDoneFunc(func(key tcell.Key) { SetDoneFunc(func(key tcell.Key) {
defer indexPickWindow.SetText("")
pages.RemovePage(indexPage) pages.RemovePage(indexPage)
colorText() colorText()
updateStatusLine() updateStatusLine()
}) })
indexPickWindow.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey { indexPickWindow.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
defer indexPickWindow.SetText("")
switch event.Key() { switch event.Key() {
case tcell.KeyBackspace: case tcell.KeyBackspace:
return event return event