Fix: specv2 cards wrapper case
This commit is contained in:
@@ -27,9 +27,7 @@
|
||||
- RAG support|implementation; +
|
||||
- delete chat option; +
|
||||
- RAG file loading status/progress; +
|
||||
- change temp, min-p and other params from tui;
|
||||
- fullscreen textarea option (bothersome to implement);
|
||||
- consider using the /completion endpoint of llamacpp, since the openai endpoint clearly has template/format issues;
|
||||
- separate the messages that are stored in the chat from the ones sent to the bot, i.e. an option to omit tool calls (there might be a point where they are no longer needed in ctx);
|
||||
- char card is the sys message, but how about giving tools to char that does not have it?
|
||||
- it is a bit clumsy to mix chats in db and chars from the external files, maybe load external files in db on startup?
|
||||
@@ -37,6 +35,11 @@
|
||||
- server mode: no tui but api calls with the func calling, rag, other middleware;
|
||||
- boolean flag to use/not use tools. I see it as a msg from a tool to an llm "Hey, it might be good idea to use me!";
|
||||
- connection to a model status;
|
||||
- ===== /llamacpp specific (it has a different body -> interface instead of global var)
|
||||
- edit syscards / create new ones;
|
||||
- consider using the /completion endpoint of llamacpp, since the openai endpoint clearly has template/format issues;
|
||||
- change temp, min-p and other params from tui;
|
||||
- DRY;
|
||||
|
||||
### FIX:
|
||||
- bot responding (or hanging) blocks everything; +
|
||||
@@ -60,4 +63,3 @@
|
||||
- number of sentences in a batch should depend on number of words there. +
|
||||
- F1 can load any chat, by loading chat of other agent it does not switch agents, if that chat is continued, it will rewrite agent in db; (either allow only chats from current agent OR switch agent on chat loading); +
|
||||
- after chat is deleted: load undeleted chat; +
|
||||
- syscards sometimes store data inside of chub key;
|
||||
|
||||
25
bot.go
25
bot.go
@@ -77,6 +77,31 @@ func fetchModelName() {
|
||||
logger.Warn("failed to decode resp", "link", api, "error", err)
|
||||
return
|
||||
}
|
||||
if resp.StatusCode != 200 {
|
||||
currentModel = "none"
|
||||
return
|
||||
}
|
||||
currentModel = path.Base(llmModel.Data[0].ID)
|
||||
updateStatusLine()
|
||||
}
|
||||
|
||||
func fetchProps() {
|
||||
api := "http://localhost:8080/props"
|
||||
resp, err := httpClient.Get(api)
|
||||
if err != nil {
|
||||
logger.Warn("failed to get model", "link", api, "error", err)
|
||||
return
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
llmModel := models.LLMModels{}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&llmModel); err != nil {
|
||||
logger.Warn("failed to decode resp", "link", api, "error", err)
|
||||
return
|
||||
}
|
||||
if resp.StatusCode != 200 {
|
||||
currentModel = "none"
|
||||
return
|
||||
}
|
||||
currentModel = path.Base(llmModel.Data[0].ID)
|
||||
updateStatusLine()
|
||||
}
|
||||
|
||||
@@ -22,6 +22,10 @@ type CharCardSpec struct {
|
||||
Tags []any `json:"tags"`
|
||||
}
|
||||
|
||||
// Spec2Wrapper is the envelope used by character-card spec v2 files,
// where the card fields are nested under a top-level "data" key instead
// of sitting at the root of the JSON document.
type Spec2Wrapper struct {
	Data CharCardSpec `json:"data"`
}
|
||||
|
||||
func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
|
||||
fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
|
||||
sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)
|
||||
|
||||
@@ -78,6 +78,7 @@ type ChatBody struct {
|
||||
Model string `json:"model"`
|
||||
Stream bool `json:"stream"`
|
||||
Messages []RoleMsg `json:"messages"`
|
||||
DRYMultiplier float32 `json:"frequency_penalty"`
|
||||
}
|
||||
|
||||
type ChatToolsBody struct {
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
@@ -30,6 +31,13 @@ func (c PngEmbed) GetDecodedValue() (*models.CharCardSpec, error) {
|
||||
if err := json.Unmarshal(data, &card); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
specWrap := &models.Spec2Wrapper{}
|
||||
if card.Name == "" {
|
||||
if err := json.Unmarshal(data, &specWrap); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &specWrap.Data, nil
|
||||
}
|
||||
return card, nil
|
||||
}
|
||||
|
||||
@@ -81,6 +89,9 @@ func ReadCard(fname, uname string) (*models.CharCard, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if charSpec.Name == "" {
|
||||
return nil, fmt.Errorf("failed to find role; fname %s\n", fname)
|
||||
}
|
||||
return charSpec.Simplify(uname, fname), nil
|
||||
}
|
||||
|
||||
@@ -110,7 +121,9 @@ func ReadDirCards(dirname, uname string) ([]*models.CharCard, error) {
|
||||
fpath := path.Join(dirname, f.Name())
|
||||
cc, err := ReadCard(fpath, uname)
|
||||
if err != nil {
|
||||
return nil, err // better to log and continue
|
||||
// logger.Warn("failed to load card", "error", err)
|
||||
continue
|
||||
// return nil, err // better to log and continue
|
||||
}
|
||||
resp = append(resp, cc)
|
||||
}
|
||||
|
||||
2
tui.go
2
tui.go
@@ -196,12 +196,12 @@ func init() {
|
||||
SetFieldWidth(4).
|
||||
SetAcceptanceFunc(tview.InputFieldInteger).
|
||||
SetDoneFunc(func(key tcell.Key) {
|
||||
defer indexPickWindow.SetText("")
|
||||
pages.RemovePage(indexPage)
|
||||
colorText()
|
||||
updateStatusLine()
|
||||
})
|
||||
indexPickWindow.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||
defer indexPickWindow.SetText("")
|
||||
switch event.Key() {
|
||||
case tcell.KeyBackspace:
|
||||
return event
|
||||
|
||||
Reference in New Issue
Block a user