Chore: resolve TODOs

This commit is contained in:
Grail Finder
2025-10-03 20:42:36 +03:00
parent 448cb97074
commit dc183e3692
7 changed files with 21 additions and 34 deletions

14
bot.go
View File

@@ -85,19 +85,17 @@ func createClient(connectTimeout time.Duration) *http.Client {
}
}
func fetchModelName() *models.LLMModels {
// TODO: to config
api := "http://localhost:8080/v1/models"
func fetchLCPModelName() *models.LLMModels {
//nolint
resp, err := httpClient.Get(api)
resp, err := httpClient.Get(cfg.FetchModelNameAPI)
if err != nil {
logger.Warn("failed to get model", "link", api, "error", err)
logger.Warn("failed to get model", "link", cfg.FetchModelNameAPI, "error", err)
return nil
}
defer resp.Body.Close()
llmModel := models.LLMModels{}
if err := json.NewDecoder(resp.Body).Decode(&llmModel); err != nil {
logger.Warn("failed to decode resp", "link", api, "error", err)
logger.Warn("failed to decode resp", "link", cfg.FetchModelNameAPI, "error", err)
return nil
}
if resp.StatusCode != 200 {
@@ -272,7 +270,7 @@ func chatRagUse(qText string) (string, error) {
if err != nil {
return "", err
}
// TODO: this where llm should find the questions in text and ask them
// this is where the LLM should find the questions in the text and ask them
questionsS := tokenizer.Tokenize(qText)
questions := make([]string, len(questionsS))
for i, q := range questionsS {
@@ -525,7 +523,7 @@ func applyCharCard(cc *models.CharCard) {
}
history, err := loadAgentsLastChat(cfg.AssistantRole)
if err != nil {
// TODO: too much action for err != nil; loadAgentsLastChat needs to be split up
// too much happening in this err != nil branch; loadAgentsLastChat should be split up
logger.Warn("failed to load last agent chat;", "agent", cc.Role, "err", err)
history = []models.RoleMsg{
{Role: "system", Content: cc.SysPrompt},

View File

@@ -22,3 +22,4 @@ TTS_SPEED = 1.0
STT_ENABLED = false
STT_URL = "http://localhost:8081/inference"
DBPATH = "gflt.db"
FetchModelNameAPI = "http://localhost:8080/v1/models"

View File

@@ -7,13 +7,14 @@ import (
)
type Config struct {
EnableCluedo bool `toml:"EnableCluedo"` // Cluedo game mode toggle
CluedoRole2 string `toml:"CluedoRole2"` // Secondary AI role name
ChatAPI string `toml:"ChatAPI"`
CompletionAPI string `toml:"CompletionAPI"`
CurrentAPI string
CurrentProvider string
APIMap map[string]string
EnableCluedo bool `toml:"EnableCluedo"` // Cluedo game mode toggle
CluedoRole2 string `toml:"CluedoRole2"` // Secondary AI role name
ChatAPI string `toml:"ChatAPI"`
CompletionAPI string `toml:"CompletionAPI"`
CurrentAPI string
CurrentProvider string
APIMap map[string]string
FetchModelNameAPI string `toml:"FetchModelNameAPI"`
//
ShowSys bool `toml:"ShowSys"`
LogFile string `toml:"LogFile"`
@@ -88,6 +89,7 @@ func LoadConfigOrDefault(fn string) *Config {
// tts
config.TTS_ENABLED = false
config.TTS_URL = "http://localhost:8880/v1/audio/speech"
config.FetchModelNameAPI = "http://localhost:8080/v1/models"
}
config.CurrentAPI = config.ChatAPI
config.APIMap = map[string]string{

7
llm.go
View File

@@ -104,12 +104,7 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error)
}
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt)
var payload any
payload = models.NewLCPReq(prompt, defaultLCPProps, chatBody.MakeStopSlice())
if strings.Contains(chatBody.Model, "deepseek") { // TODO: why?
payload = models.NewDSCompletionReq(prompt, chatBody.Model,
defaultLCPProps["temp"], chatBody.MakeStopSlice())
}
payload := models.NewLCPReq(prompt, defaultLCPProps, chatBody.MakeStopSlice())
data, err := json.Marshal(payload)
if err != nil {
logger.Error("failed to form a msg", "error", err)

View File

@@ -5,11 +5,6 @@ import (
"strings"
)
// type FuncCall struct {
// Name string `json:"name"`
// Args []string `json:"args"`
// }
type FuncCall struct {
Name string `json:"name"`
Args map[string]string `json:"args"`

View File

@@ -1,9 +1,9 @@
package main
import (
"gf-lt/config"
"encoding/json"
"fmt"
"gf-lt/config"
"net/http"
"time"
)
@@ -61,7 +61,7 @@ out:
}
func modelHandler(w http.ResponseWriter, req *http.Request) {
llmModel := fetchModelName()
llmModel := fetchLCPModelName()
payload, err := json.Marshal(llmModel)
if err != nil {
logger.Error("model handler", "error", err)

8
tui.go
View File

@@ -735,7 +735,7 @@ func init() {
}
if event.Key() == tcell.KeyCtrlL {
go func() {
fetchModelName() // blocks
fetchLCPModelName() // blocks
updateStatusLine()
}()
return nil
@@ -756,10 +756,6 @@ func init() {
return nil
}
cfg.CurrentAPI = newAPI
// // TODO: implement model pick
// if strings.Contains(cfg.CurrentAPI, "deepseek") {
// chatBody.Model = "deepseek-chat"
// }
choseChunkParser()
updateStatusLine()
return nil
@@ -793,7 +789,7 @@ func init() {
pages.AddPage(imgPage, imgView, true, true)
return nil
}
// TODO: move to menu or table
// DEPRECATED: rag is deprecated until I change my mind
// if event.Key() == tcell.KeyCtrlR && cfg.HFToken != "" {
// // rag load
// // menu of the text files from defined rag directory