Compare commits
60 Commits
enha/tool-
...
fix/datara
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8c4d01ab3b | ||
|
|
a842b00e96 | ||
|
|
014e297ae3 | ||
|
|
5f273681df | ||
|
|
17b68bc21f | ||
|
|
edfd43c52a | ||
|
|
62ec55505c | ||
|
|
f9866bcf5a | ||
|
|
822cc48834 | ||
|
|
4ef0a21511 | ||
|
|
d2caebdb4f | ||
|
|
e1f2a8cd7b | ||
|
|
efc92d884c | ||
|
|
ac8c8bb055 | ||
|
|
c2c107c786 | ||
|
|
c2757653a3 | ||
|
|
4bd6883966 | ||
|
|
7c56e27dbe | ||
|
|
fbc955ca37 | ||
|
|
c65c11bcfb | ||
|
|
04f1fd464b | ||
|
|
6e9c453ee0 | ||
|
|
645b7351a8 | ||
|
|
57088565bd | ||
|
|
4b6769e531 | ||
|
|
d144ee76d9 | ||
|
|
abcaad6609 | ||
|
|
50ce0200af | ||
|
|
58ccd63f4a | ||
|
|
3611d7eb59 | ||
|
|
8974d2f52c | ||
|
|
6b0d03f2d6 | ||
|
|
fb4deb1161 | ||
|
|
0e5d37666f | ||
|
|
093103bdd7 | ||
|
|
6c9a1ba56b | ||
|
|
93ecfc8a34 | ||
|
|
0c9c590d8f | ||
|
|
d130254e88 | ||
|
|
6e7a063300 | ||
|
|
c05b93299c | ||
|
|
cad1bd46c1 | ||
|
|
4bddce3700 | ||
|
|
fcc71987bf | ||
|
|
8458edf5a8 | ||
|
|
07b06bb0d3 | ||
|
|
3389b1d83b | ||
|
|
4f6000a43a | ||
|
|
9ba46b40cc | ||
|
|
5bb456272e | ||
|
|
8999f48fb9 | ||
|
|
b2f280a7f1 | ||
|
|
65cbd5d6a6 | ||
|
|
caac1d397a | ||
|
|
742f1ca838 | ||
|
|
e36bade353 | ||
|
|
01d8bcdbf5 | ||
|
|
f6a395bce9 | ||
|
|
dc34c63256 | ||
|
|
cdfccf9a24 |
3
.gitignore
vendored
3
.gitignore
vendored
@@ -3,6 +3,8 @@
|
|||||||
testlog
|
testlog
|
||||||
history/
|
history/
|
||||||
*.db
|
*.db
|
||||||
|
*.db-shm
|
||||||
|
*.db-wal
|
||||||
config.toml
|
config.toml
|
||||||
sysprompts/*
|
sysprompts/*
|
||||||
!sysprompts/alice_bob_carl.json
|
!sysprompts/alice_bob_carl.json
|
||||||
@@ -15,3 +17,4 @@ gflt
|
|||||||
chat_exports/*.json
|
chat_exports/*.json
|
||||||
ragimport
|
ragimport
|
||||||
.env
|
.env
|
||||||
|
onnx/
|
||||||
|
|||||||
101
Makefile
101
Makefile
@@ -1,4 +1,4 @@
|
|||||||
.PHONY: setconfig run lint lintall install-linters setup-whisper build-whisper download-whisper-model docker-up docker-down docker-logs noextra-run installdelve checkdelve
|
.PHONY: setconfig run lint lintall install-linters setup-whisper build-whisper download-whisper-model docker-up docker-down docker-logs noextra-run installdelve checkdelve fetch-onnx install-onnx-deps
|
||||||
|
|
||||||
run: setconfig
|
run: setconfig
|
||||||
go build -tags extra -o gf-lt && ./gf-lt
|
go build -tags extra -o gf-lt && ./gf-lt
|
||||||
@@ -30,6 +30,105 @@ lint: ## Run linters. Use make install-linters first.
|
|||||||
lintall: lint
|
lintall: lint
|
||||||
noblanks ./...
|
noblanks ./...
|
||||||
|
|
||||||
|
fetch-onnx:
|
||||||
|
mkdir -p onnx/embedgemma && curl -o onnx/embedgemma/config.json -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/config.json && curl -o onnx/embedgemma/tokenizer.json -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/tokenizer.json && curl -o onnx/embedgemma/model_q4.onnx -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/onnx/model_q4.onnx && curl -o onnx/embedgemma/model_q4.onnx_data -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/onnx/model_q4.onnx_data?download=true
|
||||||
|
|
||||||
|
install-onnx-deps: ## Install ONNX Runtime with CUDA support (or CPU fallback)
|
||||||
|
@echo "=== ONNX Runtime Installer ===" && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Checking for existing ONNX Runtime..." && \
|
||||||
|
if ldconfig -p 2>/dev/null | grep -q libonnxruntime.so.1; then \
|
||||||
|
echo "ONNX Runtime is already installed:" && \
|
||||||
|
ldconfig -p 2>/dev/null | grep libonnxruntime && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Skipping installation. To reinstall, remove existing libs first:" && \
|
||||||
|
echo " sudo rm -f /usr/local/lib/libonnxruntime*.so*" && \
|
||||||
|
exit 0; \
|
||||||
|
fi && \
|
||||||
|
echo "No ONNX Runtime found. Proceeding with installation..." && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Detecting CUDA version..." && \
|
||||||
|
HAS_CUDA=0 && \
|
||||||
|
if command -v nvidia-smi >/dev/null 2>&1; then \
|
||||||
|
CUDA_INFO=$$(nvidia-smi --query-gpu=driver_version --format=csv,noheader 2>/dev/null | head -1) && \
|
||||||
|
if [ -n "$$CUDA_INFO" ]; then \
|
||||||
|
echo "Found NVIDIA GPU with driver: $$CUDA_INFO" && \
|
||||||
|
HAS_CUDA=1; \
|
||||||
|
else \
|
||||||
|
echo "NVIDIA driver found but could not detect CUDA version"; \
|
||||||
|
fi; \
|
||||||
|
else \
|
||||||
|
echo "No NVIDIA GPU detected (nvidia-smi not found)"; \
|
||||||
|
fi && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Determining ONNX Runtime version..." && \
|
||||||
|
ARCH=$$(uname -m) && \
|
||||||
|
if [ "$$ARCH" = "x86_64" ]; then \
|
||||||
|
ONNX_ARCH="x64"; \
|
||||||
|
elif [ "$$ARCH" = "aarch64" ] || [ "$$ARCH" = "arm64" ]; then \
|
||||||
|
ONNX_ARCH="aarch64"; \
|
||||||
|
else \
|
||||||
|
echo "Unsupported architecture: $$ARCH" && \
|
||||||
|
exit 1; \
|
||||||
|
fi && \
|
||||||
|
echo "Detected architecture: $$ARCH (ONNX runtime: $$ONNX_ARCH)" && \
|
||||||
|
if [ "$$HAS_CUDA" = "1" ]; then \
|
||||||
|
echo "Installing ONNX Runtime with CUDA support..."; \
|
||||||
|
ONNX_VERSION="1.24.2"; \
|
||||||
|
else \
|
||||||
|
echo "Installing ONNX Runtime (CPU version)..."; \
|
||||||
|
ONNX_VERSION="1.24.2"; \
|
||||||
|
fi && \
|
||||||
|
FILENAME="onnxruntime-linux-$${ONNX_ARCH}-${ONNX_VERSION}.tgz" && \
|
||||||
|
URL="https://github.com/microsoft/onnxruntime/releases/download/v$${ONNX_VERSION}/$${FILENAME}" && \
|
||||||
|
echo "Downloading $${URL}..." && \
|
||||||
|
mkdir -p /tmp/onnx-install && \
|
||||||
|
curl -L -o /tmp/onnx-install/$${FILENAME} "$${URL}" || { \
|
||||||
|
echo "Failed to download ONNX Runtime v$${ONNX_VERSION}. Trying v1.18.0..." && \
|
||||||
|
ONNX_VERSION="1.18.0" && \
|
||||||
|
FILENAME="onnxruntime-linux-$${ONNX_ARCH}-${ONNX_VERSION}.tgz" && \
|
||||||
|
URL="https://github.com/microsoft/onnxruntime/releases/download/v$${ONNX_VERSION}/$${FILENAME}" && \
|
||||||
|
curl -L -o /tmp/onnx-install/$${FILENAME} "$${URL}" || { \
|
||||||
|
echo "ERROR: Failed to download ONNX Runtime from GitHub" && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Please install manually:" && \
|
||||||
|
echo " 1. Go to https://github.com/microsoft/onnxruntime/releases" && \
|
||||||
|
echo " 2. Download onnxruntime-linux-$${ONNX_ARCH}-VERSION.tgz" && \
|
||||||
|
echo " 3. Extract and copy to /usr/local/lib:" && \
|
||||||
|
echo " tar -xzf onnxruntime-linux-$${ONNX_ARCH}-VERSION.tgz" && \
|
||||||
|
echo " sudo cp -r onnxruntime-linux-$${ONNX_ARCH}-VERSION/lib/* /usr/local/lib/" && \
|
||||||
|
echo " sudo ldconfig" && \
|
||||||
|
exit 1; \
|
||||||
|
}; \
|
||||||
|
} && \
|
||||||
|
echo "Extracting..." && \
|
||||||
|
cd /tmp/onnx-install && tar -xzf $${FILENAME} && \
|
||||||
|
echo "Installing to /usr/local/lib..." && \
|
||||||
|
ONNX_DIR=$$(find /tmp/onnx-install -maxdepth 1 -type d -name "onnxruntime-linux-*") && \
|
||||||
|
if [ -d "$${ONNX_DIR}/lib" ]; then \
|
||||||
|
cp -r $${ONNX_DIR}/lib/* /usr/local/lib/ 2>/dev/null || sudo cp -r $${ONNX_DIR}/lib/* /usr/local/lib/; \
|
||||||
|
else \
|
||||||
|
echo "ERROR: Could not find lib directory in extracted archive" && \
|
||||||
|
exit 1; \
|
||||||
|
fi && \
|
||||||
|
echo "Updating library cache..." && \
|
||||||
|
sudo ldconfig 2>/dev/null || ldconfig && \
|
||||||
|
echo "" && \
|
||||||
|
echo "=== Installation complete! ===" && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Installed libraries:" && \
|
||||||
|
ldconfig -p | grep libonnxruntime || echo "(libraries may require logout/relogin to appear)" && \
|
||||||
|
echo "" && \
|
||||||
|
if [ "$$HAS_CUDA" = "1" ]; then \
|
||||||
|
echo "NOTE: CUDA-enabled ONNX Runtime installed."; \
|
||||||
|
echo "Ensure you also have CUDA libraries installed:"; \
|
||||||
|
echo " - libcudnn, libcublas, libcurand"; \
|
||||||
|
else \
|
||||||
|
echo "NOTE: CPU-only ONNX Runtime installed."; \
|
||||||
|
echo "For GPU support, install CUDA and re-run this script."; \
|
||||||
|
fi && \
|
||||||
|
rm -rf /tmp/onnx-install
|
||||||
|
|
||||||
# Whisper STT Setup (in batteries directory)
|
# Whisper STT Setup (in batteries directory)
|
||||||
setup-whisper: build-whisper download-whisper-model
|
setup-whisper: build-whisper download-whisper-model
|
||||||
|
|
||||||
|
|||||||
349
bot.go
349
bot.go
@@ -3,6 +3,7 @@ package main
|
|||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"compress/gzip"
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
@@ -21,7 +22,7 @@ import (
|
|||||||
"slices"
|
"slices"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync/atomic"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -36,7 +37,7 @@ var (
|
|||||||
chunkChan = make(chan string, 10)
|
chunkChan = make(chan string, 10)
|
||||||
openAIToolChan = make(chan string, 10)
|
openAIToolChan = make(chan string, 10)
|
||||||
streamDone = make(chan bool, 1)
|
streamDone = make(chan bool, 1)
|
||||||
chatBody *models.ChatBody
|
chatBody *models.SafeChatBody
|
||||||
store storage.FullRepo
|
store storage.FullRepo
|
||||||
defaultFirstMsg = "Hello! What can I do for you?"
|
defaultFirstMsg = "Hello! What can I do for you?"
|
||||||
defaultStarter = []models.RoleMsg{}
|
defaultStarter = []models.RoleMsg{}
|
||||||
@@ -48,7 +49,6 @@ var (
|
|||||||
//nolint:unused // TTS_ENABLED conditionally uses this
|
//nolint:unused // TTS_ENABLED conditionally uses this
|
||||||
orator Orator
|
orator Orator
|
||||||
asr STT
|
asr STT
|
||||||
localModelsMu sync.RWMutex
|
|
||||||
defaultLCPProps = map[string]float32{
|
defaultLCPProps = map[string]float32{
|
||||||
"temperature": 0.8,
|
"temperature": 0.8,
|
||||||
"dry_multiplier": 0.0,
|
"dry_multiplier": 0.0,
|
||||||
@@ -63,9 +63,17 @@ var (
|
|||||||
"google/gemma-3-27b-it:free",
|
"google/gemma-3-27b-it:free",
|
||||||
"meta-llama/llama-3.3-70b-instruct:free",
|
"meta-llama/llama-3.3-70b-instruct:free",
|
||||||
}
|
}
|
||||||
LocalModels = []string{}
|
LocalModels atomic.Value // stores []string
|
||||||
|
localModelsData atomic.Value // stores *models.LCPModels
|
||||||
|
orModelsData atomic.Value // stores *models.ORModels
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
LocalModels.Store([]string{})
|
||||||
|
localModelsData.Store((*models.LCPModels)(nil))
|
||||||
|
orModelsData.Store((*models.ORModels)(nil))
|
||||||
|
}
|
||||||
|
|
||||||
var thinkBlockRE = regexp.MustCompile(`(?s)<think>.*?</think>`)
|
var thinkBlockRE = regexp.MustCompile(`(?s)<think>.*?</think>`)
|
||||||
|
|
||||||
// parseKnownToTag extracts known_to list from content using configured tag.
|
// parseKnownToTag extracts known_to list from content using configured tag.
|
||||||
@@ -259,15 +267,13 @@ func warmUpModel() {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Check if model is already loaded
|
// Check if model is already loaded
|
||||||
loaded, err := isModelLoaded(chatBody.Model)
|
loaded, err := isModelLoaded(chatBody.GetModel())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Debug("failed to check model status", "model", chatBody.Model, "error", err)
|
logger.Debug("failed to check model status", "model", chatBody.GetModel(), "error", err)
|
||||||
// Continue with warmup attempt anyway
|
// Continue with warmup attempt anyway
|
||||||
}
|
}
|
||||||
if loaded {
|
if loaded {
|
||||||
if err := notifyUser("model already loaded", "Model "+chatBody.Model+" is already loaded."); err != nil {
|
showToast("model already loaded", "Model "+chatBody.GetModel()+" is already loaded.")
|
||||||
logger.Debug("failed to notify user", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
go func() {
|
go func() {
|
||||||
@@ -276,7 +282,7 @@ func warmUpModel() {
|
|||||||
switch {
|
switch {
|
||||||
case strings.HasSuffix(cfg.CurrentAPI, "/completion"):
|
case strings.HasSuffix(cfg.CurrentAPI, "/completion"):
|
||||||
// Old completion endpoint
|
// Old completion endpoint
|
||||||
req := models.NewLCPReq(".", chatBody.Model, nil, map[string]float32{
|
req := models.NewLCPReq(".", chatBody.GetModel(), nil, map[string]float32{
|
||||||
"temperature": 0.8,
|
"temperature": 0.8,
|
||||||
"dry_multiplier": 0.0,
|
"dry_multiplier": 0.0,
|
||||||
"min_p": 0.05,
|
"min_p": 0.05,
|
||||||
@@ -288,7 +294,7 @@ func warmUpModel() {
|
|||||||
// OpenAI-compatible chat endpoint
|
// OpenAI-compatible chat endpoint
|
||||||
req := models.OpenAIReq{
|
req := models.OpenAIReq{
|
||||||
ChatBody: &models.ChatBody{
|
ChatBody: &models.ChatBody{
|
||||||
Model: chatBody.Model,
|
Model: chatBody.GetModel(),
|
||||||
Messages: []models.RoleMsg{
|
Messages: []models.RoleMsg{
|
||||||
{Role: "system", Content: "."},
|
{Role: "system", Content: "."},
|
||||||
},
|
},
|
||||||
@@ -312,7 +318,7 @@ func warmUpModel() {
|
|||||||
}
|
}
|
||||||
resp.Body.Close()
|
resp.Body.Close()
|
||||||
// Start monitoring for model load completion
|
// Start monitoring for model load completion
|
||||||
monitorModelLoad(chatBody.Model)
|
monitorModelLoad(chatBody.GetModel())
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -355,6 +361,7 @@ func fetchORModels(free bool) ([]string, error) {
|
|||||||
if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
|
if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
orModelsData.Store(data)
|
||||||
freeModels := data.ListModels(free)
|
freeModels := data.ListModels(free)
|
||||||
return freeModels, nil
|
return freeModels, nil
|
||||||
}
|
}
|
||||||
@@ -379,22 +386,22 @@ func fetchLCPModels() ([]string, error) {
|
|||||||
|
|
||||||
// fetchLCPModelsWithLoadStatus returns models with "(loaded)" indicator for loaded models
|
// fetchLCPModelsWithLoadStatus returns models with "(loaded)" indicator for loaded models
|
||||||
func fetchLCPModelsWithLoadStatus() ([]string, error) {
|
func fetchLCPModelsWithLoadStatus() ([]string, error) {
|
||||||
models, err := fetchLCPModelsWithStatus()
|
modelList, err := fetchLCPModelsWithStatus()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
result := make([]string, 0, len(models.Data))
|
result := make([]string, 0, len(modelList.Data))
|
||||||
li := 0 // loaded index
|
li := 0 // loaded index
|
||||||
for i, m := range models.Data {
|
for i, m := range modelList.Data {
|
||||||
modelName := m.ID
|
modelName := m.ID
|
||||||
if m.Status.Value == "loaded" {
|
if m.Status.Value == "loaded" {
|
||||||
modelName = "(loaded) " + modelName
|
modelName = models.LoadedMark + modelName
|
||||||
li = i
|
li = i
|
||||||
}
|
}
|
||||||
result = append(result, modelName)
|
result = append(result, modelName)
|
||||||
}
|
}
|
||||||
if li == 0 {
|
if li == 0 {
|
||||||
return result, nil // no loaded models
|
return result, nil // no loaded modelList
|
||||||
}
|
}
|
||||||
loadedModel := result[li]
|
loadedModel := result[li]
|
||||||
result = append(result[:li], result[li+1:]...)
|
result = append(result[:li], result[li+1:]...)
|
||||||
@@ -416,6 +423,7 @@ func fetchLCPModelsWithStatus() (*models.LCPModels, error) {
|
|||||||
if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
|
if err := json.NewDecoder(resp.Body).Decode(data); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
localModelsData.Store(data)
|
||||||
return data, nil
|
return data, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -433,6 +441,33 @@ func isModelLoaded(modelID string) (bool, error) {
|
|||||||
return false, nil
|
return false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func ModelHasVision(api, modelID string) bool {
|
||||||
|
switch {
|
||||||
|
case strings.Contains(api, "deepseek"):
|
||||||
|
return false
|
||||||
|
case strings.Contains(api, "openrouter"):
|
||||||
|
resp, err := http.Get("https://openrouter.ai/api/v1/models")
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("failed to fetch OR models for vision check", "error", err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
orm := &models.ORModels{}
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(orm); err != nil {
|
||||||
|
logger.Warn("failed to decode OR models for vision check", "error", err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return orm.HasVision(modelID)
|
||||||
|
default:
|
||||||
|
models, err := fetchLCPModelsWithStatus()
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("failed to fetch LCP models for vision check", "error", err)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return models.HasVision(modelID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// monitorModelLoad starts a goroutine that periodically checks if the specified model is loaded.
|
// monitorModelLoad starts a goroutine that periodically checks if the specified model is loaded.
|
||||||
func monitorModelLoad(modelID string) {
|
func monitorModelLoad(modelID string) {
|
||||||
go func() {
|
go func() {
|
||||||
@@ -451,9 +486,7 @@ func monitorModelLoad(modelID string) {
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if loaded {
|
if loaded {
|
||||||
if err := notifyUser("model loaded", "Model "+modelID+" is now loaded and ready."); err != nil {
|
showToast("model loaded", "Model "+modelID+" is now loaded and ready.")
|
||||||
logger.Debug("failed to notify user", "error", err)
|
|
||||||
}
|
|
||||||
refreshChatDisplay()
|
refreshChatDisplay()
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -464,6 +497,17 @@ func monitorModelLoad(modelID string) {
|
|||||||
|
|
||||||
// extractDetailedErrorFromBytes extracts detailed error information from response body bytes
|
// extractDetailedErrorFromBytes extracts detailed error information from response body bytes
|
||||||
func extractDetailedErrorFromBytes(body []byte, statusCode int) string {
|
func extractDetailedErrorFromBytes(body []byte, statusCode int) string {
|
||||||
|
// Try to decompress gzip if the response is compressed
|
||||||
|
if len(body) >= 2 && body[0] == 0x1f && body[1] == 0x8b {
|
||||||
|
reader, err := gzip.NewReader(bytes.NewReader(body))
|
||||||
|
if err == nil {
|
||||||
|
decompressed, err := io.ReadAll(reader)
|
||||||
|
reader.Close()
|
||||||
|
if err == nil {
|
||||||
|
body = decompressed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
// Try to parse as JSON to extract detailed error information
|
// Try to parse as JSON to extract detailed error information
|
||||||
var errorResponse map[string]any
|
var errorResponse map[string]any
|
||||||
if err := json.Unmarshal(body, &errorResponse); err == nil {
|
if err := json.Unmarshal(body, &errorResponse); err == nil {
|
||||||
@@ -529,9 +573,7 @@ func sendMsgToLLM(body io.Reader) {
|
|||||||
req, err := http.NewRequest("POST", cfg.CurrentAPI, body)
|
req, err := http.NewRequest("POST", cfg.CurrentAPI, body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("newreq error", "error", err)
|
logger.Error("newreq error", "error", err)
|
||||||
if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil {
|
showToast("error", "apicall failed:"+err.Error())
|
||||||
logger.Error("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
streamDone <- true
|
streamDone <- true
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -543,9 +585,7 @@ func sendMsgToLLM(body io.Reader) {
|
|||||||
resp, err := httpClient.Do(req)
|
resp, err := httpClient.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("llamacpp api", "error", err)
|
logger.Error("llamacpp api", "error", err)
|
||||||
if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil {
|
showToast("error", "apicall failed:"+err.Error())
|
||||||
logger.Error("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
streamDone <- true
|
streamDone <- true
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -556,9 +596,7 @@ func sendMsgToLLM(body io.Reader) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to read error response body", "error", err, "status_code", resp.StatusCode)
|
logger.Error("failed to read error response body", "error", err, "status_code", resp.StatusCode)
|
||||||
detailedError := fmt.Sprintf("HTTP Status: %d, Failed to read response body: %v", resp.StatusCode, err)
|
detailedError := fmt.Sprintf("HTTP Status: %d, Failed to read response body: %v", resp.StatusCode, err)
|
||||||
if err := notifyUser("API Error", detailedError); err != nil {
|
showToast("API Error", detailedError)
|
||||||
logger.Error("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
resp.Body.Close()
|
resp.Body.Close()
|
||||||
streamDone <- true
|
streamDone <- true
|
||||||
return
|
return
|
||||||
@@ -566,9 +604,7 @@ func sendMsgToLLM(body io.Reader) {
|
|||||||
// Parse the error response for detailed information
|
// Parse the error response for detailed information
|
||||||
detailedError := extractDetailedErrorFromBytes(bodyBytes, resp.StatusCode)
|
detailedError := extractDetailedErrorFromBytes(bodyBytes, resp.StatusCode)
|
||||||
logger.Error("API returned error status", "status_code", resp.StatusCode, "detailed_error", detailedError)
|
logger.Error("API returned error status", "status_code", resp.StatusCode, "detailed_error", detailedError)
|
||||||
if err := notifyUser("API Error", detailedError); err != nil {
|
showToast("API Error", detailedError)
|
||||||
logger.Error("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
resp.Body.Close()
|
resp.Body.Close()
|
||||||
streamDone <- true
|
streamDone <- true
|
||||||
return
|
return
|
||||||
@@ -605,16 +641,12 @@ func sendMsgToLLM(body io.Reader) {
|
|||||||
detailedError := fmt.Sprintf("Streaming connection closed unexpectedly (Status: %d). This may indicate an API error. Check your API provider and model settings.", resp.StatusCode)
|
detailedError := fmt.Sprintf("Streaming connection closed unexpectedly (Status: %d). This may indicate an API error. Check your API provider and model settings.", resp.StatusCode)
|
||||||
logger.Error("error reading response body", "error", err, "detailed_error", detailedError,
|
logger.Error("error reading response body", "error", err, "detailed_error", detailedError,
|
||||||
"status_code", resp.StatusCode, "user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
|
"status_code", resp.StatusCode, "user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
|
||||||
if err := notifyUser("API Error", detailedError); err != nil {
|
showToast("API Error", detailedError)
|
||||||
logger.Error("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
logger.Error("error reading response body", "error", err, "line", string(line),
|
logger.Error("error reading response body", "error", err, "line", string(line),
|
||||||
"user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
|
"user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
|
||||||
// if err.Error() != "EOF" {
|
// if err.Error() != "EOF" {
|
||||||
if err := notifyUser("API error", err.Error()); err != nil {
|
showToast("API error", err.Error())
|
||||||
logger.Error("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
streamDone <- true
|
streamDone <- true
|
||||||
break
|
break
|
||||||
@@ -641,9 +673,7 @@ func sendMsgToLLM(body io.Reader) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("error parsing response body", "error", err,
|
logger.Error("error parsing response body", "error", err,
|
||||||
"line", string(line), "url", cfg.CurrentAPI)
|
"line", string(line), "url", cfg.CurrentAPI)
|
||||||
if err := notifyUser("LLM Response Error", "Failed to parse LLM response: "+err.Error()); err != nil {
|
showToast("LLM Response Error", "Failed to parse LLM response: "+err.Error())
|
||||||
logger.Error("failed to notify user", "error", err)
|
|
||||||
}
|
|
||||||
streamDone <- true
|
streamDone <- true
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
@@ -718,7 +748,7 @@ func sendMsgToLLM(body io.Reader) {
|
|||||||
}
|
}
|
||||||
interrupt:
|
interrupt:
|
||||||
if interruptResp { // read bytes, so it would not get into beginning of the next req
|
if interruptResp { // read bytes, so it would not get into beginning of the next req
|
||||||
interruptResp = false
|
// interruptResp = false
|
||||||
logger.Info("interrupted bot response", "chunk_counter", counter)
|
logger.Info("interrupted bot response", "chunk_counter", counter)
|
||||||
streamDone <- true
|
streamDone <- true
|
||||||
break
|
break
|
||||||
@@ -772,6 +802,7 @@ func showSpinner() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func chatRound(r *models.ChatRoundReq) error {
|
func chatRound(r *models.ChatRoundReq) error {
|
||||||
|
interruptResp = false
|
||||||
botRespMode = true
|
botRespMode = true
|
||||||
go showSpinner()
|
go showSpinner()
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
@@ -795,10 +826,10 @@ func chatRound(r *models.ChatRoundReq) error {
|
|||||||
}
|
}
|
||||||
go sendMsgToLLM(reader)
|
go sendMsgToLLM(reader)
|
||||||
logger.Debug("looking at vars in chatRound", "msg", r.UserMsg, "regen", r.Regen, "resume", r.Resume)
|
logger.Debug("looking at vars in chatRound", "msg", r.UserMsg, "regen", r.Regen, "resume", r.Resume)
|
||||||
msgIdx := len(chatBody.Messages)
|
msgIdx := chatBody.GetMessageCount()
|
||||||
if !r.Resume {
|
if !r.Resume {
|
||||||
// Add empty message to chatBody immediately so it persists during Alt+T toggle
|
// Add empty message to chatBody immediately so it persists during Alt+T toggle
|
||||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{
|
chatBody.AppendMessage(models.RoleMsg{
|
||||||
Role: botPersona, Content: "",
|
Role: botPersona, Content: "",
|
||||||
})
|
})
|
||||||
nl := "\n\n"
|
nl := "\n\n"
|
||||||
@@ -810,7 +841,7 @@ func chatRound(r *models.ChatRoundReq) error {
|
|||||||
}
|
}
|
||||||
fmt.Fprintf(textView, "%s[-:-:b](%d) %s[-:-:-]\n", nl, msgIdx, roleToIcon(botPersona))
|
fmt.Fprintf(textView, "%s[-:-:b](%d) %s[-:-:-]\n", nl, msgIdx, roleToIcon(botPersona))
|
||||||
} else {
|
} else {
|
||||||
msgIdx = len(chatBody.Messages) - 1
|
msgIdx = chatBody.GetMessageCount() - 1
|
||||||
}
|
}
|
||||||
respText := strings.Builder{}
|
respText := strings.Builder{}
|
||||||
toolResp := strings.Builder{}
|
toolResp := strings.Builder{}
|
||||||
@@ -867,7 +898,10 @@ out:
|
|||||||
fmt.Fprint(textView, chunk)
|
fmt.Fprint(textView, chunk)
|
||||||
respText.WriteString(chunk)
|
respText.WriteString(chunk)
|
||||||
// Update the message in chatBody.Messages so it persists during Alt+T
|
// Update the message in chatBody.Messages so it persists during Alt+T
|
||||||
chatBody.Messages[msgIdx].Content = respText.String()
|
chatBody.UpdateMessageFunc(msgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||||
|
msg.Content = respText.String()
|
||||||
|
return msg
|
||||||
|
})
|
||||||
if scrollToEndEnabled {
|
if scrollToEndEnabled {
|
||||||
textView.ScrollToEnd()
|
textView.ScrollToEnd()
|
||||||
}
|
}
|
||||||
@@ -910,33 +944,39 @@ out:
|
|||||||
}
|
}
|
||||||
botRespMode = false
|
botRespMode = false
|
||||||
if r.Resume {
|
if r.Resume {
|
||||||
chatBody.Messages[len(chatBody.Messages)-1].Content += respText.String()
|
chatBody.UpdateMessageFunc(chatBody.GetMessageCount()-1, func(msg models.RoleMsg) models.RoleMsg {
|
||||||
updatedMsg := chatBody.Messages[len(chatBody.Messages)-1]
|
msg.Content += respText.String()
|
||||||
processedMsg := processMessageTag(&updatedMsg)
|
processedMsg := processMessageTag(&msg)
|
||||||
chatBody.Messages[len(chatBody.Messages)-1] = *processedMsg
|
if msgStats != nil && processedMsg.Role != cfg.ToolRole {
|
||||||
if msgStats != nil && chatBody.Messages[len(chatBody.Messages)-1].Role != cfg.ToolRole {
|
processedMsg.Stats = msgStats
|
||||||
chatBody.Messages[len(chatBody.Messages)-1].Stats = msgStats
|
}
|
||||||
}
|
return *processedMsg
|
||||||
|
})
|
||||||
} else {
|
} else {
|
||||||
chatBody.Messages[msgIdx].Content = respText.String()
|
chatBody.UpdateMessageFunc(msgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||||
processedMsg := processMessageTag(&chatBody.Messages[msgIdx])
|
msg.Content = respText.String()
|
||||||
chatBody.Messages[msgIdx] = *processedMsg
|
processedMsg := processMessageTag(&msg)
|
||||||
if msgStats != nil && chatBody.Messages[msgIdx].Role != cfg.ToolRole {
|
if msgStats != nil && processedMsg.Role != cfg.ToolRole {
|
||||||
chatBody.Messages[msgIdx].Stats = msgStats
|
processedMsg.Stats = msgStats
|
||||||
}
|
}
|
||||||
stopTTSIfNotForUser(&chatBody.Messages[msgIdx])
|
return *processedMsg
|
||||||
|
})
|
||||||
|
stopTTSIfNotForUser(&chatBody.GetMessages()[msgIdx])
|
||||||
}
|
}
|
||||||
cleanChatBody()
|
cleanChatBody()
|
||||||
refreshChatDisplay()
|
refreshChatDisplay()
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
// bot msg is done;
|
// bot msg is done;
|
||||||
// now check it for func call
|
// now check it for func call
|
||||||
// logChat(activeChatName, chatBody.Messages)
|
// logChat(activeChatName, chatBody.GetMessages())
|
||||||
if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
|
if err := updateStorageChat(activeChatName, chatBody.GetMessages()); err != nil {
|
||||||
logger.Warn("failed to update storage", "error", err, "name", activeChatName)
|
logger.Warn("failed to update storage", "error", err, "name", activeChatName)
|
||||||
}
|
}
|
||||||
// Strip think blocks before parsing for tool calls
|
// Strip think blocks before parsing for tool calls
|
||||||
respTextNoThink := thinkBlockRE.ReplaceAllString(respText.String(), "")
|
respTextNoThink := thinkBlockRE.ReplaceAllString(respText.String(), "")
|
||||||
|
if interruptResp {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
if findCall(respTextNoThink, toolResp.String()) {
|
if findCall(respTextNoThink, toolResp.String()) {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -944,8 +984,8 @@ out:
|
|||||||
// If so, trigger those characters to respond if that char is not controlled by user
|
// If so, trigger those characters to respond if that char is not controlled by user
|
||||||
// perhaps we should have narrator role to determine which char is next to act
|
// perhaps we should have narrator role to determine which char is next to act
|
||||||
if cfg.AutoTurn {
|
if cfg.AutoTurn {
|
||||||
lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
|
lastMsg, ok := chatBody.GetLastMessage()
|
||||||
if len(lastMsg.KnownTo) > 0 {
|
if ok && len(lastMsg.KnownTo) > 0 {
|
||||||
triggerPrivateMessageResponses(&lastMsg)
|
triggerPrivateMessageResponses(&lastMsg)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -954,13 +994,15 @@ out:
|
|||||||
|
|
||||||
// cleanChatBody removes messages with null or empty content to prevent API issues
|
// cleanChatBody removes messages with null or empty content to prevent API issues
|
||||||
func cleanChatBody() {
|
func cleanChatBody() {
|
||||||
if chatBody == nil || chatBody.Messages == nil {
|
if chatBody == nil || chatBody.GetMessageCount() == 0 {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false)
|
// Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false)
|
||||||
// /completion msg where part meant for user and other part tool call
|
// /completion msg where part meant for user and other part tool call
|
||||||
// chatBody.Messages = cleanToolCalls(chatBody.Messages)
|
// chatBody.Messages = cleanToolCalls(chatBody.Messages)
|
||||||
chatBody.Messages = consolidateAssistantMessages(chatBody.Messages)
|
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||||
|
cb.Messages = consolidateAssistantMessages(cb.Messages)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings.
|
// convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings.
|
||||||
@@ -1060,7 +1102,7 @@ func findCall(msg, toolCall string) bool {
|
|||||||
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
|
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
|
||||||
ToolCallID: lastToolCall.ID, // Use the stored tool call ID
|
ToolCallID: lastToolCall.ID, // Use the stored tool call ID
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
chatBody.AppendMessage(toolResponseMsg)
|
||||||
// Clear the stored tool call ID after using it (no longer needed)
|
// Clear the stored tool call ID after using it (no longer needed)
|
||||||
// Trigger the assistant to continue processing with the error message
|
// Trigger the assistant to continue processing with the error message
|
||||||
crr := &models.ChatRoundReq{
|
crr := &models.ChatRoundReq{
|
||||||
@@ -1097,7 +1139,7 @@ func findCall(msg, toolCall string) bool {
|
|||||||
Role: cfg.ToolRole,
|
Role: cfg.ToolRole,
|
||||||
Content: "Error processing tool call: no valid JSON found. Please check the JSON format.",
|
Content: "Error processing tool call: no valid JSON found. Please check the JSON format.",
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
chatBody.AppendMessage(toolResponseMsg)
|
||||||
crr := &models.ChatRoundReq{
|
crr := &models.ChatRoundReq{
|
||||||
Role: cfg.AssistantRole,
|
Role: cfg.AssistantRole,
|
||||||
}
|
}
|
||||||
@@ -1114,8 +1156,8 @@ func findCall(msg, toolCall string) bool {
|
|||||||
Role: cfg.ToolRole,
|
Role: cfg.ToolRole,
|
||||||
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
|
Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
chatBody.AppendMessage(toolResponseMsg)
|
||||||
logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", len(chatBody.Messages))
|
logger.Debug("findCall: added tool error response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "message_count_after_add", chatBody.GetMessageCount())
|
||||||
// Trigger the assistant to continue processing with the error message
|
// Trigger the assistant to continue processing with the error message
|
||||||
// chatRound("", cfg.AssistantRole, tv, false, false)
|
// chatRound("", cfg.AssistantRole, tv, false, false)
|
||||||
crr := &models.ChatRoundReq{
|
crr := &models.ChatRoundReq{
|
||||||
@@ -1133,17 +1175,23 @@ func findCall(msg, toolCall string) bool {
|
|||||||
// we got here => last msg recognized as a tool call (correct or not)
|
// we got here => last msg recognized as a tool call (correct or not)
|
||||||
// Use the tool call ID from streaming response (lastToolCall.ID)
|
// Use the tool call ID from streaming response (lastToolCall.ID)
|
||||||
// Don't generate random ID - the ID should match between assistant message and tool response
|
// Don't generate random ID - the ID should match between assistant message and tool response
|
||||||
lastMsgIdx := len(chatBody.Messages) - 1
|
lastMsgIdx := chatBody.GetMessageCount() - 1
|
||||||
if lastToolCall.ID != "" {
|
if lastToolCall.ID != "" {
|
||||||
chatBody.Messages[lastMsgIdx].ToolCallID = lastToolCall.ID
|
chatBody.UpdateMessageFunc(lastMsgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||||
|
msg.ToolCallID = lastToolCall.ID
|
||||||
|
return msg
|
||||||
|
})
|
||||||
}
|
}
|
||||||
// Store tool call info in the assistant message
|
// Store tool call info in the assistant message
|
||||||
// Convert Args map to JSON string for storage
|
// Convert Args map to JSON string for storage
|
||||||
chatBody.Messages[lastMsgIdx].ToolCall = &models.ToolCall{
|
chatBody.UpdateMessageFunc(lastMsgIdx, func(msg models.RoleMsg) models.RoleMsg {
|
||||||
ID: lastToolCall.ID,
|
msg.ToolCall = &models.ToolCall{
|
||||||
Name: lastToolCall.Name,
|
ID: lastToolCall.ID,
|
||||||
Args: mapToString(lastToolCall.Args),
|
Name: lastToolCall.Name,
|
||||||
}
|
Args: mapToString(lastToolCall.Args),
|
||||||
|
}
|
||||||
|
return msg
|
||||||
|
})
|
||||||
// call a func
|
// call a func
|
||||||
_, ok := fnMap[fc.Name]
|
_, ok := fnMap[fc.Name]
|
||||||
if !ok {
|
if !ok {
|
||||||
@@ -1154,8 +1202,8 @@ func findCall(msg, toolCall string) bool {
|
|||||||
Content: m,
|
Content: m,
|
||||||
ToolCallID: lastToolCall.ID, // Use the stored tool call ID
|
ToolCallID: lastToolCall.ID, // Use the stored tool call ID
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
chatBody.AppendMessage(toolResponseMsg)
|
||||||
logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages))
|
logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", chatBody.GetMessageCount())
|
||||||
// Clear the stored tool call ID after using it
|
// Clear the stored tool call ID after using it
|
||||||
lastToolCall.ID = ""
|
lastToolCall.ID = ""
|
||||||
// Trigger the assistant to continue processing with the new tool response
|
// Trigger the assistant to continue processing with the new tool response
|
||||||
@@ -1174,19 +1222,61 @@ func findCall(msg, toolCall string) bool {
|
|||||||
toolRunningMode = false
|
toolRunningMode = false
|
||||||
toolMsg := string(resp)
|
toolMsg := string(resp)
|
||||||
logger.Info("llm used a tool call", "tool_name", fc.Name, "too_args", fc.Args, "id", fc.ID, "tool_resp", toolMsg)
|
logger.Info("llm used a tool call", "tool_name", fc.Name, "too_args", fc.Args, "id", fc.ID, "tool_resp", toolMsg)
|
||||||
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
|
|
||||||
"\n\n", len(chatBody.Messages), cfg.ToolRole, toolMsg)
|
|
||||||
// Create tool response message with the proper tool_call_id
|
// Create tool response message with the proper tool_call_id
|
||||||
// Mark shell commands as always visible
|
// Mark shell commands as always visible
|
||||||
isShellCommand := fc.Name == "execute_command"
|
isShellCommand := fc.Name == "execute_command"
|
||||||
toolResponseMsg := models.RoleMsg{
|
// Check if response is multimodal content (image)
|
||||||
Role: cfg.ToolRole,
|
var toolResponseMsg models.RoleMsg
|
||||||
Content: toolMsg,
|
if strings.HasPrefix(strings.TrimSpace(toolMsg), `{"type":"multimodal_content"`) {
|
||||||
ToolCallID: lastToolCall.ID,
|
// Parse multimodal content response
|
||||||
IsShellCommand: isShellCommand,
|
multimodalResp := models.MultimodalToolResp{}
|
||||||
|
if err := json.Unmarshal([]byte(toolMsg), &multimodalResp); err == nil && multimodalResp.Type == "multimodal_content" {
|
||||||
|
// Create RoleMsg with ContentParts
|
||||||
|
var contentParts []any
|
||||||
|
for _, part := range multimodalResp.Parts {
|
||||||
|
partType := part["type"]
|
||||||
|
switch partType {
|
||||||
|
case "text":
|
||||||
|
contentParts = append(contentParts, models.TextContentPart{Type: "text", Text: part["text"]})
|
||||||
|
case "image_url":
|
||||||
|
contentParts = append(contentParts, models.ImageContentPart{
|
||||||
|
Type: "image_url",
|
||||||
|
ImageURL: struct {
|
||||||
|
URL string `json:"url"`
|
||||||
|
}{URL: part["url"]},
|
||||||
|
})
|
||||||
|
default:
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
toolResponseMsg = models.RoleMsg{
|
||||||
|
Role: cfg.ToolRole,
|
||||||
|
ContentParts: contentParts,
|
||||||
|
HasContentParts: true,
|
||||||
|
ToolCallID: lastToolCall.ID,
|
||||||
|
IsShellCommand: isShellCommand,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Fallback to regular content
|
||||||
|
toolResponseMsg = models.RoleMsg{
|
||||||
|
Role: cfg.ToolRole,
|
||||||
|
Content: toolMsg,
|
||||||
|
ToolCallID: lastToolCall.ID,
|
||||||
|
IsShellCommand: isShellCommand,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
toolResponseMsg = models.RoleMsg{
|
||||||
|
Role: cfg.ToolRole,
|
||||||
|
Content: toolMsg,
|
||||||
|
ToolCallID: lastToolCall.ID,
|
||||||
|
IsShellCommand: isShellCommand,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
|
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
|
||||||
logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages))
|
"\n\n", chatBody.GetMessageCount(), cfg.ToolRole, toolResponseMsg.GetText())
|
||||||
|
chatBody.AppendMessage(toolResponseMsg)
|
||||||
|
logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", chatBody.GetMessageCount())
|
||||||
// Clear the stored tool call ID after using it
|
// Clear the stored tool call ID after using it
|
||||||
lastToolCall.ID = ""
|
lastToolCall.ID = ""
|
||||||
// Trigger the assistant to continue processing with the new tool response
|
// Trigger the assistant to continue processing with the new tool response
|
||||||
@@ -1207,11 +1297,11 @@ func chatToTextSlice(messages []models.RoleMsg, showSys bool) []string {
|
|||||||
// This is a tool call indicator - show collapsed
|
// This is a tool call indicator - show collapsed
|
||||||
if toolCollapsed {
|
if toolCollapsed {
|
||||||
toolName := messages[i].ToolCall.Name
|
toolName := messages[i].ToolCall.Name
|
||||||
resp[i] = fmt.Sprintf("%s\n[yellow::i][tool call: %s (press Ctrl+T to expand)][-:-:-]\n", icon, toolName)
|
resp[i] = strings.ReplaceAll(fmt.Sprintf("%s\n%s\n[yellow::i][tool call: %s (press Ctrl+T to expand)][-:-:-]\n", icon, messages[i].GetText(), toolName), "\n\n", "\n")
|
||||||
} else {
|
} else {
|
||||||
// Show full tool call info
|
// Show full tool call info
|
||||||
toolName := messages[i].ToolCall.Name
|
toolName := messages[i].ToolCall.Name
|
||||||
resp[i] = fmt.Sprintf("%s\n%s\n[yellow::i][tool call: %s][-:-:-]\nargs: %s\nid: %s\n", icon, messages[i].GetText(), toolName, messages[i].ToolCall.Args, messages[i].ToolCall.ID)
|
resp[i] = strings.ReplaceAll(fmt.Sprintf("%s\n%s\n[yellow::i][tool call: %s][-:-:-]\nargs: %s\nid: %s\n", icon, messages[i].GetText(), toolName, messages[i].ToolCall.Args, messages[i].ToolCall.ID), "\n\n", "\n")
|
||||||
}
|
}
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
@@ -1305,8 +1395,8 @@ func applyCharCard(cc *models.CharCard, loadHistory bool) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func charToStart(agentName string, keepSysP bool) bool {
|
func charToStart(agentName string, keepSysP bool) bool {
|
||||||
cc, ok := sysMap[agentName]
|
cc := GetCardByRole(agentName)
|
||||||
if !ok {
|
if cc == nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
applyCharCard(cc, keepSysP)
|
applyCharCard(cc, keepSysP)
|
||||||
@@ -1316,49 +1406,63 @@ func charToStart(agentName string, keepSysP bool) bool {
|
|||||||
func updateModelLists() {
|
func updateModelLists() {
|
||||||
var err error
|
var err error
|
||||||
if cfg.OpenRouterToken != "" {
|
if cfg.OpenRouterToken != "" {
|
||||||
ORFreeModels, err = fetchORModels(true)
|
_, err := fetchORModels(true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Warn("failed to fetch or models", "error", err)
|
logger.Warn("failed to fetch or models", "error", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// if llama.cpp started after gf-lt?
|
// if llama.cpp started after gf-lt?
|
||||||
localModelsMu.Lock()
|
ml, err := fetchLCPModelsWithLoadStatus()
|
||||||
LocalModels, err = fetchLCPModels()
|
|
||||||
localModelsMu.Unlock()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Warn("failed to fetch llama.cpp models", "error", err)
|
logger.Warn("failed to fetch llama.cpp models", "error", err)
|
||||||
}
|
}
|
||||||
|
LocalModels.Store(ml)
|
||||||
|
for statusLineWidget == nil {
|
||||||
|
time.Sleep(time.Millisecond * 100)
|
||||||
|
}
|
||||||
|
// set already loaded model in llama.cpp
|
||||||
|
if strings.Contains(cfg.CurrentAPI, "localhost") || strings.Contains(cfg.CurrentAPI, "127.0.0.1") {
|
||||||
|
modelList := LocalModels.Load().([]string)
|
||||||
|
for i := range modelList {
|
||||||
|
if strings.Contains(modelList[i], models.LoadedMark) {
|
||||||
|
m := strings.TrimPrefix(modelList[i], models.LoadedMark)
|
||||||
|
cfg.CurrentModel = m
|
||||||
|
chatBody.Model = m
|
||||||
|
cachedModelColor.Store("green")
|
||||||
|
updateStatusLine()
|
||||||
|
updateToolCapabilities()
|
||||||
|
app.Draw()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func refreshLocalModelsIfEmpty() {
|
func refreshLocalModelsIfEmpty() {
|
||||||
localModelsMu.RLock()
|
models := LocalModels.Load().([]string)
|
||||||
if len(LocalModels) > 0 {
|
if len(models) > 0 {
|
||||||
localModelsMu.RUnlock()
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
localModelsMu.RUnlock()
|
|
||||||
// try to fetch
|
// try to fetch
|
||||||
models, err := fetchLCPModels()
|
models, err := fetchLCPModels()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Warn("failed to fetch llama.cpp models", "error", err)
|
logger.Warn("failed to fetch llama.cpp models", "error", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
localModelsMu.Lock()
|
LocalModels.Store(models)
|
||||||
LocalModels = models
|
|
||||||
localModelsMu.Unlock()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func summarizeAndStartNewChat() {
|
func summarizeAndStartNewChat() {
|
||||||
if len(chatBody.Messages) == 0 {
|
if len(chatBody.Messages) == 0 {
|
||||||
_ = notifyUser("info", "No chat history to summarize")
|
showToast("info", "No chat history to summarize")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
_ = notifyUser("info", "Summarizing chat history...")
|
showToast("info", "Summarizing chat history...")
|
||||||
// Call the summarize_chat tool via agent
|
// Call the summarize_chat tool via agent
|
||||||
summaryBytes := callToolWithAgent("summarize_chat", map[string]string{})
|
summaryBytes := callToolWithAgent("summarize_chat", map[string]string{})
|
||||||
summary := string(summaryBytes)
|
summary := string(summaryBytes)
|
||||||
if summary == "" {
|
if summary == "" {
|
||||||
_ = notifyUser("error", "Failed to generate summary")
|
showToast("error", "Failed to generate summary")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Start a new chat
|
// Start a new chat
|
||||||
@@ -1377,7 +1481,7 @@ func summarizeAndStartNewChat() {
|
|||||||
if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
|
if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
|
||||||
logger.Warn("failed to update storage after injecting summary", "error", err)
|
logger.Warn("failed to update storage after injecting summary", "error", err)
|
||||||
}
|
}
|
||||||
_ = notifyUser("info", "Chat summarized and new chat started with summary as tool response")
|
showToast("info", "Chat summarized and new chat started with summary as tool response")
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
@@ -1405,14 +1509,20 @@ func init() {
|
|||||||
// load cards
|
// load cards
|
||||||
basicCard.Role = cfg.AssistantRole
|
basicCard.Role = cfg.AssistantRole
|
||||||
logLevel.Set(slog.LevelInfo)
|
logLevel.Set(slog.LevelInfo)
|
||||||
logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel}))
|
logger = slog.New(slog.NewTextHandler(logfile, &slog.HandlerOptions{Level: logLevel, AddSource: true}))
|
||||||
store = storage.NewProviderSQL(cfg.DBPATH, logger)
|
store = storage.NewProviderSQL(cfg.DBPATH, logger)
|
||||||
if store == nil {
|
if store == nil {
|
||||||
cancel()
|
cancel()
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
ragger = rag.New(logger, store, cfg)
|
ragger, err = rag.New(logger, store, cfg)
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("failed to create RAG", "error", err)
|
||||||
|
}
|
||||||
|
if ragger != nil && ragger.FallbackMessage() != "" && app != nil {
|
||||||
|
showToast("RAG", "ONNX unavailable, using API: "+ragger.FallbackMessage())
|
||||||
|
}
|
||||||
// https://github.com/coreydaley/ggerganov-llama.cpp/blob/master/examples/server/README.md
|
// https://github.com/coreydaley/ggerganov-llama.cpp/blob/master/examples/server/README.md
|
||||||
// load all chats in memory
|
// load all chats in memory
|
||||||
if _, err := loadHistoryChats(); err != nil {
|
if _, err := loadHistoryChats(); err != nil {
|
||||||
@@ -1423,11 +1533,11 @@ func init() {
|
|||||||
}
|
}
|
||||||
lastToolCall = &models.FuncCall{}
|
lastToolCall = &models.FuncCall{}
|
||||||
lastChat := loadOldChatOrGetNew()
|
lastChat := loadOldChatOrGetNew()
|
||||||
chatBody = &models.ChatBody{
|
chatBody = models.NewSafeChatBody(&models.ChatBody{
|
||||||
Model: "modelname",
|
Model: "modelname",
|
||||||
Stream: true,
|
Stream: true,
|
||||||
Messages: lastChat,
|
Messages: lastChat,
|
||||||
}
|
})
|
||||||
choseChunkParser()
|
choseChunkParser()
|
||||||
httpClient = createClient(time.Second * 90)
|
httpClient = createClient(time.Second * 90)
|
||||||
if cfg.TTS_ENABLED {
|
if cfg.TTS_ENABLED {
|
||||||
@@ -1436,6 +1546,23 @@ func init() {
|
|||||||
if cfg.STT_ENABLED {
|
if cfg.STT_ENABLED {
|
||||||
asr = NewSTT(logger, cfg)
|
asr = NewSTT(logger, cfg)
|
||||||
}
|
}
|
||||||
|
if cfg.PlaywrightEnabled {
|
||||||
|
if err := checkPlaywright(); err != nil {
|
||||||
|
// slow, need a faster check if playwright install
|
||||||
|
if err := installPW(); err != nil {
|
||||||
|
logger.Error("failed to install playwright", "error", err)
|
||||||
|
cancel()
|
||||||
|
os.Exit(1)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err := checkPlaywright(); err != nil {
|
||||||
|
logger.Error("failed to run playwright", "error", err)
|
||||||
|
cancel()
|
||||||
|
os.Exit(1)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
// Initialize scrollToEndEnabled based on config
|
// Initialize scrollToEndEnabled based on config
|
||||||
scrollToEndEnabled = cfg.AutoScrollEnabled
|
scrollToEndEnabled = cfg.AutoScrollEnabled
|
||||||
go updateModelLists()
|
go updateModelLists()
|
||||||
|
|||||||
@@ -13,6 +13,9 @@ OpenRouterChatAPI = "https://openrouter.ai/api/v1/chat/completions"
|
|||||||
# embeddings
|
# embeddings
|
||||||
EmbedURL = "http://localhost:8082/v1/embeddings"
|
EmbedURL = "http://localhost:8082/v1/embeddings"
|
||||||
HFToken = ""
|
HFToken = ""
|
||||||
|
EmbedModelPath = "onnx/embedgemma/model_q4.onnx"
|
||||||
|
EmbedTokenizerPath = "onnx/embedgemma/tokenizer.json"
|
||||||
|
EmbedDims = 768
|
||||||
#
|
#
|
||||||
ShowSys = true
|
ShowSys = true
|
||||||
LogFile = "log.txt"
|
LogFile = "log.txt"
|
||||||
@@ -24,9 +27,9 @@ ChunkLimit = 100000
|
|||||||
AutoScrollEnabled = true
|
AutoScrollEnabled = true
|
||||||
AutoCleanToolCallsFromCtx = false
|
AutoCleanToolCallsFromCtx = false
|
||||||
# rag settings
|
# rag settings
|
||||||
RAGEnabled = false
|
|
||||||
RAGBatchSize = 1
|
RAGBatchSize = 1
|
||||||
RAGWordLimit = 80
|
RAGWordLimit = 80
|
||||||
|
RAGOverlapWords = 16
|
||||||
RAGDir = "ragimport"
|
RAGDir = "ragimport"
|
||||||
# extra tts
|
# extra tts
|
||||||
TTS_ENABLED = false
|
TTS_ENABLED = false
|
||||||
@@ -56,3 +59,6 @@ StripThinkingFromAPI = true # Strip <think> blocks from messages before sending
|
|||||||
# Valid values: xhigh, high, medium, low, minimal, none (empty or none = disabled)
|
# Valid values: xhigh, high, medium, low, minimal, none (empty or none = disabled)
|
||||||
# Models that support reasoning will include thinking content wrapped in <think> tags
|
# Models that support reasoning will include thinking content wrapped in <think> tags
|
||||||
ReasoningEffort = "medium"
|
ReasoningEffort = "medium"
|
||||||
|
# playwright tools
|
||||||
|
PlaywrightEnabled = false
|
||||||
|
PlaywrightDebug = false
|
||||||
|
|||||||
@@ -34,13 +34,16 @@ type Config struct {
|
|||||||
ImagePreview bool `toml:"ImagePreview"`
|
ImagePreview bool `toml:"ImagePreview"`
|
||||||
EnableMouse bool `toml:"EnableMouse"`
|
EnableMouse bool `toml:"EnableMouse"`
|
||||||
// embeddings
|
// embeddings
|
||||||
EmbedURL string `toml:"EmbedURL"`
|
EmbedURL string `toml:"EmbedURL"`
|
||||||
HFToken string `toml:"HFToken"`
|
HFToken string `toml:"HFToken"`
|
||||||
|
EmbedModelPath string `toml:"EmbedModelPath"`
|
||||||
|
EmbedTokenizerPath string `toml:"EmbedTokenizerPath"`
|
||||||
|
EmbedDims int `toml:"EmbedDims"`
|
||||||
// rag settings
|
// rag settings
|
||||||
RAGEnabled bool `toml:"RAGEnabled"`
|
RAGDir string `toml:"RAGDir"`
|
||||||
RAGDir string `toml:"RAGDir"`
|
RAGBatchSize int `toml:"RAGBatchSize"`
|
||||||
RAGBatchSize int `toml:"RAGBatchSize"`
|
RAGWordLimit uint32 `toml:"RAGWordLimit"`
|
||||||
RAGWordLimit uint32 `toml:"RAGWordLimit"`
|
RAGOverlapWords uint32 `toml:"RAGOverlapWords"`
|
||||||
// deepseek
|
// deepseek
|
||||||
DeepSeekChatAPI string `toml:"DeepSeekChatAPI"`
|
DeepSeekChatAPI string `toml:"DeepSeekChatAPI"`
|
||||||
DeepSeekCompletionAPI string `toml:"DeepSeekCompletionAPI"`
|
DeepSeekCompletionAPI string `toml:"DeepSeekCompletionAPI"`
|
||||||
@@ -70,6 +73,9 @@ type Config struct {
|
|||||||
CharSpecificContextEnabled bool `toml:"CharSpecificContextEnabled"`
|
CharSpecificContextEnabled bool `toml:"CharSpecificContextEnabled"`
|
||||||
CharSpecificContextTag string `toml:"CharSpecificContextTag"`
|
CharSpecificContextTag string `toml:"CharSpecificContextTag"`
|
||||||
AutoTurn bool `toml:"AutoTurn"`
|
AutoTurn bool `toml:"AutoTurn"`
|
||||||
|
// playwright browser
|
||||||
|
PlaywrightEnabled bool `toml:"PlaywrightEnabled"`
|
||||||
|
PlaywrightDebug bool `toml:"PlaywrightDebug"` // !headless
|
||||||
}
|
}
|
||||||
|
|
||||||
func LoadConfig(fn string) (*Config, error) {
|
func LoadConfig(fn string) (*Config, error) {
|
||||||
|
|||||||
@@ -71,9 +71,6 @@ This document explains how to set up and configure the application using the `co
|
|||||||
#### EmbedURL (`"http://localhost:8082/v1/embeddings"`)
|
#### EmbedURL (`"http://localhost:8082/v1/embeddings"`)
|
||||||
- The endpoint for embedding API, used for RAG (Retrieval Augmented Generation) functionality.
|
- The endpoint for embedding API, used for RAG (Retrieval Augmented Generation) functionality.
|
||||||
|
|
||||||
#### RAGEnabled (`false`)
|
|
||||||
- Enable or disable RAG functionality for enhanced context retrieval.
|
|
||||||
|
|
||||||
#### RAGBatchSize (`1`)
|
#### RAGBatchSize (`1`)
|
||||||
- Number of documents to process in each RAG batch.
|
- Number of documents to process in each RAG batch.
|
||||||
|
|
||||||
@@ -162,6 +159,15 @@ Those could be switched in program, but also bould be setup in config.
|
|||||||
#### ToolUse
|
#### ToolUse
|
||||||
- Enable or disable explanation of tools to llm, so it could use them.
|
- Enable or disable explanation of tools to llm, so it could use them.
|
||||||
|
|
||||||
|
#### Playwright Browser Automation
|
||||||
|
These settings enable browser automation tools available to the LLM.
|
||||||
|
|
||||||
|
- **PlaywrightEnabled** (`false`)
|
||||||
|
- Enable or disable Playwright browser automation tools for the LLM. When enabled, the LLM can use tools like `pw_browser`, `pw_close`, and `pw_status` to automate browser interactions.
|
||||||
|
|
||||||
|
- **PlaywrightDebug** (`false`)
|
||||||
|
- Enable debug mode for Playwright browser. When set to `true`, the browser runs in visible (non-headless) mode, displaying the GUI for debugging purposes. When `false`, the browser runs in headless mode by default.
|
||||||
|
|
||||||
### StripThinkingFromAPI (`true`)
|
### StripThinkingFromAPI (`true`)
|
||||||
- Strip thinking blocks from messages before sending to LLM. Keeps them in chat history for local viewing but reduces token usage in API calls.
|
- Strip thinking blocks from messages before sending to LLM. Keeps them in chat history for local viewing but reduces token usage in API calls.
|
||||||
|
|
||||||
|
|||||||
11
go.mod
11
go.mod
@@ -14,25 +14,36 @@ require (
|
|||||||
github.com/jmoiron/sqlx v1.4.0
|
github.com/jmoiron/sqlx v1.4.0
|
||||||
github.com/ledongthuc/pdf v0.0.0-20250511090121-5959a4027728
|
github.com/ledongthuc/pdf v0.0.0-20250511090121-5959a4027728
|
||||||
github.com/neurosnap/sentences v1.1.2
|
github.com/neurosnap/sentences v1.1.2
|
||||||
|
github.com/playwright-community/playwright-go v0.5700.1
|
||||||
github.com/rivo/tview v0.42.0
|
github.com/rivo/tview v0.42.0
|
||||||
|
github.com/sugarme/tokenizer v0.3.0
|
||||||
|
github.com/yalue/onnxruntime_go v1.27.0
|
||||||
github.com/yuin/goldmark v1.4.13
|
github.com/yuin/goldmark v1.4.13
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/andybalholm/cascadia v1.3.3 // indirect
|
github.com/andybalholm/cascadia v1.3.3 // indirect
|
||||||
|
github.com/deckarep/golang-set/v2 v2.8.0 // indirect
|
||||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||||
github.com/ebitengine/oto/v3 v3.4.0 // indirect
|
github.com/ebitengine/oto/v3 v3.4.0 // indirect
|
||||||
github.com/ebitengine/purego v0.9.1 // indirect
|
github.com/ebitengine/purego v0.9.1 // indirect
|
||||||
|
github.com/emirpasic/gods v1.18.1 // indirect
|
||||||
github.com/gdamore/encoding v1.0.1 // indirect
|
github.com/gdamore/encoding v1.0.1 // indirect
|
||||||
|
github.com/go-jose/go-jose/v3 v3.0.4 // indirect
|
||||||
|
github.com/go-stack/stack v1.8.1 // indirect
|
||||||
github.com/google/uuid v1.6.0 // indirect
|
github.com/google/uuid v1.6.0 // indirect
|
||||||
github.com/hajimehoshi/go-mp3 v0.3.4 // indirect
|
github.com/hajimehoshi/go-mp3 v0.3.4 // indirect
|
||||||
github.com/hajimehoshi/oto/v2 v2.3.1 // indirect
|
github.com/hajimehoshi/oto/v2 v2.3.1 // indirect
|
||||||
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
|
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
|
||||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||||
|
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
|
||||||
github.com/pkg/errors v0.9.1 // indirect
|
github.com/pkg/errors v0.9.1 // indirect
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||||
github.com/rivo/uniseg v0.4.7 // indirect
|
github.com/rivo/uniseg v0.4.7 // indirect
|
||||||
|
github.com/schollz/progressbar/v2 v2.15.0 // indirect
|
||||||
|
github.com/sugarme/regexpset v0.0.0-20200920021344-4d4ec8eaf93c // indirect
|
||||||
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
|
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
|
||||||
golang.org/x/net v0.48.0 // indirect
|
golang.org/x/net v0.48.0 // indirect
|
||||||
golang.org/x/sys v0.39.0 // indirect
|
golang.org/x/sys v0.39.0 // indirect
|
||||||
|
|||||||
29
go.sum
29
go.sum
@@ -10,22 +10,32 @@ github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43
|
|||||||
github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ=
|
github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ=
|
||||||
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
||||||
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/deckarep/golang-set/v2 v2.8.0 h1:swm0rlPCmdWn9mESxKOjWk8hXSqoxOp+ZlfuyaAdFlQ=
|
||||||
|
github.com/deckarep/golang-set/v2 v2.8.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4=
|
||||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||||
github.com/ebitengine/oto/v3 v3.4.0 h1:br0PgASsEWaoWn38b2Goe7m1GKFYfNgnsjSd5Gg+/bQ=
|
github.com/ebitengine/oto/v3 v3.4.0 h1:br0PgASsEWaoWn38b2Goe7m1GKFYfNgnsjSd5Gg+/bQ=
|
||||||
github.com/ebitengine/oto/v3 v3.4.0/go.mod h1:IOleLVD0m+CMak3mRVwsYY8vTctQgOM0iiL6S7Ar7eI=
|
github.com/ebitengine/oto/v3 v3.4.0/go.mod h1:IOleLVD0m+CMak3mRVwsYY8vTctQgOM0iiL6S7Ar7eI=
|
||||||
github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A=
|
github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A=
|
||||||
github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
||||||
|
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||||
|
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||||
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
||||||
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
||||||
github.com/gdamore/tcell/v2 v2.13.2 h1:5j4srfF8ow3HICOv/61/sOhQtA25qxEB2XR3Q/Bhx2g=
|
github.com/gdamore/tcell/v2 v2.13.2 h1:5j4srfF8ow3HICOv/61/sOhQtA25qxEB2XR3Q/Bhx2g=
|
||||||
github.com/gdamore/tcell/v2 v2.13.2/go.mod h1:+Wfe208WDdB7INEtCsNrAN6O2m+wsTPk1RAovjaILlo=
|
github.com/gdamore/tcell/v2 v2.13.2/go.mod h1:+Wfe208WDdB7INEtCsNrAN6O2m+wsTPk1RAovjaILlo=
|
||||||
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
|
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
|
||||||
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
|
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
|
||||||
|
github.com/go-jose/go-jose/v3 v3.0.4 h1:Wp5HA7bLQcKnf6YYao/4kpRpVMp/yf6+pJKV8WFSaNY=
|
||||||
|
github.com/go-jose/go-jose/v3 v3.0.4/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
|
||||||
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
||||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||||
|
github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw=
|
||||||
|
github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4=
|
||||||
|
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||||
@@ -53,12 +63,18 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
|
|||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||||
|
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
|
||||||
|
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
|
||||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||||
github.com/neurosnap/sentences v1.1.2 h1:iphYOzx/XckXeBiLIUBkPu2EKMJ+6jDbz/sLJZ7ZoUw=
|
github.com/neurosnap/sentences v1.1.2 h1:iphYOzx/XckXeBiLIUBkPu2EKMJ+6jDbz/sLJZ7ZoUw=
|
||||||
github.com/neurosnap/sentences v1.1.2/go.mod h1:/pwU4E9XNL21ygMIkOIllv/SMy2ujHwpf8GQPu1YPbQ=
|
github.com/neurosnap/sentences v1.1.2/go.mod h1:/pwU4E9XNL21ygMIkOIllv/SMy2ujHwpf8GQPu1YPbQ=
|
||||||
|
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
|
||||||
|
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
|
||||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/playwright-community/playwright-go v0.5700.1 h1:PNFb1byWqrTT720rEO0JL88C6Ju0EmUnR5deFLvtP/U=
|
||||||
|
github.com/playwright-community/playwright-go v0.5700.1/go.mod h1:MlSn1dZrx8rszbCxY6x3qK89ZesJUYVx21B2JnkoNF0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
@@ -67,8 +83,19 @@ github.com/rivo/tview v0.42.0 h1:b/ftp+RxtDsHSaynXTbJb+/n/BxDEi+W3UfF5jILK6c=
|
|||||||
github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
|
github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
|
||||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
|
github.com/schollz/progressbar/v2 v2.15.0 h1:dVzHQ8fHRmtPjD3K10jT3Qgn/+H+92jhPrhmxIJfDz8=
|
||||||
|
github.com/schollz/progressbar/v2 v2.15.0/go.mod h1:UdPq3prGkfQ7MOzZKlDRpYKcFqEMczbD7YmbPgpzKMI=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
|
github.com/sugarme/regexpset v0.0.0-20200920021344-4d4ec8eaf93c h1:pwb4kNSHb4K89ymCaN+5lPH/MwnfSVg4rzGDh4d+iy4=
|
||||||
|
github.com/sugarme/regexpset v0.0.0-20200920021344-4d4ec8eaf93c/go.mod h1:2gwkXLWbDGUQWeL3RtpCmcY4mzCtU13kb9UsAg9xMaw=
|
||||||
|
github.com/sugarme/tokenizer v0.3.0 h1:FE8DYbNSz/kSbgEo9l/RjgYHkIJYEdskumitFQBE9FE=
|
||||||
|
github.com/sugarme/tokenizer v0.3.0/go.mod h1:VJ+DLK5ZEZwzvODOWwY0cw+B1dabTd3nCB5HuFCItCc=
|
||||||
|
github.com/yalue/onnxruntime_go v1.27.0 h1:c1YSgDNtpf0WGtxj3YeRIb8VC5LmM1J+Ve3uHdteC1U=
|
||||||
|
github.com/yalue/onnxruntime_go v1.27.0/go.mod h1:b4X26A8pekNb1ACJ58wAXgNKeUCGEAQ9dmACut9Sm/4=
|
||||||
github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE=
|
github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
@@ -152,6 +179,8 @@ golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxb
|
|||||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
||||||
|
|||||||
186
helpfuncs.go
186
helpfuncs.go
@@ -11,15 +11,22 @@ import (
|
|||||||
"path"
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"slices"
|
"slices"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
"unicode"
|
"unicode"
|
||||||
|
|
||||||
|
"sync/atomic"
|
||||||
|
|
||||||
"github.com/rivo/tview"
|
"github.com/rivo/tview"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Cached model color - updated by background goroutine
|
// Cached model color - updated by background goroutine
|
||||||
var cachedModelColor string = "orange"
|
var cachedModelColor atomic.Value // stores string
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
cachedModelColor.Store("orange")
|
||||||
|
}
|
||||||
|
|
||||||
// startModelColorUpdater starts a background goroutine that periodically updates
|
// startModelColorUpdater starts a background goroutine that periodically updates
|
||||||
// the cached model color. Only runs HTTP requests for local llama.cpp APIs.
|
// the cached model color. Only runs HTTP requests for local llama.cpp APIs.
|
||||||
@@ -27,7 +34,6 @@ func startModelColorUpdater() {
|
|||||||
go func() {
|
go func() {
|
||||||
ticker := time.NewTicker(5 * time.Second)
|
ticker := time.NewTicker(5 * time.Second)
|
||||||
defer ticker.Stop()
|
defer ticker.Stop()
|
||||||
|
|
||||||
// Initial check
|
// Initial check
|
||||||
updateCachedModelColor()
|
updateCachedModelColor()
|
||||||
for range ticker.C {
|
for range ticker.C {
|
||||||
@@ -39,21 +45,20 @@ func startModelColorUpdater() {
|
|||||||
// updateCachedModelColor updates the global cachedModelColor variable
|
// updateCachedModelColor updates the global cachedModelColor variable
|
||||||
func updateCachedModelColor() {
|
func updateCachedModelColor() {
|
||||||
if !isLocalLlamacpp() {
|
if !isLocalLlamacpp() {
|
||||||
cachedModelColor = "orange"
|
cachedModelColor.Store("orange")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if model is loaded
|
// Check if model is loaded
|
||||||
loaded, err := isModelLoaded(chatBody.Model)
|
loaded, err := isModelLoaded(chatBody.GetModel())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// On error, assume not loaded (red)
|
// On error, assume not loaded (red)
|
||||||
cachedModelColor = "red"
|
cachedModelColor.Store("red")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if loaded {
|
if loaded {
|
||||||
cachedModelColor = "green"
|
cachedModelColor.Store("green")
|
||||||
} else {
|
} else {
|
||||||
cachedModelColor = "red"
|
cachedModelColor.Store("red")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -104,7 +109,7 @@ func refreshChatDisplay() {
|
|||||||
viewingAs = cfg.WriteNextMsgAs
|
viewingAs = cfg.WriteNextMsgAs
|
||||||
}
|
}
|
||||||
// Filter messages for this character
|
// Filter messages for this character
|
||||||
filteredMessages := filterMessagesForCharacter(chatBody.Messages, viewingAs)
|
filteredMessages := filterMessagesForCharacter(chatBody.GetMessages(), viewingAs)
|
||||||
displayText := chatToText(filteredMessages, cfg.ShowSys)
|
displayText := chatToText(filteredMessages, cfg.ShowSys)
|
||||||
textView.SetText(displayText)
|
textView.SetText(displayText)
|
||||||
colorText()
|
colorText()
|
||||||
@@ -199,7 +204,11 @@ func initSysCards() ([]string, error) {
|
|||||||
logger.Warn("empty role", "file", cc.FilePath)
|
logger.Warn("empty role", "file", cc.FilePath)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
sysMap[cc.Role] = cc
|
if cc.ID == "" {
|
||||||
|
cc.ID = models.ComputeCardID(cc.Role, cc.FilePath)
|
||||||
|
}
|
||||||
|
sysMap[cc.ID] = cc
|
||||||
|
roleToID[cc.Role] = cc.ID
|
||||||
labels = append(labels, cc.Role)
|
labels = append(labels, cc.Role)
|
||||||
}
|
}
|
||||||
return labels, nil
|
return labels, nil
|
||||||
@@ -214,8 +223,8 @@ func startNewChat(keepSysP bool) {
|
|||||||
logger.Warn("no such sys msg", "name", cfg.AssistantRole)
|
logger.Warn("no such sys msg", "name", cfg.AssistantRole)
|
||||||
}
|
}
|
||||||
// set chat body
|
// set chat body
|
||||||
chatBody.Messages = chatBody.Messages[:2]
|
chatBody.TruncateMessages(2)
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
newChat := &models.Chat{
|
newChat := &models.Chat{
|
||||||
ID: id + 1,
|
ID: id + 1,
|
||||||
Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole),
|
Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole),
|
||||||
@@ -288,24 +297,25 @@ func listRolesWithUser() []string {
|
|||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadImage() {
|
func loadImage() error {
|
||||||
filepath := defaultImage
|
filepath := defaultImage
|
||||||
cc, ok := sysMap[cfg.AssistantRole]
|
cc := GetCardByRole(cfg.AssistantRole)
|
||||||
if ok {
|
if cc != nil {
|
||||||
if strings.HasSuffix(cc.FilePath, ".png") {
|
if strings.HasSuffix(cc.FilePath, ".png") {
|
||||||
filepath = cc.FilePath
|
filepath = cc.FilePath
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file, err := os.Open(filepath)
|
file, err := os.Open(filepath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
return fmt.Errorf("failed to open image: %w", err)
|
||||||
}
|
}
|
||||||
defer file.Close()
|
defer file.Close()
|
||||||
img, _, err := image.Decode(file)
|
img, _, err := image.Decode(file)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
return fmt.Errorf("failed to decode image: %w", err)
|
||||||
}
|
}
|
||||||
imgView.SetImage(img)
|
imgView.SetImage(img)
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func strInSlice(s string, sl []string) bool {
|
func strInSlice(s string, sl []string) bool {
|
||||||
@@ -331,7 +341,7 @@ func isLocalLlamacpp() bool {
|
|||||||
// The cached value is updated by a background goroutine every 5 seconds.
|
// The cached value is updated by a background goroutine every 5 seconds.
|
||||||
// For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
|
// For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
|
||||||
func getModelColor() string {
|
func getModelColor() string {
|
||||||
return cachedModelColor
|
return cachedModelColor.Load().(string)
|
||||||
}
|
}
|
||||||
|
|
||||||
func makeStatusLine() string {
|
func makeStatusLine() string {
|
||||||
@@ -366,7 +376,7 @@ func makeStatusLine() string {
|
|||||||
// Get model color based on load status for local llama.cpp models
|
// Get model color based on load status for local llama.cpp models
|
||||||
modelColor := getModelColor()
|
modelColor := getModelColor()
|
||||||
statusLine := fmt.Sprintf(statusLineTempl, activeChatName,
|
statusLine := fmt.Sprintf(statusLineTempl, activeChatName,
|
||||||
boolColors[cfg.ToolUse], modelColor, chatBody.Model, boolColors[cfg.SkipLLMResp],
|
boolColors[cfg.ToolUse], modelColor, chatBody.GetModel(), boolColors[cfg.SkipLLMResp],
|
||||||
cfg.CurrentAPI, persona, botPersona)
|
cfg.CurrentAPI, persona, botPersona)
|
||||||
if cfg.STT_ENABLED {
|
if cfg.STT_ENABLED {
|
||||||
recordingS := fmt.Sprintf(" | [%s:-:b]voice recording[-:-:-] (ctrl+r)",
|
recordingS := fmt.Sprintf(" | [%s:-:b]voice recording[-:-:-] (ctrl+r)",
|
||||||
@@ -378,9 +388,98 @@ func makeStatusLine() string {
|
|||||||
roleInject := fmt.Sprintf(" | [%s:-:b]role injection[-:-:-] (alt+7)", boolColors[injectRole])
|
roleInject := fmt.Sprintf(" | [%s:-:b]role injection[-:-:-] (alt+7)", boolColors[injectRole])
|
||||||
statusLine += roleInject
|
statusLine += roleInject
|
||||||
}
|
}
|
||||||
|
// context tokens
|
||||||
|
contextTokens := getContextTokens()
|
||||||
|
maxCtx := getMaxContextTokens()
|
||||||
|
if maxCtx == 0 {
|
||||||
|
maxCtx = 16384
|
||||||
|
}
|
||||||
|
if contextTokens > 0 {
|
||||||
|
contextInfo := fmt.Sprintf(" | context-estim: [orange:-:b]%d/%d[-:-:-]", contextTokens, maxCtx)
|
||||||
|
statusLine += contextInfo
|
||||||
|
}
|
||||||
return statusLine + imageInfo + shellModeInfo
|
return statusLine + imageInfo + shellModeInfo
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getContextTokens() int {
|
||||||
|
if chatBody == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
total := 0
|
||||||
|
messages := chatBody.GetMessages()
|
||||||
|
for i := range messages {
|
||||||
|
msg := &messages[i]
|
||||||
|
if msg.Stats != nil && msg.Stats.Tokens > 0 {
|
||||||
|
total += msg.Stats.Tokens
|
||||||
|
} else if msg.GetText() != "" {
|
||||||
|
total += len(msg.GetText()) / 4
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return total
|
||||||
|
}
|
||||||
|
|
||||||
|
const deepseekContext = 128000
|
||||||
|
|
||||||
|
func getMaxContextTokens() int {
|
||||||
|
if chatBody == nil || chatBody.GetModel() == "" {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
modelName := chatBody.GetModel()
|
||||||
|
switch {
|
||||||
|
case strings.Contains(cfg.CurrentAPI, "openrouter"):
|
||||||
|
ord := orModelsData.Load()
|
||||||
|
if ord != nil {
|
||||||
|
data := ord.(*models.ORModels)
|
||||||
|
if data != nil {
|
||||||
|
for i := range data.Data {
|
||||||
|
m := &data.Data[i]
|
||||||
|
if m.ID == modelName {
|
||||||
|
return m.ContextLength
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case strings.Contains(cfg.CurrentAPI, "deepseek"):
|
||||||
|
return deepseekContext
|
||||||
|
default:
|
||||||
|
lmd := localModelsData.Load()
|
||||||
|
if lmd != nil {
|
||||||
|
data := lmd.(*models.LCPModels)
|
||||||
|
if data != nil {
|
||||||
|
for i := range data.Data {
|
||||||
|
m := &data.Data[i]
|
||||||
|
if m.ID == modelName {
|
||||||
|
for _, arg := range m.Status.Args {
|
||||||
|
if strings.HasPrefix(arg, "--ctx-size") {
|
||||||
|
if strings.Contains(arg, "=") {
|
||||||
|
val := strings.Split(arg, "=")[1]
|
||||||
|
if n, err := strconv.Atoi(val); err == nil {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
idx := -1
|
||||||
|
for j, a := range m.Status.Args {
|
||||||
|
if a == "--ctx-size" && j+1 < len(m.Status.Args) {
|
||||||
|
idx = j + 1
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if idx != -1 {
|
||||||
|
if n, err := strconv.Atoi(m.Status.Args[idx]); err == nil {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
// set of roles within card definition and mention in chat history
|
// set of roles within card definition and mention in chat history
|
||||||
func listChatRoles() []string {
|
func listChatRoles() []string {
|
||||||
currentChat, ok := chatMap[activeChatName]
|
currentChat, ok := chatMap[activeChatName]
|
||||||
@@ -388,13 +487,9 @@ func listChatRoles() []string {
|
|||||||
if !ok {
|
if !ok {
|
||||||
return cbc
|
return cbc
|
||||||
}
|
}
|
||||||
currentCard, ok := sysMap[currentChat.Agent]
|
currentCard := GetCardByRole(currentChat.Agent)
|
||||||
if !ok {
|
if currentCard == nil {
|
||||||
// case which won't let to switch roles:
|
logger.Warn("failed to find current card", "agent", currentChat.Agent)
|
||||||
// started new chat (basic_sys or any other), at the start it yet be saved or have chatbody
|
|
||||||
// if it does not have a card or chars, it'll return an empty slice
|
|
||||||
// log error
|
|
||||||
logger.Warn("failed to find current card in sysMap", "agent", currentChat.Agent, "sysMap", sysMap)
|
|
||||||
return cbc
|
return cbc
|
||||||
}
|
}
|
||||||
charset := []string{}
|
charset := []string{}
|
||||||
@@ -409,11 +504,8 @@ func listChatRoles() []string {
|
|||||||
|
|
||||||
func deepseekModelValidator() error {
|
func deepseekModelValidator() error {
|
||||||
if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI {
|
if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI {
|
||||||
if chatBody.Model != "deepseek-chat" && chatBody.Model != "deepseek-reasoner" {
|
if chatBody.GetModel() != "deepseek-chat" && chatBody.GetModel() != "deepseek-reasoner" {
|
||||||
if err := notifyUser("bad request", "wrong deepseek model name"); err != nil {
|
showToast("bad request", "wrong deepseek model name")
|
||||||
logger.Warn("failed ot notify user", "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -443,7 +535,7 @@ func updateFlexLayout() {
|
|||||||
if shellMode {
|
if shellMode {
|
||||||
flex.AddItem(shellInput, 0, 10, false)
|
flex.AddItem(shellInput, 0, 10, false)
|
||||||
} else {
|
} else {
|
||||||
flex.AddItem(textArea, 0, 10, false)
|
flex.AddItem(bottomFlex, 0, 10, true)
|
||||||
}
|
}
|
||||||
if positionVisible {
|
if positionVisible {
|
||||||
flex.AddItem(statusLineWidget, 0, 2, false)
|
flex.AddItem(statusLineWidget, 0, 2, false)
|
||||||
@@ -489,13 +581,13 @@ func executeCommandAndDisplay(cmdText string) {
|
|||||||
outputContent := workingDir
|
outputContent := workingDir
|
||||||
// Add the command being executed to the chat
|
// Add the command being executed to the chat
|
||||||
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||||
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
chatBody.GetMessageCount(), cfg.ToolRole, cmdText)
|
||||||
fmt.Fprintf(textView, "%s\n", outputContent)
|
fmt.Fprintf(textView, "%s\n", outputContent)
|
||||||
combinedMsg := models.RoleMsg{
|
combinedMsg := models.RoleMsg{
|
||||||
Role: cfg.ToolRole,
|
Role: cfg.ToolRole,
|
||||||
Content: "$ " + cmdText + "\n\n" + outputContent,
|
Content: "$ " + cmdText + "\n\n" + outputContent,
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
chatBody.AppendMessage(combinedMsg)
|
||||||
if scrollToEndEnabled {
|
if scrollToEndEnabled {
|
||||||
textView.ScrollToEnd()
|
textView.ScrollToEnd()
|
||||||
}
|
}
|
||||||
@@ -504,13 +596,13 @@ func executeCommandAndDisplay(cmdText string) {
|
|||||||
} else {
|
} else {
|
||||||
outputContent := "cd: " + newDir + ": No such file or directory"
|
outputContent := "cd: " + newDir + ": No such file or directory"
|
||||||
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||||
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
chatBody.GetMessageCount(), cfg.ToolRole, cmdText)
|
||||||
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputContent)
|
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputContent)
|
||||||
combinedMsg := models.RoleMsg{
|
combinedMsg := models.RoleMsg{
|
||||||
Role: cfg.ToolRole,
|
Role: cfg.ToolRole,
|
||||||
Content: "$ " + cmdText + "\n\n" + outputContent,
|
Content: "$ " + cmdText + "\n\n" + outputContent,
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
chatBody.AppendMessage(combinedMsg)
|
||||||
if scrollToEndEnabled {
|
if scrollToEndEnabled {
|
||||||
textView.ScrollToEnd()
|
textView.ScrollToEnd()
|
||||||
}
|
}
|
||||||
@@ -526,7 +618,7 @@ func executeCommandAndDisplay(cmdText string) {
|
|||||||
output, err := cmd.CombinedOutput()
|
output, err := cmd.CombinedOutput()
|
||||||
// Add the command being executed to the chat
|
// Add the command being executed to the chat
|
||||||
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||||
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
chatBody.GetMessageCount(), cfg.ToolRole, cmdText)
|
||||||
var outputContent string
|
var outputContent string
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// Include both output and error
|
// Include both output and error
|
||||||
@@ -557,7 +649,7 @@ func executeCommandAndDisplay(cmdText string) {
|
|||||||
Role: cfg.ToolRole,
|
Role: cfg.ToolRole,
|
||||||
Content: combinedContent,
|
Content: combinedContent,
|
||||||
}
|
}
|
||||||
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
chatBody.AppendMessage(combinedMsg)
|
||||||
// Scroll to end and update colors
|
// Scroll to end and update colors
|
||||||
if scrollToEndEnabled {
|
if scrollToEndEnabled {
|
||||||
textView.ScrollToEnd()
|
textView.ScrollToEnd()
|
||||||
@@ -587,7 +679,7 @@ func performSearch(term string) {
|
|||||||
searchResultLengths = nil
|
searchResultLengths = nil
|
||||||
originalTextForSearch = ""
|
originalTextForSearch = ""
|
||||||
// Re-render text without highlights
|
// Re-render text without highlights
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -613,9 +705,7 @@ func performSearch(term string) {
|
|||||||
searchResults = nil
|
searchResults = nil
|
||||||
searchResultLengths = nil
|
searchResultLengths = nil
|
||||||
notification := "Pattern not found: " + term
|
notification := "Pattern not found: " + term
|
||||||
if err := notifyUser("search", notification); err != nil {
|
showToast("search", notification)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Store the formatted text positions and lengths for accurate highlighting
|
// Store the formatted text positions and lengths for accurate highlighting
|
||||||
@@ -648,9 +738,7 @@ func highlightCurrentMatch() {
|
|||||||
textView.Highlight(currentRegion).ScrollToHighlight()
|
textView.Highlight(currentRegion).ScrollToHighlight()
|
||||||
// Send notification about which match we're at
|
// Send notification about which match we're at
|
||||||
notification := fmt.Sprintf("Match %d of %d", searchIndex+1, len(searchResults))
|
notification := fmt.Sprintf("Match %d of %d", searchIndex+1, len(searchResults))
|
||||||
if err := notifyUser("search", notification); err != nil {
|
showToast("search", notification)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// showSearchBar shows the search input field as an overlay
|
// showSearchBar shows the search input field as an overlay
|
||||||
@@ -740,9 +828,7 @@ func addRegionTags(text string, positions []int, lengths []int, currentIdx int,
|
|||||||
// searchNext finds the next occurrence of the search term
|
// searchNext finds the next occurrence of the search term
|
||||||
func searchNext() {
|
func searchNext() {
|
||||||
if len(searchResults) == 0 {
|
if len(searchResults) == 0 {
|
||||||
if err := notifyUser("search", "No search results to navigate"); err != nil {
|
showToast("search", "No search results to navigate")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
searchIndex = (searchIndex + 1) % len(searchResults)
|
searchIndex = (searchIndex + 1) % len(searchResults)
|
||||||
@@ -752,9 +838,7 @@ func searchNext() {
|
|||||||
// searchPrev finds the previous occurrence of the search term
|
// searchPrev finds the previous occurrence of the search term
|
||||||
func searchPrev() {
|
func searchPrev() {
|
||||||
if len(searchResults) == 0 {
|
if len(searchResults) == 0 {
|
||||||
if err := notifyUser("search", "No search results to navigate"); err != nil {
|
showToast("search", "No search results to navigate")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if searchIndex == 0 {
|
if searchIndex == 0 {
|
||||||
|
|||||||
153
llm.go
153
llm.go
@@ -3,7 +3,6 @@ package main
|
|||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
"io"
|
"io"
|
||||||
"strings"
|
"strings"
|
||||||
@@ -14,8 +13,9 @@ var lastImg string // for ctrl+j
|
|||||||
|
|
||||||
// containsToolSysMsg checks if the toolSysMsg already exists in the chat body
|
// containsToolSysMsg checks if the toolSysMsg already exists in the chat body
|
||||||
func containsToolSysMsg() bool {
|
func containsToolSysMsg() bool {
|
||||||
for i := range chatBody.Messages {
|
messages := chatBody.GetMessages()
|
||||||
if chatBody.Messages[i].Role == cfg.ToolRole && chatBody.Messages[i].Content == toolSysMsg {
|
for i := range messages {
|
||||||
|
if messages[i].Role == cfg.ToolRole && messages[i].Content == toolSysMsg {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -119,49 +119,64 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
logger.Debug("formmsg lcpcompletion", "link", cfg.CurrentAPI)
|
logger.Debug("formmsg lcpcompletion", "link", cfg.CurrentAPI)
|
||||||
localImageAttachmentPath := imageAttachmentPath
|
localImageAttachmentPath := imageAttachmentPath
|
||||||
var multimodalData []string
|
var multimodalData []string
|
||||||
if localImageAttachmentPath != "" {
|
|
||||||
imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("failed to create image URL from path for completion",
|
|
||||||
"error", err, "path", localImageAttachmentPath)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// Extract base64 part from data URL (e.g., "data:image/jpeg;base64,...")
|
|
||||||
parts := strings.SplitN(imageURL, ",", 2)
|
|
||||||
if len(parts) == 2 {
|
|
||||||
multimodalData = append(multimodalData, parts[1])
|
|
||||||
} else {
|
|
||||||
logger.Error("invalid image data URL format", "url", imageURL)
|
|
||||||
return nil, errors.New("invalid image data URL format")
|
|
||||||
}
|
|
||||||
imageAttachmentPath = "" // Clear the attachment after use
|
|
||||||
}
|
|
||||||
if msg != "" { // otherwise let the bot to continue
|
if msg != "" { // otherwise let the bot to continue
|
||||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
var newMsg models.RoleMsg
|
||||||
|
if localImageAttachmentPath != "" {
|
||||||
|
newMsg = models.NewMultimodalMsg(role, []any{})
|
||||||
|
newMsg.AddTextPart(msg)
|
||||||
|
imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("failed to create image URL from path for completion",
|
||||||
|
"error", err, "path", localImageAttachmentPath)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
newMsg.AddImagePart(imageURL, localImageAttachmentPath)
|
||||||
|
imageAttachmentPath = "" // Clear the attachment after use
|
||||||
|
} else { // not a multimodal msg or image passed in tool call
|
||||||
|
newMsg = models.RoleMsg{Role: role, Content: msg}
|
||||||
|
}
|
||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.AppendMessage(newMsg)
|
||||||
}
|
}
|
||||||
// sending description of the tools and how to use them
|
// sending description of the tools and how to use them
|
||||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||||
}
|
}
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||||
|
// Build prompt and extract images inline as we process each message
|
||||||
messages := make([]string, len(filteredMessages))
|
messages := make([]string, len(filteredMessages))
|
||||||
for i := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
m := stripThinkingFromMsg(&filteredMessages[i])
|
||||||
|
messages[i] = m.ToPrompt()
|
||||||
|
// Extract images from this message and add marker inline
|
||||||
|
if len(m.ContentParts) > 0 {
|
||||||
|
for _, part := range m.ContentParts {
|
||||||
|
var imgURL string
|
||||||
|
// Check for struct type
|
||||||
|
if imgPart, ok := part.(models.ImageContentPart); ok {
|
||||||
|
imgURL = imgPart.ImageURL.URL
|
||||||
|
} else if partMap, ok := part.(map[string]any); ok {
|
||||||
|
// Check for map type (from JSON unmarshaling)
|
||||||
|
if partType, exists := partMap["type"]; exists && partType == "image_url" {
|
||||||
|
if imgURLMap, ok := partMap["image_url"].(map[string]any); ok {
|
||||||
|
if url, ok := imgURLMap["url"].(string); ok {
|
||||||
|
imgURL = url
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if imgURL != "" {
|
||||||
|
// Extract base64 part from data URL (e.g., "data:image/jpeg;base64,...")
|
||||||
|
parts := strings.SplitN(imgURL, ",", 2)
|
||||||
|
if len(parts) == 2 {
|
||||||
|
multimodalData = append(multimodalData, parts[1])
|
||||||
|
messages[i] += " <__media__>"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
prompt := strings.Join(messages, "\n")
|
prompt := strings.Join(messages, "\n")
|
||||||
// Add multimodal media markers to the prompt text when multimodal data is present
|
|
||||||
// This is required by llama.cpp multimodal models so they know where to insert media
|
|
||||||
if len(multimodalData) > 0 {
|
|
||||||
// Add a media marker for each item in the multimodal data
|
|
||||||
var sb strings.Builder
|
|
||||||
sb.WriteString(prompt)
|
|
||||||
for range multimodalData {
|
|
||||||
sb.WriteString(" <__media__>") // llama.cpp default multimodal marker
|
|
||||||
}
|
|
||||||
prompt = sb.String()
|
|
||||||
}
|
|
||||||
// needs to be after <__media__> if there are images
|
// needs to be after <__media__> if there are images
|
||||||
if !resume {
|
if !resume {
|
||||||
botMsgStart := "\n" + botPersona + ":\n"
|
botMsgStart := "\n" + botPersona + ":\n"
|
||||||
@@ -169,7 +184,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
}
|
}
|
||||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||||
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
|
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
|
||||||
payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
|
payload := models.NewLCPReq(prompt, chatBody.GetModel(), multimodalData,
|
||||||
defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles()))
|
defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles()))
|
||||||
data, err := json.Marshal(payload)
|
data, err := json.Marshal(payload)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -210,11 +225,9 @@ func (op LCPChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle multiple choices safely
|
|
||||||
if len(llmchunk.Choices) == 0 {
|
if len(llmchunk.Choices) == 0 {
|
||||||
logger.Warn("LCPChat ParseChunk: no choices in response", "data", string(data))
|
logger.Warn("LCPChat empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
return &models.TextChunk{Finished: true}, nil
|
return &models.TextChunk{}, nil
|
||||||
}
|
}
|
||||||
lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
|
lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
@@ -277,17 +290,17 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
|
|||||||
newMsg = models.NewRoleMsg(role, msg)
|
newMsg = models.NewRoleMsg(role, msg)
|
||||||
}
|
}
|
||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.AppendMessage(newMsg)
|
||||||
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
|
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
|
||||||
"content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
|
"content_len", len(newMsg.Content), "message_count_after_add", chatBody.GetMessageCount())
|
||||||
}
|
}
|
||||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||||
// openai /v1/chat does not support custom roles; needs to be user, assistant, system
|
// openai /v1/chat does not support custom roles; needs to be user, assistant, system
|
||||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||||
bodyCopy := &models.ChatBody{
|
bodyCopy := &models.ChatBody{
|
||||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||||
Model: chatBody.Model,
|
Model: chatBody.GetModel(),
|
||||||
Stream: chatBody.Stream,
|
Stream: chatBody.GetStream(),
|
||||||
}
|
}
|
||||||
for i := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||||
@@ -335,6 +348,10 @@ func (ds DeepSeekerCompletion) ParseChunk(data []byte) (*models.TextChunk, error
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return &models.TextChunk{}, nil
|
||||||
|
}
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
Chunk: llmchunk.Choices[0].Text,
|
Chunk: llmchunk.Choices[0].Text,
|
||||||
}
|
}
|
||||||
@@ -359,13 +376,13 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
if msg != "" { // otherwise let the bot to continue
|
if msg != "" { // otherwise let the bot to continue
|
||||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
newMsg := models.RoleMsg{Role: role, Content: msg}
|
||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.AppendMessage(newMsg)
|
||||||
}
|
}
|
||||||
// sending description of the tools and how to use them
|
// sending description of the tools and how to use them
|
||||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||||
}
|
}
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||||
messages := make([]string, len(filteredMessages))
|
messages := make([]string, len(filteredMessages))
|
||||||
for i := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
||||||
@@ -378,7 +395,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
}
|
}
|
||||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||||
"msg", msg, "resume", resume, "prompt", prompt)
|
"msg", msg, "resume", resume, "prompt", prompt)
|
||||||
payload := models.NewDSCompletionReq(prompt, chatBody.Model,
|
payload := models.NewDSCompletionReq(prompt, chatBody.GetModel(),
|
||||||
defaultLCPProps["temp"],
|
defaultLCPProps["temp"],
|
||||||
chatBody.MakeStopSliceExcluding("", listChatRoles()))
|
chatBody.MakeStopSliceExcluding("", listChatRoles()))
|
||||||
data, err := json.Marshal(payload)
|
data, err := json.Marshal(payload)
|
||||||
@@ -400,6 +417,10 @@ func (ds DeepSeekerChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
resp := &models.TextChunk{}
|
resp := &models.TextChunk{}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return resp, nil
|
||||||
|
}
|
||||||
if llmchunk.Choices[0].FinishReason != "" {
|
if llmchunk.Choices[0].FinishReason != "" {
|
||||||
if llmchunk.Choices[0].Delta.Content != "" {
|
if llmchunk.Choices[0].Delta.Content != "" {
|
||||||
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
||||||
@@ -428,15 +449,15 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
if msg != "" { // otherwise let the bot continue
|
if msg != "" { // otherwise let the bot continue
|
||||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
newMsg := models.RoleMsg{Role: role, Content: msg}
|
||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.AppendMessage(newMsg)
|
||||||
}
|
}
|
||||||
// Create copy of chat body with standardized user role
|
// Create copy of chat body with standardized user role
|
||||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||||
bodyCopy := &models.ChatBody{
|
bodyCopy := &models.ChatBody{
|
||||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||||
Model: chatBody.Model,
|
Model: chatBody.GetModel(),
|
||||||
Stream: chatBody.Stream,
|
Stream: chatBody.GetStream(),
|
||||||
}
|
}
|
||||||
for i := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||||
@@ -482,6 +503,10 @@ func (or OpenRouterCompletion) ParseChunk(data []byte) (*models.TextChunk, error
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return &models.TextChunk{}, nil
|
||||||
|
}
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Text,
|
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Text,
|
||||||
}
|
}
|
||||||
@@ -503,13 +528,13 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
if msg != "" { // otherwise let the bot to continue
|
if msg != "" { // otherwise let the bot to continue
|
||||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
newMsg := models.RoleMsg{Role: role, Content: msg}
|
||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.AppendMessage(newMsg)
|
||||||
}
|
}
|
||||||
// sending description of the tools and how to use them
|
// sending description of the tools and how to use them
|
||||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
chatBody.AppendMessage(models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
||||||
}
|
}
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||||
messages := make([]string, len(filteredMessages))
|
messages := make([]string, len(filteredMessages))
|
||||||
for i := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
||||||
@@ -523,7 +548,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
|
stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
|
||||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||||
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
|
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
|
||||||
payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt,
|
payload := models.NewOpenRouterCompletionReq(chatBody.GetModel(), prompt,
|
||||||
defaultLCPProps, stopSlice)
|
defaultLCPProps, stopSlice)
|
||||||
data, err := json.Marshal(payload)
|
data, err := json.Marshal(payload)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -544,6 +569,10 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return &models.TextChunk{}, nil
|
||||||
|
}
|
||||||
lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
|
lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
Chunk: lastChoice.Delta.Content,
|
Chunk: lastChoice.Delta.Content,
|
||||||
@@ -605,15 +634,15 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
newMsg = models.NewRoleMsg(role, msg)
|
newMsg = models.NewRoleMsg(role, msg)
|
||||||
}
|
}
|
||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.AppendMessage(newMsg)
|
||||||
}
|
}
|
||||||
// Create copy of chat body with standardized user role
|
// Create copy of chat body with standardized user role
|
||||||
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.GetMessages())
|
||||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||||
bodyCopy := &models.ChatBody{
|
bodyCopy := &models.ChatBody{
|
||||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||||
Model: chatBody.Model,
|
Model: chatBody.GetModel(),
|
||||||
Stream: chatBody.Stream,
|
Stream: chatBody.GetStream(),
|
||||||
}
|
}
|
||||||
for i := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||||
|
|||||||
3
main.go
3
main.go
@@ -17,8 +17,9 @@ var (
|
|||||||
shellHistoryPos int = -1
|
shellHistoryPos int = -1
|
||||||
thinkingCollapsed = false
|
thinkingCollapsed = false
|
||||||
toolCollapsed = true
|
toolCollapsed = true
|
||||||
statusLineTempl = "help (F12) | chat: [orange:-:b]%s[-:-:-] (F1) | [%s:-:b]tool use[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | [%s:-:b]skip LLM resp[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x)"
|
statusLineTempl = "help (F12) | chat: [orange:-:b]%s[-:-:-] (F1) | [%s:-:b]tool use[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | [%s:-:b]skip LLM resp[-:-:-] (F10) | API: [orange:-:b]%s[-:-:-] (ctrl+v)\nwriting as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x)"
|
||||||
focusSwitcher = map[tview.Primitive]tview.Primitive{}
|
focusSwitcher = map[tview.Primitive]tview.Primitive{}
|
||||||
|
app *tview.Application
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
|||||||
@@ -1,6 +1,10 @@
|
|||||||
package models
|
package models
|
||||||
|
|
||||||
import "strings"
|
import (
|
||||||
|
"crypto/md5"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
// https://github.com/malfoyslastname/character-card-spec-v2/blob/main/spec_v2.md
|
// https://github.com/malfoyslastname/character-card-spec-v2/blob/main/spec_v2.md
|
||||||
// what a bloat; trim to Role->Msg pair and first msg
|
// what a bloat; trim to Role->Msg pair and first msg
|
||||||
@@ -31,6 +35,7 @@ func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
|
|||||||
fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
|
fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
|
||||||
sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)
|
sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)
|
||||||
return &CharCard{
|
return &CharCard{
|
||||||
|
ID: ComputeCardID(c.Name, fpath),
|
||||||
SysPrompt: sysPr,
|
SysPrompt: sysPr,
|
||||||
FirstMsg: fm,
|
FirstMsg: fm,
|
||||||
Role: c.Name,
|
Role: c.Name,
|
||||||
@@ -39,7 +44,12 @@ func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func ComputeCardID(role, filePath string) string {
|
||||||
|
return fmt.Sprintf("%x", md5.Sum([]byte(role+filePath)))
|
||||||
|
}
|
||||||
|
|
||||||
type CharCard struct {
|
type CharCard struct {
|
||||||
|
ID string `json:"id"`
|
||||||
SysPrompt string `json:"sys_prompt"`
|
SysPrompt string `json:"sys_prompt"`
|
||||||
FirstMsg string `json:"first_msg"`
|
FirstMsg string `json:"first_msg"`
|
||||||
Role string `json:"role"`
|
Role string `json:"role"`
|
||||||
|
|||||||
13
models/consts.go
Normal file
13
models/consts.go
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
const (
|
||||||
|
LoadedMark = "(loaded) "
|
||||||
|
ToolRespMultyType = "multimodel_content"
|
||||||
|
)
|
||||||
|
|
||||||
|
type APIType int
|
||||||
|
|
||||||
|
const (
|
||||||
|
APITypeChat APIType = iota
|
||||||
|
APITypeCompletion
|
||||||
|
)
|
||||||
294
models/models.go
294
models/models.go
@@ -6,6 +6,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync"
|
||||||
)
|
)
|
||||||
|
|
||||||
type FuncCall struct {
|
type FuncCall struct {
|
||||||
@@ -391,7 +392,6 @@ func CreateImageURLFromPath(imagePath string) (string, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine the image format based on file extension
|
// Determine the image format based on file extension
|
||||||
var mimeType string
|
var mimeType string
|
||||||
switch {
|
switch {
|
||||||
@@ -408,10 +408,8 @@ func CreateImageURLFromPath(imagePath string) (string, error) {
|
|||||||
default:
|
default:
|
||||||
mimeType = "image/jpeg" // default
|
mimeType = "image/jpeg" // default
|
||||||
}
|
}
|
||||||
|
|
||||||
// Encode to base64
|
// Encode to base64
|
||||||
encoded := base64.StdEncoding.EncodeToString(data)
|
encoded := base64.StdEncoding.EncodeToString(data)
|
||||||
|
|
||||||
// Create data URL
|
// Create data URL
|
||||||
return fmt.Sprintf("data:%s;base64,%s", mimeType, encoded), nil
|
return fmt.Sprintf("data:%s;base64,%s", mimeType, encoded), nil
|
||||||
}
|
}
|
||||||
@@ -519,24 +517,6 @@ type OpenAIReq struct {
|
|||||||
|
|
||||||
// ===
|
// ===
|
||||||
|
|
||||||
// type LLMModels struct {
|
|
||||||
// Object string `json:"object"`
|
|
||||||
// Data []struct {
|
|
||||||
// ID string `json:"id"`
|
|
||||||
// Object string `json:"object"`
|
|
||||||
// Created int `json:"created"`
|
|
||||||
// OwnedBy string `json:"owned_by"`
|
|
||||||
// Meta struct {
|
|
||||||
// VocabType int `json:"vocab_type"`
|
|
||||||
// NVocab int `json:"n_vocab"`
|
|
||||||
// NCtxTrain int `json:"n_ctx_train"`
|
|
||||||
// NEmbd int `json:"n_embd"`
|
|
||||||
// NParams int64 `json:"n_params"`
|
|
||||||
// Size int64 `json:"size"`
|
|
||||||
// } `json:"meta"`
|
|
||||||
// } `json:"data"`
|
|
||||||
// }
|
|
||||||
|
|
||||||
type LlamaCPPReq struct {
|
type LlamaCPPReq struct {
|
||||||
Model string `json:"model"`
|
Model string `json:"model"`
|
||||||
Stream bool `json:"stream"`
|
Stream bool `json:"stream"`
|
||||||
@@ -629,6 +609,20 @@ func (lcp *LCPModels) ListModels() []string {
|
|||||||
return resp
|
return resp
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (lcp *LCPModels) HasVision(modelID string) bool {
|
||||||
|
for _, m := range lcp.Data {
|
||||||
|
if m.ID == modelID {
|
||||||
|
args := m.Status.Args
|
||||||
|
for i := 0; i < len(args)-1; i++ {
|
||||||
|
if args[i] == "--mmproj" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
type ResponseStats struct {
|
type ResponseStats struct {
|
||||||
Tokens int
|
Tokens int
|
||||||
Duration float64
|
Duration float64
|
||||||
@@ -642,9 +636,257 @@ type ChatRoundReq struct {
|
|||||||
Resume bool
|
Resume bool
|
||||||
}
|
}
|
||||||
|
|
||||||
type APIType int
|
type MultimodalToolResp struct {
|
||||||
|
Type string `json:"type"`
|
||||||
|
Parts []map[string]string `json:"parts"`
|
||||||
|
}
|
||||||
|
|
||||||
const (
|
// SafeChatBody is a thread-safe wrapper around ChatBody using RWMutex.
|
||||||
APITypeChat APIType = iota
|
// This allows safe concurrent access to chat state from multiple goroutines.
|
||||||
APITypeCompletion
|
type SafeChatBody struct {
|
||||||
)
|
mu sync.RWMutex
|
||||||
|
ChatBody
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSafeChatBody creates a new SafeChatBody from an existing ChatBody.
|
||||||
|
// If cb is nil, creates an empty ChatBody.
|
||||||
|
func NewSafeChatBody(cb *ChatBody) *SafeChatBody {
|
||||||
|
if cb == nil {
|
||||||
|
return &SafeChatBody{
|
||||||
|
ChatBody: ChatBody{
|
||||||
|
Messages: []RoleMsg{},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &SafeChatBody{
|
||||||
|
ChatBody: *cb,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetModel returns the model name (thread-safe read).
|
||||||
|
func (s *SafeChatBody) GetModel() string {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
return s.Model
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetModel sets the model name (thread-safe write).
|
||||||
|
func (s *SafeChatBody) SetModel(model string) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
s.Model = model
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetStream returns the stream flag (thread-safe read).
|
||||||
|
func (s *SafeChatBody) GetStream() bool {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
return s.Stream
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetStream sets the stream flag (thread-safe write).
|
||||||
|
func (s *SafeChatBody) SetStream(stream bool) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
s.Stream = stream
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMessages returns a copy of all messages (thread-safe read).
|
||||||
|
// Returns a copy to prevent race conditions after the lock is released.
|
||||||
|
func (s *SafeChatBody) GetMessages() []RoleMsg {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
// Return a copy to prevent external modification
|
||||||
|
messagesCopy := make([]RoleMsg, len(s.Messages))
|
||||||
|
copy(messagesCopy, s.Messages)
|
||||||
|
return messagesCopy
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetMessages replaces all messages (thread-safe write).
|
||||||
|
func (s *SafeChatBody) SetMessages(messages []RoleMsg) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
s.Messages = messages
|
||||||
|
}
|
||||||
|
|
||||||
|
// AppendMessage adds a message to the end (thread-safe write).
|
||||||
|
func (s *SafeChatBody) AppendMessage(msg RoleMsg) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
s.Messages = append(s.Messages, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMessageAt returns a message at a specific index (thread-safe read).
|
||||||
|
// Returns the message and a boolean indicating if the index was valid.
|
||||||
|
func (s *SafeChatBody) GetMessageAt(index int) (RoleMsg, bool) {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
if index < 0 || index >= len(s.Messages) {
|
||||||
|
return RoleMsg{}, false
|
||||||
|
}
|
||||||
|
return s.Messages[index], true
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetMessageAt updates a message at a specific index (thread-safe write).
|
||||||
|
// Returns false if index is out of bounds.
|
||||||
|
func (s *SafeChatBody) SetMessageAt(index int, msg RoleMsg) bool {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
if index < 0 || index >= len(s.Messages) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
s.Messages[index] = msg
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetLastMessage returns the last message (thread-safe read).
|
||||||
|
// Returns the message and a boolean indicating if the chat has messages.
|
||||||
|
func (s *SafeChatBody) GetLastMessage() (RoleMsg, bool) {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
if len(s.Messages) == 0 {
|
||||||
|
return RoleMsg{}, false
|
||||||
|
}
|
||||||
|
return s.Messages[len(s.Messages)-1], true
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMessageCount returns the number of messages (thread-safe read).
|
||||||
|
func (s *SafeChatBody) GetMessageCount() int {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
return len(s.Messages)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveLastMessage removes the last message (thread-safe write).
|
||||||
|
// Returns false if there are no messages.
|
||||||
|
func (s *SafeChatBody) RemoveLastMessage() bool {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
if len(s.Messages) == 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
s.Messages = s.Messages[:len(s.Messages)-1]
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// TruncateMessages keeps only the first n messages (thread-safe write).
|
||||||
|
func (s *SafeChatBody) TruncateMessages(n int) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
if n < len(s.Messages) {
|
||||||
|
s.Messages = s.Messages[:n]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearMessages removes all messages (thread-safe write).
|
||||||
|
func (s *SafeChatBody) ClearMessages() {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
s.Messages = []RoleMsg{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rename renames all occurrences of oldname to newname in messages (thread-safe read-modify-write).
|
||||||
|
func (s *SafeChatBody) Rename(oldname, newname string) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
for i := range s.Messages {
|
||||||
|
s.Messages[i].Content = strings.ReplaceAll(s.Messages[i].Content, oldname, newname)
|
||||||
|
s.Messages[i].Role = strings.ReplaceAll(s.Messages[i].Role, oldname, newname)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListRoles returns all unique roles in messages (thread-safe read).
|
||||||
|
func (s *SafeChatBody) ListRoles() []string {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
namesMap := make(map[string]struct{})
|
||||||
|
for i := range s.Messages {
|
||||||
|
namesMap[s.Messages[i].Role] = struct{}{}
|
||||||
|
}
|
||||||
|
resp := make([]string, len(namesMap))
|
||||||
|
i := 0
|
||||||
|
for k := range namesMap {
|
||||||
|
resp[i] = k
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
return resp
|
||||||
|
}
|
||||||
|
|
||||||
|
// MakeStopSlice returns stop strings for all roles (thread-safe read).
|
||||||
|
func (s *SafeChatBody) MakeStopSlice() []string {
|
||||||
|
return s.MakeStopSliceExcluding("", s.ListRoles())
|
||||||
|
}
|
||||||
|
|
||||||
|
// MakeStopSliceExcluding returns stop strings excluding a specific role (thread-safe read).
|
||||||
|
func (s *SafeChatBody) MakeStopSliceExcluding(excludeRole string, roleList []string) []string {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
ss := []string{}
|
||||||
|
for _, role := range roleList {
|
||||||
|
if role == excludeRole {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
ss = append(ss,
|
||||||
|
role+":\n",
|
||||||
|
role+":",
|
||||||
|
role+": ",
|
||||||
|
role+": ",
|
||||||
|
role+": \n",
|
||||||
|
role+": ",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return ss
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateMessageFunc updates a message at index using a provided function.
|
||||||
|
// The function receives the current message and returns the updated message.
|
||||||
|
// This is atomic and thread-safe (read-modify-write under single lock).
|
||||||
|
// Returns false if index is out of bounds.
|
||||||
|
func (s *SafeChatBody) UpdateMessageFunc(index int, updater func(RoleMsg) RoleMsg) bool {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
if index < 0 || index >= len(s.Messages) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
s.Messages[index] = updater(s.Messages[index])
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// AppendMessageFunc appends a new message created by a provided function.
|
||||||
|
// The function receives the current message count and returns the new message.
|
||||||
|
// This is atomic and thread-safe.
|
||||||
|
func (s *SafeChatBody) AppendMessageFunc(creator func(count int) RoleMsg) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
msg := creator(len(s.Messages))
|
||||||
|
s.Messages = append(s.Messages, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetMessagesForLLM returns a filtered copy of messages for sending to LLM.
|
||||||
|
// This is thread-safe and returns a copy safe for external modification.
|
||||||
|
func (s *SafeChatBody) GetMessagesForLLM(filterFunc func([]RoleMsg) []RoleMsg) []RoleMsg {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
if filterFunc == nil {
|
||||||
|
messagesCopy := make([]RoleMsg, len(s.Messages))
|
||||||
|
copy(messagesCopy, s.Messages)
|
||||||
|
return messagesCopy
|
||||||
|
}
|
||||||
|
return filterFunc(s.Messages)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithLock executes a function while holding the write lock.
|
||||||
|
// Use this for complex operations that need to be atomic.
|
||||||
|
func (s *SafeChatBody) WithLock(fn func(*ChatBody)) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
fn(&s.ChatBody)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithRLock executes a function while holding the read lock.
|
||||||
|
// Use this for complex read-only operations.
|
||||||
|
func (s *SafeChatBody) WithRLock(fn func(*ChatBody)) {
|
||||||
|
s.mu.RLock()
|
||||||
|
defer s.mu.RUnlock()
|
||||||
|
fn(&s.ChatBody)
|
||||||
|
}
|
||||||
|
|||||||
@@ -172,3 +172,16 @@ func (orm *ORModels) ListModels(free bool) []string {
|
|||||||
}
|
}
|
||||||
return resp
|
return resp
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (orm *ORModels) HasVision(modelID string) bool {
|
||||||
|
for i := range orm.Data {
|
||||||
|
if orm.Data[i].ID == modelID {
|
||||||
|
for _, mod := range orm.Data[i].Architecture.InputModalities {
|
||||||
|
if mod == "image" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|||||||
@@ -109,6 +109,12 @@ func ReadCardJson(fname string) (*models.CharCard, error) {
|
|||||||
if err := json.Unmarshal(data, &card); err != nil {
|
if err := json.Unmarshal(data, &card); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if card.FilePath == "" {
|
||||||
|
card.FilePath = fname
|
||||||
|
}
|
||||||
|
if card.ID == "" {
|
||||||
|
card.ID = models.ComputeCardID(card.Role, card.FilePath)
|
||||||
|
}
|
||||||
return &card, nil
|
return &card, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
108
popups.go
108
popups.go
@@ -1,6 +1,7 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"gf-lt/models"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@@ -21,7 +22,7 @@ func showModelSelectionPopup() {
|
|||||||
models, err := fetchLCPModelsWithLoadStatus()
|
models, err := fetchLCPModelsWithLoadStatus()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to fetch models with load status", "error", err)
|
logger.Error("failed to fetch models with load status", "error", err)
|
||||||
return LocalModels
|
return LocalModels.Load().([]string)
|
||||||
}
|
}
|
||||||
return models
|
return models
|
||||||
}
|
}
|
||||||
@@ -29,7 +30,8 @@ func showModelSelectionPopup() {
|
|||||||
modelList := getModelListForAPI(cfg.CurrentAPI)
|
modelList := getModelListForAPI(cfg.CurrentAPI)
|
||||||
// Check for empty options list
|
// Check for empty options list
|
||||||
if len(modelList) == 0 {
|
if len(modelList) == 0 {
|
||||||
logger.Warn("empty model list for", "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
|
localModels := LocalModels.Load().([]string)
|
||||||
|
logger.Warn("empty model list for", "api", cfg.CurrentAPI, "localModelsLen", len(localModels), "orModelsLen", len(ORFreeModels))
|
||||||
var message string
|
var message string
|
||||||
switch {
|
switch {
|
||||||
case strings.Contains(cfg.CurrentAPI, "openrouter.ai"):
|
case strings.Contains(cfg.CurrentAPI, "openrouter.ai"):
|
||||||
@@ -39,9 +41,7 @@ func showModelSelectionPopup() {
|
|||||||
default:
|
default:
|
||||||
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
||||||
}
|
}
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -51,7 +51,7 @@ func showModelSelectionPopup() {
|
|||||||
// Find the current model index to set as selected
|
// Find the current model index to set as selected
|
||||||
currentModelIndex := -1
|
currentModelIndex := -1
|
||||||
for i, model := range modelList {
|
for i, model := range modelList {
|
||||||
if strings.TrimPrefix(model, "(loaded) ") == chatBody.Model {
|
if strings.TrimPrefix(model, models.LoadedMark) == chatBody.GetModel() {
|
||||||
currentModelIndex = i
|
currentModelIndex = i
|
||||||
}
|
}
|
||||||
modelListWidget.AddItem(model, "", 0, nil)
|
modelListWidget.AddItem(model, "", 0, nil)
|
||||||
@@ -61,9 +61,9 @@ func showModelSelectionPopup() {
|
|||||||
modelListWidget.SetCurrentItem(currentModelIndex)
|
modelListWidget.SetCurrentItem(currentModelIndex)
|
||||||
}
|
}
|
||||||
modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
modelName := strings.TrimPrefix(mainText, "(loaded) ")
|
modelName := strings.TrimPrefix(mainText, models.LoadedMark)
|
||||||
chatBody.Model = modelName
|
chatBody.SetModel(modelName)
|
||||||
cfg.CurrentModel = chatBody.Model
|
cfg.CurrentModel = chatBody.GetModel()
|
||||||
pages.RemovePage("modelSelectionPopup")
|
pages.RemovePage("modelSelectionPopup")
|
||||||
app.SetFocus(textArea)
|
app.SetFocus(textArea)
|
||||||
updateCachedModelColor()
|
updateCachedModelColor()
|
||||||
@@ -118,9 +118,7 @@ func showAPILinkSelectionPopup() {
|
|||||||
if len(apiLinks) == 0 {
|
if len(apiLinks) == 0 {
|
||||||
logger.Warn("no API links available for selection")
|
logger.Warn("no API links available for selection")
|
||||||
message := "No API links available. Please configure API links in your config file."
|
message := "No API links available. Please configure API links in your config file."
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -142,6 +140,7 @@ func showAPILinkSelectionPopup() {
|
|||||||
apiListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
apiListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
// Update the API in config
|
// Update the API in config
|
||||||
cfg.CurrentAPI = mainText
|
cfg.CurrentAPI = mainText
|
||||||
|
// updateToolCapabilities()
|
||||||
// Update model list based on new API
|
// Update model list based on new API
|
||||||
// Helper function to get model list for a given API (same as in props_table.go)
|
// Helper function to get model list for a given API (same as in props_table.go)
|
||||||
getModelListForAPI := func(api string) []string {
|
getModelListForAPI := func(api string) []string {
|
||||||
@@ -152,15 +151,14 @@ func showAPILinkSelectionPopup() {
|
|||||||
}
|
}
|
||||||
// Assume local llama.cpp
|
// Assume local llama.cpp
|
||||||
refreshLocalModelsIfEmpty()
|
refreshLocalModelsIfEmpty()
|
||||||
localModelsMu.RLock()
|
return LocalModels.Load().([]string)
|
||||||
defer localModelsMu.RUnlock()
|
|
||||||
return LocalModels
|
|
||||||
}
|
}
|
||||||
newModelList := getModelListForAPI(cfg.CurrentAPI)
|
newModelList := getModelListForAPI(cfg.CurrentAPI)
|
||||||
// Ensure chatBody.Model is in the new list; if not, set to first available model
|
// Ensure chatBody.Model is in the new list; if not, set to first available model
|
||||||
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) {
|
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.GetModel()) {
|
||||||
chatBody.Model = newModelList[0]
|
chatBody.SetModel(strings.TrimPrefix(newModelList[0], models.LoadedMark))
|
||||||
cfg.CurrentModel = chatBody.Model
|
cfg.CurrentModel = chatBody.GetModel()
|
||||||
|
updateToolCapabilities()
|
||||||
}
|
}
|
||||||
pages.RemovePage("apiLinkSelectionPopup")
|
pages.RemovePage("apiLinkSelectionPopup")
|
||||||
app.SetFocus(textArea)
|
app.SetFocus(textArea)
|
||||||
@@ -203,9 +201,7 @@ func showUserRoleSelectionPopup() {
|
|||||||
if len(roles) == 0 {
|
if len(roles) == 0 {
|
||||||
logger.Warn("no roles available for selection")
|
logger.Warn("no roles available for selection")
|
||||||
message := "No roles available for selection."
|
message := "No roles available for selection."
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -232,7 +228,7 @@ func showUserRoleSelectionPopup() {
|
|||||||
// Update the user role in config
|
// Update the user role in config
|
||||||
cfg.WriteNextMsgAs = mainText
|
cfg.WriteNextMsgAs = mainText
|
||||||
// role got switch, update textview with character specific context for user
|
// role got switch, update textview with character specific context for user
|
||||||
filtered := filterMessagesForCharacter(chatBody.Messages, mainText)
|
filtered := filterMessagesForCharacter(chatBody.GetMessages(), mainText)
|
||||||
textView.SetText(chatToText(filtered, cfg.ShowSys))
|
textView.SetText(chatToText(filtered, cfg.ShowSys))
|
||||||
// Remove the popup page
|
// Remove the popup page
|
||||||
pages.RemovePage("userRoleSelectionPopup")
|
pages.RemovePage("userRoleSelectionPopup")
|
||||||
@@ -282,9 +278,7 @@ func showBotRoleSelectionPopup() {
|
|||||||
if len(roles) == 0 {
|
if len(roles) == 0 {
|
||||||
logger.Warn("no roles available for selection")
|
logger.Warn("no roles available for selection")
|
||||||
message := "No roles available for selection."
|
message := "No roles available for selection."
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -403,6 +397,66 @@ func showShellFileCompletionPopup(filter string) {
|
|||||||
app.SetFocus(widget)
|
app.SetFocus(widget)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func showTextAreaFileCompletionPopup(filter string) {
|
||||||
|
baseDir := cfg.FilePickerDir
|
||||||
|
if baseDir == "" {
|
||||||
|
baseDir = "."
|
||||||
|
}
|
||||||
|
complMatches := scanFiles(baseDir, filter)
|
||||||
|
if len(complMatches) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(complMatches) == 1 {
|
||||||
|
currentText := textArea.GetText()
|
||||||
|
atIdx := strings.LastIndex(currentText, "@")
|
||||||
|
if atIdx >= 0 {
|
||||||
|
before := currentText[:atIdx]
|
||||||
|
textArea.SetText(before+complMatches[0], true)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
widget := tview.NewList().ShowSecondaryText(false).
|
||||||
|
SetSelectedBackgroundColor(tcell.ColorGray)
|
||||||
|
widget.SetTitle("file completion").SetBorder(true)
|
||||||
|
for _, m := range complMatches {
|
||||||
|
widget.AddItem(m, "", 0, nil)
|
||||||
|
}
|
||||||
|
widget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
|
currentText := textArea.GetText()
|
||||||
|
atIdx := strings.LastIndex(currentText, "@")
|
||||||
|
if atIdx >= 0 {
|
||||||
|
before := currentText[:atIdx]
|
||||||
|
textArea.SetText(before+mainText, true)
|
||||||
|
}
|
||||||
|
pages.RemovePage("textAreaFileCompletionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
})
|
||||||
|
widget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
pages.RemovePage("textAreaFileCompletionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("textAreaFileCompletionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
modal := func(p tview.Primitive, width, height int) tview.Primitive {
|
||||||
|
return tview.NewFlex().
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(p, height, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false), width, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false)
|
||||||
|
}
|
||||||
|
pages.AddPage("textAreaFileCompletionPopup", modal(widget, 80, 20), true, true)
|
||||||
|
app.SetFocus(widget)
|
||||||
|
}
|
||||||
|
|
||||||
func updateWidgetColors(theme *tview.Theme) {
|
func updateWidgetColors(theme *tview.Theme) {
|
||||||
bgColor := theme.PrimitiveBackgroundColor
|
bgColor := theme.PrimitiveBackgroundColor
|
||||||
fgColor := theme.PrimaryTextColor
|
fgColor := theme.PrimaryTextColor
|
||||||
@@ -449,9 +503,7 @@ func showColorschemeSelectionPopup() {
|
|||||||
if len(schemeNames) == 0 {
|
if len(schemeNames) == 0 {
|
||||||
logger.Warn("no colorschemes available for selection")
|
logger.Warn("no colorschemes available for selection")
|
||||||
message := "No colorschemes available."
|
message := "No colorschemes available."
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
|
|||||||
@@ -4,14 +4,11 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
|
|
||||||
"github.com/gdamore/tcell/v2"
|
"github.com/gdamore/tcell/v2"
|
||||||
"github.com/rivo/tview"
|
"github.com/rivo/tview"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ = sync.RWMutex{}
|
|
||||||
|
|
||||||
// Define constants for cell types
|
// Define constants for cell types
|
||||||
const (
|
const (
|
||||||
CellTypeCheckbox = "checkbox"
|
CellTypeCheckbox = "checkbox"
|
||||||
@@ -115,9 +112,6 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
row++
|
row++
|
||||||
}
|
}
|
||||||
// Add checkboxes
|
// Add checkboxes
|
||||||
addCheckboxRow("RAG use", cfg.RAGEnabled, func(checked bool) {
|
|
||||||
cfg.RAGEnabled = checked
|
|
||||||
})
|
|
||||||
addCheckboxRow("Inject role", injectRole, func(checked bool) {
|
addCheckboxRow("Inject role", injectRole, func(checked bool) {
|
||||||
injectRole = checked
|
injectRole = checked
|
||||||
})
|
})
|
||||||
@@ -160,9 +154,7 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
}
|
}
|
||||||
// Assume local llama.cpp
|
// Assume local llama.cpp
|
||||||
refreshLocalModelsIfEmpty()
|
refreshLocalModelsIfEmpty()
|
||||||
localModelsMu.RLock()
|
return LocalModels.Load().([]string)
|
||||||
defer localModelsMu.RUnlock()
|
|
||||||
return LocalModels
|
|
||||||
}
|
}
|
||||||
// Add input fields
|
// Add input fields
|
||||||
addInputRow("New char to write msg as", "", func(text string) {
|
addInputRow("New char to write msg as", "", func(text string) {
|
||||||
@@ -259,15 +251,14 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
// Handle nil options
|
// Handle nil options
|
||||||
if data.Options == nil {
|
if data.Options == nil {
|
||||||
logger.Error("options list is nil for", "label", label)
|
logger.Error("options list is nil for", "label", label)
|
||||||
if err := notifyUser("Configuration error", "Options list is nil for "+label); err != nil {
|
showToast("Configuration error", "Options list is nil for "+label)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for empty options list
|
// Check for empty options list
|
||||||
if len(data.Options) == 0 {
|
if len(data.Options) == 0 {
|
||||||
logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
|
localModels := LocalModels.Load().([]string)
|
||||||
|
logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(localModels), "orModelsLen", len(ORFreeModels))
|
||||||
message := "No options available for " + label
|
message := "No options available for " + label
|
||||||
if label == "Select a model" {
|
if label == "Select a model" {
|
||||||
switch {
|
switch {
|
||||||
@@ -279,9 +270,7 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
|
|||||||
314
rag/embedder.go
314
rag/embedder.go
@@ -9,6 +9,13 @@ import (
|
|||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/sugarme/tokenizer"
|
||||||
|
"github.com/sugarme/tokenizer/pretrained"
|
||||||
|
"github.com/yalue/onnxruntime_go"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Embedder defines the interface for embedding text
|
// Embedder defines the interface for embedding text
|
||||||
@@ -27,8 +34,10 @@ type APIEmbedder struct {
|
|||||||
func NewAPIEmbedder(l *slog.Logger, cfg *config.Config) *APIEmbedder {
|
func NewAPIEmbedder(l *slog.Logger, cfg *config.Config) *APIEmbedder {
|
||||||
return &APIEmbedder{
|
return &APIEmbedder{
|
||||||
logger: l,
|
logger: l,
|
||||||
client: &http.Client{},
|
client: &http.Client{
|
||||||
cfg: cfg,
|
Timeout: 30 * time.Second,
|
||||||
|
},
|
||||||
|
cfg: cfg,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -134,11 +143,302 @@ func (a *APIEmbedder) EmbedSlice(lines []string) ([][]float32, error) {
|
|||||||
return embeddings, nil
|
return embeddings, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: ONNXEmbedder implementation would go here
|
|
||||||
// This would require:
|
|
||||||
// 1. Loading ONNX models locally
|
// 1. Loading ONNX models locally
|
||||||
// 2. Using a Go ONNX runtime (like gorgonia/onnx or similar)
|
// 2. Using a Go ONNX runtime (like gorgonia/onnx or similar)
|
||||||
// 3. Converting text to embeddings without external API calls
|
// 3. Converting text to embeddings without external API calls
|
||||||
//
|
type ONNXEmbedder struct {
|
||||||
// For now, we'll focus on the API implementation which is already working in the current system,
|
session *onnxruntime_go.DynamicAdvancedSession
|
||||||
// and can be extended later when we have ONNX runtime integration
|
tokenizer *tokenizer.Tokenizer
|
||||||
|
tokenizerPath string
|
||||||
|
dims int
|
||||||
|
logger *slog.Logger
|
||||||
|
mu sync.Mutex
|
||||||
|
modelPath string
|
||||||
|
}
|
||||||
|
|
||||||
|
var onnxInitOnce sync.Once
|
||||||
|
var onnxReady bool
|
||||||
|
var onnxLibPath string
|
||||||
|
var cudaLibPath string
|
||||||
|
|
||||||
|
var onnxLibPaths = []string{
|
||||||
|
"/usr/lib/libonnxruntime.so",
|
||||||
|
"/usr/lib/libonnxruntime.so.1.24.2",
|
||||||
|
"/usr/local/lib/libonnxruntime.so",
|
||||||
|
"/usr/lib/x86_64-linux-gnu/libonnxruntime.so",
|
||||||
|
"/opt/onnxruntime/lib/libonnxruntime.so",
|
||||||
|
}
|
||||||
|
|
||||||
|
var cudaLibPaths = []string{
|
||||||
|
"/usr/lib/libonnxruntime_providers_cuda.so",
|
||||||
|
"/usr/local/lib/libonnxruntime_providers_cuda.so",
|
||||||
|
"/opt/onnxruntime/lib/libonnxruntime_providers_cuda.so",
|
||||||
|
}
|
||||||
|
|
||||||
|
func findONNXLibrary() string {
|
||||||
|
for _, path := range onnxLibPaths {
|
||||||
|
if _, err := os.Stat(path); err == nil {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func findCUDALibrary() string {
|
||||||
|
for _, path := range cudaLibPaths {
|
||||||
|
if _, err := os.Stat(path); err == nil {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewONNXEmbedder(modelPath, tokenizerPath string, dims int, logger *slog.Logger) (*ONNXEmbedder, error) {
|
||||||
|
// Check if model and tokenizer files exist
|
||||||
|
if _, err := os.Stat(modelPath); err != nil {
|
||||||
|
return nil, fmt.Errorf("ONNX model not found: %w", err)
|
||||||
|
}
|
||||||
|
if _, err := os.Stat(tokenizerPath); err != nil {
|
||||||
|
return nil, fmt.Errorf("tokenizer not found: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find ONNX library
|
||||||
|
onnxLibPath = findONNXLibrary()
|
||||||
|
if onnxLibPath == "" {
|
||||||
|
return nil, errors.New("ONNX runtime library not found in standard locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find CUDA provider library (optional)
|
||||||
|
cudaLibPath = findCUDALibrary()
|
||||||
|
if cudaLibPath == "" {
|
||||||
|
fmt.Println("WARNING: CUDA provider library not found, will use CPU")
|
||||||
|
}
|
||||||
|
emb := &ONNXEmbedder{
|
||||||
|
tokenizerPath: tokenizerPath,
|
||||||
|
dims: dims,
|
||||||
|
logger: logger,
|
||||||
|
modelPath: modelPath,
|
||||||
|
}
|
||||||
|
return emb, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) ensureInitialized() error {
|
||||||
|
if e.session != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
e.mu.Lock()
|
||||||
|
defer e.mu.Unlock()
|
||||||
|
if e.session != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// Load tokenizer lazily
|
||||||
|
if e.tokenizer == nil {
|
||||||
|
tok, err := pretrained.FromFile(e.tokenizerPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to load tokenizer: %w", err)
|
||||||
|
}
|
||||||
|
e.tokenizer = tok
|
||||||
|
}
|
||||||
|
onnxInitOnce.Do(func() {
|
||||||
|
onnxruntime_go.SetSharedLibraryPath(onnxLibPath)
|
||||||
|
if err := onnxruntime_go.InitializeEnvironment(); err != nil {
|
||||||
|
e.logger.Error("failed to initialize ONNX runtime", "error", err)
|
||||||
|
onnxReady = false
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Register CUDA provider if available
|
||||||
|
if cudaLibPath != "" {
|
||||||
|
if err := onnxruntime_go.RegisterExecutionProviderLibrary("CUDA", cudaLibPath); err != nil {
|
||||||
|
e.logger.Warn("failed to register CUDA provider", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
onnxReady = true
|
||||||
|
})
|
||||||
|
if !onnxReady {
|
||||||
|
return errors.New("ONNX runtime not ready")
|
||||||
|
}
|
||||||
|
// Create session options
|
||||||
|
opts, err := onnxruntime_go.NewSessionOptions()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create session options: %w", err)
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
_ = opts.Destroy()
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Try to add CUDA provider
|
||||||
|
useCUDA := cudaLibPath != ""
|
||||||
|
if useCUDA {
|
||||||
|
cudaOpts, err := onnxruntime_go.NewCUDAProviderOptions()
|
||||||
|
if err != nil {
|
||||||
|
e.logger.Warn("failed to create CUDA provider options, falling back to CPU", "error", err)
|
||||||
|
useCUDA = false
|
||||||
|
} else {
|
||||||
|
defer func() {
|
||||||
|
_ = cudaOpts.Destroy()
|
||||||
|
}()
|
||||||
|
if err := cudaOpts.Update(map[string]string{"device_id": "0"}); err != nil {
|
||||||
|
e.logger.Warn("failed to update CUDA options, falling back to CPU", "error", err)
|
||||||
|
useCUDA = false
|
||||||
|
} else if err := opts.AppendExecutionProviderCUDA(cudaOpts); err != nil {
|
||||||
|
e.logger.Warn("failed to append CUDA provider, falling back to CPU", "error", err)
|
||||||
|
useCUDA = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if useCUDA {
|
||||||
|
e.logger.Info("Using CUDA for ONNX inference")
|
||||||
|
} else {
|
||||||
|
e.logger.Info("Using CPU for ONNX inference")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create session with options
|
||||||
|
session, err := onnxruntime_go.NewDynamicAdvancedSession(
|
||||||
|
e.getModelPath(),
|
||||||
|
[]string{"input_ids", "attention_mask"},
|
||||||
|
[]string{"sentence_embedding"},
|
||||||
|
opts,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create ONNX session: %w", err)
|
||||||
|
}
|
||||||
|
e.session = session
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) getModelPath() string {
|
||||||
|
return e.modelPath
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) Destroy() error {
|
||||||
|
e.mu.Lock()
|
||||||
|
defer e.mu.Unlock()
|
||||||
|
if e.session != nil {
|
||||||
|
if err := e.session.Destroy(); err != nil {
|
||||||
|
return fmt.Errorf("failed to destroy ONNX session: %w", err)
|
||||||
|
}
|
||||||
|
e.session = nil
|
||||||
|
e.logger.Info("ONNX session destroyed, VRAM freed")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) Embed(text string) ([]float32, error) {
|
||||||
|
if err := e.ensureInitialized(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// 1. Tokenize
|
||||||
|
encoding, err := e.tokenizer.EncodeSingle(text)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("tokenization failed: %w", err)
|
||||||
|
}
|
||||||
|
// 2. Convert to int64 and create attention mask
|
||||||
|
ids := encoding.Ids
|
||||||
|
inputIDs := make([]int64, len(ids))
|
||||||
|
attentionMask := make([]int64, len(ids))
|
||||||
|
for i, id := range ids {
|
||||||
|
inputIDs[i] = int64(id)
|
||||||
|
attentionMask[i] = 1
|
||||||
|
}
|
||||||
|
// 3. Create input tensors (shape: [1, seq_len])
|
||||||
|
seqLen := int64(len(inputIDs))
|
||||||
|
inputIDsTensor, err := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(1, seqLen),
|
||||||
|
inputIDs,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create input_ids tensor: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = inputIDsTensor.Destroy() }()
|
||||||
|
maskTensor, err := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(1, seqLen),
|
||||||
|
attentionMask,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create attention_mask tensor: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = maskTensor.Destroy() }()
|
||||||
|
// 4. Create output tensor
|
||||||
|
outputTensor, err := onnxruntime_go.NewEmptyTensor[float32](
|
||||||
|
onnxruntime_go.NewShape(1, int64(e.dims)),
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create output tensor: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = outputTensor.Destroy() }()
|
||||||
|
// 5. Run inference
|
||||||
|
err = e.session.Run(
|
||||||
|
[]onnxruntime_go.Value{inputIDsTensor, maskTensor},
|
||||||
|
[]onnxruntime_go.Value{outputTensor},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("inference failed: %w", err)
|
||||||
|
}
|
||||||
|
// 6. Copy output data
|
||||||
|
outputData := outputTensor.GetData()
|
||||||
|
embedding := make([]float32, len(outputData))
|
||||||
|
copy(embedding, outputData)
|
||||||
|
return embedding, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) EmbedSlice(texts []string) ([][]float32, error) {
|
||||||
|
if err := e.ensureInitialized(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
encodings := make([]*tokenizer.Encoding, len(texts))
|
||||||
|
maxLen := 0
|
||||||
|
for i, txt := range texts {
|
||||||
|
enc, err := e.tokenizer.EncodeSingle(txt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
encodings[i] = enc
|
||||||
|
if l := len(enc.Ids); l > maxLen {
|
||||||
|
maxLen = l
|
||||||
|
}
|
||||||
|
}
|
||||||
|
batchSize := len(texts)
|
||||||
|
inputIDs := make([]int64, batchSize*maxLen)
|
||||||
|
attentionMask := make([]int64, batchSize*maxLen)
|
||||||
|
for i, enc := range encodings {
|
||||||
|
ids := enc.Ids
|
||||||
|
offset := i * maxLen
|
||||||
|
for j, id := range ids {
|
||||||
|
inputIDs[offset+j] = int64(id)
|
||||||
|
attentionMask[offset+j] = 1
|
||||||
|
}
|
||||||
|
// Remaining positions are already zero (padding)
|
||||||
|
}
|
||||||
|
// Create tensors with shape [batchSize, maxLen]
|
||||||
|
inputTensor, _ := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(int64(batchSize), int64(maxLen)),
|
||||||
|
inputIDs,
|
||||||
|
)
|
||||||
|
defer func() { _ = inputTensor.Destroy() }()
|
||||||
|
maskTensor, _ := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(int64(batchSize), int64(maxLen)),
|
||||||
|
attentionMask,
|
||||||
|
)
|
||||||
|
defer func() { _ = maskTensor.Destroy() }()
|
||||||
|
outputTensor, _ := onnxruntime_go.NewEmptyTensor[float32](
|
||||||
|
onnxruntime_go.NewShape(int64(batchSize), int64(e.dims)),
|
||||||
|
)
|
||||||
|
defer func() { _ = outputTensor.Destroy() }()
|
||||||
|
err := e.session.Run(
|
||||||
|
[]onnxruntime_go.Value{inputTensor, maskTensor},
|
||||||
|
[]onnxruntime_go.Value{outputTensor},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// Extract embeddings per batch item
|
||||||
|
data := outputTensor.GetData()
|
||||||
|
embeddings := make([][]float32, batchSize)
|
||||||
|
for i := 0; i < batchSize; i++ {
|
||||||
|
start := i * e.dims
|
||||||
|
emb := make([]float32, e.dims)
|
||||||
|
copy(emb, data[start:start+e.dims])
|
||||||
|
embeddings[i] = emb
|
||||||
|
}
|
||||||
|
return embeddings, nil
|
||||||
|
}
|
||||||
|
|||||||
736
rag/rag.go
736
rag/rag.go
@@ -1,6 +1,7 @@
|
|||||||
package rag
|
package rag
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"gf-lt/config"
|
"gf-lt/config"
|
||||||
@@ -9,51 +10,168 @@ import (
|
|||||||
"log/slog"
|
"log/slog"
|
||||||
"path"
|
"path"
|
||||||
"regexp"
|
"regexp"
|
||||||
|
"runtime"
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/neurosnap/sentences/english"
|
"github.com/neurosnap/sentences/english"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const ()
|
||||||
|
|
||||||
var (
|
var (
|
||||||
// Status messages for TUI integration
|
// Status messages for TUI integration
|
||||||
LongJobStatusCh = make(chan string, 10) // Increased buffer size to prevent blocking
|
LongJobStatusCh = make(chan string, 100) // Increased buffer size for parallel batch updates
|
||||||
FinishedRAGStatus = "finished loading RAG file; press Enter"
|
FinishedRAGStatus = "finished loading RAG file; press Enter"
|
||||||
LoadedFileRAGStatus = "loaded file"
|
LoadedFileRAGStatus = "loaded file"
|
||||||
ErrRAGStatus = "some error occurred; failed to transfer data to vector db"
|
ErrRAGStatus = "some error occurred; failed to transfer data to vector db"
|
||||||
)
|
)
|
||||||
|
|
||||||
type RAG struct {
|
type RAG struct {
|
||||||
logger *slog.Logger
|
logger *slog.Logger
|
||||||
store storage.FullRepo
|
store storage.FullRepo
|
||||||
cfg *config.Config
|
cfg *config.Config
|
||||||
embedder Embedder
|
embedder Embedder
|
||||||
storage *VectorStorage
|
storage *VectorStorage
|
||||||
mu sync.Mutex
|
mu sync.RWMutex
|
||||||
|
idleMu sync.Mutex
|
||||||
|
fallbackMsg string
|
||||||
|
idleTimer *time.Timer
|
||||||
|
idleTimeout time.Duration
|
||||||
}
|
}
|
||||||
|
|
||||||
func New(l *slog.Logger, s storage.FullRepo, cfg *config.Config) *RAG {
|
// batchTask represents a single batch to be embedded
|
||||||
// Initialize with API embedder by default, could be configurable later
|
type batchTask struct {
|
||||||
embedder := NewAPIEmbedder(l, cfg)
|
batchIndex int
|
||||||
|
paragraphs []string
|
||||||
|
filename string
|
||||||
|
totalBatches int
|
||||||
|
}
|
||||||
|
|
||||||
|
// batchResult represents the result of embedding a batch
|
||||||
|
type batchResult struct {
|
||||||
|
batchIndex int
|
||||||
|
embeddings [][]float32
|
||||||
|
paragraphs []string
|
||||||
|
filename string
|
||||||
|
}
|
||||||
|
|
||||||
|
// sendStatusNonBlocking sends a status message without blocking
|
||||||
|
func (r *RAG) sendStatusNonBlocking(status string) {
|
||||||
|
select {
|
||||||
|
case LongJobStatusCh <- status:
|
||||||
|
default:
|
||||||
|
r.logger.Warn("LongJobStatusCh channel is full or closed, dropping status message", "message", status)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func New(l *slog.Logger, s storage.FullRepo, cfg *config.Config) (*RAG, error) {
|
||||||
|
var embedder Embedder
|
||||||
|
var fallbackMsg string
|
||||||
|
if cfg.EmbedModelPath != "" && cfg.EmbedTokenizerPath != "" {
|
||||||
|
emb, err := NewONNXEmbedder(cfg.EmbedModelPath, cfg.EmbedTokenizerPath, cfg.EmbedDims, l)
|
||||||
|
if err != nil {
|
||||||
|
l.Error("failed to create ONNX embedder, falling back to API", "error", err)
|
||||||
|
fallbackMsg = err.Error()
|
||||||
|
embedder = NewAPIEmbedder(l, cfg)
|
||||||
|
} else {
|
||||||
|
embedder = emb
|
||||||
|
l.Info("using ONNX embedder", "model", cfg.EmbedModelPath, "dims", cfg.EmbedDims)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
embedder = NewAPIEmbedder(l, cfg)
|
||||||
|
l.Info("using API embedder", "url", cfg.EmbedURL)
|
||||||
|
}
|
||||||
rag := &RAG{
|
rag := &RAG{
|
||||||
logger: l,
|
logger: l,
|
||||||
store: s,
|
store: s,
|
||||||
cfg: cfg,
|
cfg: cfg,
|
||||||
embedder: embedder,
|
embedder: embedder,
|
||||||
storage: NewVectorStorage(l, s),
|
storage: NewVectorStorage(l, s),
|
||||||
|
fallbackMsg: fallbackMsg,
|
||||||
|
idleTimeout: 30 * time.Second,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Note: Vector tables are created via database migrations, not at runtime
|
// Note: Vector tables are created via database migrations, not at runtime
|
||||||
|
|
||||||
return rag
|
return rag, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func wordCounter(sentence string) int {
|
func createChunks(sentences []string, wordLimit, overlapWords uint32) []string {
|
||||||
return len(strings.Split(strings.TrimSpace(sentence), " "))
|
if len(sentences) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if overlapWords >= wordLimit {
|
||||||
|
overlapWords = wordLimit / 2
|
||||||
|
}
|
||||||
|
var chunks []string
|
||||||
|
i := 0
|
||||||
|
for i < len(sentences) {
|
||||||
|
var chunkWords []string
|
||||||
|
wordCount := 0
|
||||||
|
j := i
|
||||||
|
for j < len(sentences) && wordCount <= int(wordLimit) {
|
||||||
|
sentence := sentences[j]
|
||||||
|
words := strings.Fields(sentence)
|
||||||
|
chunkWords = append(chunkWords, sentence)
|
||||||
|
wordCount += len(words)
|
||||||
|
j++
|
||||||
|
// If this sentence alone exceeds limit, still include it and stop
|
||||||
|
if wordCount > int(wordLimit) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(chunkWords) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
chunk := strings.Join(chunkWords, " ")
|
||||||
|
chunks = append(chunks, chunk)
|
||||||
|
if j >= len(sentences) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
// Move i forward by skipping overlap
|
||||||
|
if overlapWords == 0 {
|
||||||
|
i = j
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Calculate how many sentences to skip to achieve overlapWords
|
||||||
|
overlapRemaining := int(overlapWords)
|
||||||
|
newI := i
|
||||||
|
for newI < j && overlapRemaining > 0 {
|
||||||
|
words := len(strings.Fields(sentences[newI]))
|
||||||
|
overlapRemaining -= words
|
||||||
|
if overlapRemaining >= 0 {
|
||||||
|
newI++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if newI == i {
|
||||||
|
newI = j
|
||||||
|
}
|
||||||
|
i = newI
|
||||||
|
}
|
||||||
|
return chunks
|
||||||
|
}
|
||||||
|
|
||||||
|
func sanitizeFTSQuery(query string) string {
|
||||||
|
// Remove double quotes and other problematic characters for FTS5
|
||||||
|
query = strings.ReplaceAll(query, "\"", " ")
|
||||||
|
query = strings.ReplaceAll(query, "'", " ")
|
||||||
|
query = strings.ReplaceAll(query, ";", " ")
|
||||||
|
query = strings.ReplaceAll(query, "\\", " ")
|
||||||
|
query = strings.TrimSpace(query)
|
||||||
|
if query == "" {
|
||||||
|
return "*" // match all
|
||||||
|
}
|
||||||
|
return query
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *RAG) LoadRAG(fpath string) error {
|
func (r *RAG) LoadRAG(fpath string) error {
|
||||||
|
return r.LoadRAGWithContext(context.Background(), fpath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *RAG) LoadRAGWithContext(ctx context.Context, fpath string) error {
|
||||||
r.mu.Lock()
|
r.mu.Lock()
|
||||||
defer r.mu.Unlock()
|
defer r.mu.Unlock()
|
||||||
fileText, err := ExtractText(fpath)
|
fileText, err := ExtractText(fpath)
|
||||||
@@ -61,11 +179,9 @@ func (r *RAG) LoadRAG(fpath string) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
r.logger.Debug("rag: loaded file", "fp", fpath)
|
r.logger.Debug("rag: loaded file", "fp", fpath)
|
||||||
select {
|
|
||||||
case LongJobStatusCh <- LoadedFileRAGStatus:
|
// Send initial status (non-blocking with retry)
|
||||||
default:
|
r.sendStatusNonBlocking(LoadedFileRAGStatus)
|
||||||
r.logger.Warn("LongJobStatusCh channel is full or closed, dropping status message", "message", LoadedFileRAGStatus)
|
|
||||||
}
|
|
||||||
tokenizer, err := english.NewSentenceTokenizer(nil)
|
tokenizer, err := english.NewSentenceTokenizer(nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -75,31 +191,9 @@ func (r *RAG) LoadRAG(fpath string) error {
|
|||||||
for i, s := range sentences {
|
for i, s := range sentences {
|
||||||
sents[i] = s.Text
|
sents[i] = s.Text
|
||||||
}
|
}
|
||||||
// Group sentences into paragraphs based on word limit
|
|
||||||
paragraphs := []string{}
|
// Create chunks with overlap
|
||||||
par := strings.Builder{}
|
paragraphs := createChunks(sents, r.cfg.RAGWordLimit, r.cfg.RAGOverlapWords)
|
||||||
for i := 0; i < len(sents); i++ {
|
|
||||||
if strings.TrimSpace(sents[i]) != "" {
|
|
||||||
if par.Len() > 0 {
|
|
||||||
par.WriteString(" ")
|
|
||||||
}
|
|
||||||
par.WriteString(sents[i])
|
|
||||||
}
|
|
||||||
if wordCounter(par.String()) > int(r.cfg.RAGWordLimit) {
|
|
||||||
paragraph := strings.TrimSpace(par.String())
|
|
||||||
if paragraph != "" {
|
|
||||||
paragraphs = append(paragraphs, paragraph)
|
|
||||||
}
|
|
||||||
par.Reset()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Handle any remaining content in the paragraph buffer
|
|
||||||
if par.Len() > 0 {
|
|
||||||
paragraph := strings.TrimSpace(par.String())
|
|
||||||
if paragraph != "" {
|
|
||||||
paragraphs = append(paragraphs, paragraph)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Adjust batch size if needed
|
// Adjust batch size if needed
|
||||||
if len(paragraphs) < r.cfg.RAGBatchSize && len(paragraphs) > 0 {
|
if len(paragraphs) < r.cfg.RAGBatchSize && len(paragraphs) > 0 {
|
||||||
r.cfg.RAGBatchSize = len(paragraphs)
|
r.cfg.RAGBatchSize = len(paragraphs)
|
||||||
@@ -107,91 +201,348 @@ func (r *RAG) LoadRAG(fpath string) error {
|
|||||||
if len(paragraphs) == 0 {
|
if len(paragraphs) == 0 {
|
||||||
return errors.New("no valid paragraphs found in file")
|
return errors.New("no valid paragraphs found in file")
|
||||||
}
|
}
|
||||||
// Process paragraphs in batches synchronously
|
totalBatches := (len(paragraphs) + r.cfg.RAGBatchSize - 1) / r.cfg.RAGBatchSize
|
||||||
batchCount := 0
|
r.logger.Debug("starting parallel embedding", "total_batches", totalBatches, "batch_size", r.cfg.RAGBatchSize)
|
||||||
for i := 0; i < len(paragraphs); i += r.cfg.RAGBatchSize {
|
|
||||||
end := i + r.cfg.RAGBatchSize
|
// Determine concurrency level
|
||||||
if end > len(paragraphs) {
|
concurrency := runtime.NumCPU()
|
||||||
end = len(paragraphs)
|
if concurrency > totalBatches {
|
||||||
}
|
concurrency = totalBatches
|
||||||
batch := paragraphs[i:end]
|
}
|
||||||
batchCount++
|
if concurrency < 1 {
|
||||||
// Filter empty paragraphs
|
concurrency = 1
|
||||||
nonEmptyBatch := make([]string, 0, len(batch))
|
}
|
||||||
for _, p := range batch {
|
// If using ONNX embedder, limit concurrency to 1 due to mutex serialization
|
||||||
if strings.TrimSpace(p) != "" {
|
var isONNX bool
|
||||||
nonEmptyBatch = append(nonEmptyBatch, strings.TrimSpace(p))
|
if _, isONNX = r.embedder.(*ONNXEmbedder); isONNX {
|
||||||
|
concurrency = 1
|
||||||
|
}
|
||||||
|
embedderType := "API"
|
||||||
|
if isONNX {
|
||||||
|
embedderType = "ONNX"
|
||||||
|
}
|
||||||
|
r.logger.Debug("parallel embedding setup",
|
||||||
|
"total_batches", totalBatches,
|
||||||
|
"concurrency", concurrency,
|
||||||
|
"embedder", embedderType,
|
||||||
|
"batch_size", r.cfg.RAGBatchSize)
|
||||||
|
|
||||||
|
// Create context with timeout (30 minutes) and cancellation for error handling
|
||||||
|
ctx, cancel := context.WithTimeout(ctx, 30*time.Minute)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
// Channels for task distribution and results
|
||||||
|
taskCh := make(chan batchTask, totalBatches)
|
||||||
|
resultCh := make(chan batchResult, totalBatches)
|
||||||
|
errorCh := make(chan error, totalBatches)
|
||||||
|
|
||||||
|
// Start worker goroutines
|
||||||
|
var wg sync.WaitGroup
|
||||||
|
for w := 0; w < concurrency; w++ {
|
||||||
|
wg.Add(1)
|
||||||
|
go r.embeddingWorker(ctx, w, taskCh, resultCh, errorCh, &wg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close task channel after all tasks are sent (by separate goroutine)
|
||||||
|
go func() {
|
||||||
|
// Ensure task channel is closed when this goroutine exits
|
||||||
|
defer close(taskCh)
|
||||||
|
r.logger.Debug("task distributor started", "total_batches", totalBatches)
|
||||||
|
for i := 0; i < totalBatches; i++ {
|
||||||
|
start := i * r.cfg.RAGBatchSize
|
||||||
|
end := start + r.cfg.RAGBatchSize
|
||||||
|
if end > len(paragraphs) {
|
||||||
|
end = len(paragraphs)
|
||||||
|
}
|
||||||
|
batch := paragraphs[start:end]
|
||||||
|
|
||||||
|
// Filter empty paragraphs
|
||||||
|
nonEmptyBatch := make([]string, 0, len(batch))
|
||||||
|
for _, p := range batch {
|
||||||
|
if strings.TrimSpace(p) != "" {
|
||||||
|
nonEmptyBatch = append(nonEmptyBatch, strings.TrimSpace(p))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
task := batchTask{
|
||||||
|
batchIndex: i,
|
||||||
|
paragraphs: nonEmptyBatch,
|
||||||
|
filename: path.Base(fpath),
|
||||||
|
totalBatches: totalBatches,
|
||||||
|
}
|
||||||
|
|
||||||
|
select {
|
||||||
|
case taskCh <- task:
|
||||||
|
r.logger.Debug("task distributor sent batch", "batch", i, "paragraphs", len(nonEmptyBatch))
|
||||||
|
case <-ctx.Done():
|
||||||
|
r.logger.Debug("task distributor cancelled", "batches_sent", i+1, "total_batches", totalBatches)
|
||||||
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if len(nonEmptyBatch) == 0 {
|
r.logger.Debug("task distributor finished", "batches_sent", totalBatches)
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Wait for workers to finish and close result channel
|
||||||
|
go func() {
|
||||||
|
wg.Wait()
|
||||||
|
close(resultCh)
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Process results in order and write to database
|
||||||
|
nextExpectedBatch := 0
|
||||||
|
resultsBuffer := make(map[int]batchResult)
|
||||||
|
filename := path.Base(fpath)
|
||||||
|
batchesProcessed := 0
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return ctx.Err()
|
||||||
|
|
||||||
|
case err := <-errorCh:
|
||||||
|
// First error from any worker, cancel everything
|
||||||
|
cancel()
|
||||||
|
r.logger.Error("embedding worker failed", "error", err)
|
||||||
|
r.sendStatusNonBlocking(ErrRAGStatus)
|
||||||
|
return fmt.Errorf("embedding failed: %w", err)
|
||||||
|
|
||||||
|
case result, ok := <-resultCh:
|
||||||
|
if !ok {
|
||||||
|
// All results processed
|
||||||
|
resultCh = nil
|
||||||
|
r.logger.Debug("result channel closed", "batches_processed", batchesProcessed, "total_batches", totalBatches)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store result in buffer
|
||||||
|
resultsBuffer[result.batchIndex] = result
|
||||||
|
|
||||||
|
// Process buffered results in order
|
||||||
|
for {
|
||||||
|
if res, exists := resultsBuffer[nextExpectedBatch]; exists {
|
||||||
|
// Write this batch to database
|
||||||
|
if err := r.writeBatchToStorage(ctx, res, filename); err != nil {
|
||||||
|
cancel()
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
batchesProcessed++
|
||||||
|
// Send progress update
|
||||||
|
statusMsg := fmt.Sprintf("processed batch %d/%d", batchesProcessed, totalBatches)
|
||||||
|
r.sendStatusNonBlocking(statusMsg)
|
||||||
|
|
||||||
|
delete(resultsBuffer, nextExpectedBatch)
|
||||||
|
nextExpectedBatch++
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
// No channels ready, check for deadlock conditions
|
||||||
|
if resultCh == nil && nextExpectedBatch < totalBatches {
|
||||||
|
// Missing batch results after result channel closed
|
||||||
|
r.logger.Error("missing batch results",
|
||||||
|
"expected", totalBatches,
|
||||||
|
"received", nextExpectedBatch,
|
||||||
|
"missing", totalBatches-nextExpectedBatch)
|
||||||
|
|
||||||
|
// Wait a short time for any delayed errors, then cancel
|
||||||
|
select {
|
||||||
|
case <-time.After(5 * time.Second):
|
||||||
|
cancel()
|
||||||
|
return fmt.Errorf("missing batch results: expected %d, got %d", totalBatches, nextExpectedBatch)
|
||||||
|
case <-ctx.Done():
|
||||||
|
return ctx.Err()
|
||||||
|
case err := <-errorCh:
|
||||||
|
cancel()
|
||||||
|
r.logger.Error("embedding worker failed after result channel closed", "error", err)
|
||||||
|
r.sendStatusNonBlocking(ErrRAGStatus)
|
||||||
|
return fmt.Errorf("embedding failed: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If we reach here, no deadlock yet, just busy loop prevention
|
||||||
|
time.Sleep(100 * time.Millisecond)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if we're done
|
||||||
|
if resultCh == nil && nextExpectedBatch >= totalBatches {
|
||||||
|
r.logger.Debug("all batches processed successfully", "total", totalBatches)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
r.logger.Debug("finished writing vectors", "batches", batchesProcessed)
|
||||||
|
r.resetIdleTimer()
|
||||||
|
r.sendStatusNonBlocking(FinishedRAGStatus)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// embeddingWorker processes batch embedding tasks
|
||||||
|
func (r *RAG) embeddingWorker(ctx context.Context, workerID int, taskCh <-chan batchTask, resultCh chan<- batchResult, errorCh chan<- error, wg *sync.WaitGroup) {
|
||||||
|
defer wg.Done()
|
||||||
|
r.logger.Debug("embedding worker started", "worker", workerID)
|
||||||
|
|
||||||
|
// Panic recovery to ensure worker doesn't crash silently
|
||||||
|
defer func() {
|
||||||
|
if rec := recover(); rec != nil {
|
||||||
|
r.logger.Error("embedding worker panicked", "worker", workerID, "panic", rec)
|
||||||
|
// Try to send error, but don't block if channel is full
|
||||||
|
select {
|
||||||
|
case errorCh <- fmt.Errorf("worker %d panicked: %v", workerID, rec):
|
||||||
|
default:
|
||||||
|
r.logger.Warn("error channel full, dropping panic error", "worker", workerID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
for task := range taskCh {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
r.logger.Debug("embedding worker cancelled", "worker", workerID)
|
||||||
|
return
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
r.logger.Debug("worker processing batch", "worker", workerID, "batch", task.batchIndex, "paragraphs", len(task.paragraphs), "total_batches", task.totalBatches)
|
||||||
|
|
||||||
|
// Skip empty batches
|
||||||
|
if len(task.paragraphs) == 0 {
|
||||||
|
select {
|
||||||
|
case resultCh <- batchResult{
|
||||||
|
batchIndex: task.batchIndex,
|
||||||
|
embeddings: nil,
|
||||||
|
paragraphs: nil,
|
||||||
|
filename: task.filename,
|
||||||
|
}:
|
||||||
|
case <-ctx.Done():
|
||||||
|
r.logger.Debug("embedding worker cancelled while sending empty batch", "worker", workerID)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.logger.Debug("worker sent empty batch", "worker", workerID, "batch", task.batchIndex)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
// Embed the batch
|
// Embed with retry for API embedder
|
||||||
embeddings, err := r.embedder.EmbedSlice(nonEmptyBatch)
|
embeddings, err := r.embedWithRetry(ctx, task.paragraphs, 3)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
r.logger.Error("failed to embed batch", "error", err, "batch", batchCount)
|
// Try to send error, but don't block indefinitely
|
||||||
select {
|
select {
|
||||||
case LongJobStatusCh <- ErrRAGStatus:
|
case errorCh <- fmt.Errorf("worker %d batch %d: %w", workerID, task.batchIndex, err):
|
||||||
default:
|
case <-ctx.Done():
|
||||||
r.logger.Warn("LongJobStatusCh channel full, dropping message")
|
r.logger.Debug("embedding worker cancelled while sending error", "worker", workerID)
|
||||||
}
|
}
|
||||||
return fmt.Errorf("failed to embed batch %d: %w", batchCount, err)
|
return
|
||||||
}
|
}
|
||||||
if len(embeddings) != len(nonEmptyBatch) {
|
// Send result with context awareness
|
||||||
err := errors.New("embedding count mismatch")
|
|
||||||
r.logger.Error("embedding mismatch", "expected", len(nonEmptyBatch), "got", len(embeddings))
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
// Write vectors to storage
|
|
||||||
filename := path.Base(fpath)
|
|
||||||
for j, text := range nonEmptyBatch {
|
|
||||||
vector := models.VectorRow{
|
|
||||||
Embeddings: embeddings[j],
|
|
||||||
RawText: text,
|
|
||||||
Slug: fmt.Sprintf("%s_%d_%d", filename, batchCount, j),
|
|
||||||
FileName: filename,
|
|
||||||
}
|
|
||||||
if err := r.storage.WriteVector(&vector); err != nil {
|
|
||||||
r.logger.Error("failed to write vector to DB", "error", err, "slug", vector.Slug)
|
|
||||||
select {
|
|
||||||
case LongJobStatusCh <- ErrRAGStatus:
|
|
||||||
default:
|
|
||||||
r.logger.Warn("LongJobStatusCh channel full, dropping message")
|
|
||||||
}
|
|
||||||
return fmt.Errorf("failed to write vector: %w", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
r.logger.Debug("wrote batch to db", "batch", batchCount, "size", len(nonEmptyBatch))
|
|
||||||
// Send progress status
|
|
||||||
statusMsg := fmt.Sprintf("processed batch %d/%d", batchCount, (len(paragraphs)+r.cfg.RAGBatchSize-1)/r.cfg.RAGBatchSize)
|
|
||||||
select {
|
select {
|
||||||
case LongJobStatusCh <- statusMsg:
|
case resultCh <- batchResult{
|
||||||
default:
|
batchIndex: task.batchIndex,
|
||||||
r.logger.Warn("LongJobStatusCh channel full, dropping message")
|
embeddings: embeddings,
|
||||||
|
paragraphs: task.paragraphs,
|
||||||
|
filename: task.filename,
|
||||||
|
}:
|
||||||
|
case <-ctx.Done():
|
||||||
|
r.logger.Debug("embedding worker cancelled while sending result", "worker", workerID)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.logger.Debug("worker completed batch", "worker", workerID, "batch", task.batchIndex, "embeddings", len(embeddings))
|
||||||
|
}
|
||||||
|
r.logger.Debug("embedding worker finished", "worker", workerID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// embedWithRetry attempts embedding with exponential backoff for API embedder
|
||||||
|
func (r *RAG) embedWithRetry(ctx context.Context, paragraphs []string, maxRetries int) ([][]float32, error) {
|
||||||
|
var lastErr error
|
||||||
|
for attempt := 0; attempt < maxRetries; attempt++ {
|
||||||
|
if attempt > 0 {
|
||||||
|
// Exponential backoff
|
||||||
|
backoff := time.Duration(attempt*attempt) * time.Second
|
||||||
|
if backoff > 10*time.Second {
|
||||||
|
backoff = 10 * time.Second
|
||||||
|
}
|
||||||
|
select {
|
||||||
|
case <-time.After(backoff):
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil, ctx.Err()
|
||||||
|
}
|
||||||
|
r.logger.Debug("retrying embedding", "attempt", attempt, "max_retries", maxRetries)
|
||||||
|
}
|
||||||
|
|
||||||
|
embeddings, err := r.embedder.EmbedSlice(paragraphs)
|
||||||
|
if err == nil {
|
||||||
|
// Validate embedding count
|
||||||
|
if len(embeddings) != len(paragraphs) {
|
||||||
|
return nil, fmt.Errorf("embedding count mismatch: expected %d, got %d", len(paragraphs), len(embeddings))
|
||||||
|
}
|
||||||
|
return embeddings, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
lastErr = err
|
||||||
|
// Only retry for API embedder errors (network/timeout)
|
||||||
|
// For ONNX embedder, fail fast
|
||||||
|
if _, isAPI := r.embedder.(*APIEmbedder); !isAPI {
|
||||||
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
r.logger.Debug("finished writing vectors", "batches", batchCount)
|
return nil, fmt.Errorf("embedding failed after %d attempts: %w", maxRetries, lastErr)
|
||||||
select {
|
}
|
||||||
case LongJobStatusCh <- FinishedRAGStatus:
|
|
||||||
default:
|
// writeBatchToStorage writes a single batch of vectors to the database
|
||||||
r.logger.Warn("LongJobStatusCh channel is full or closed, dropping status message", "message", FinishedRAGStatus)
|
func (r *RAG) writeBatchToStorage(ctx context.Context, result batchResult, filename string) error {
|
||||||
|
if len(result.embeddings) == 0 {
|
||||||
|
// Empty batch, skip
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
// Check context before starting
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return ctx.Err()
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build all vectors for batch write
|
||||||
|
vectors := make([]*models.VectorRow, 0, len(result.paragraphs))
|
||||||
|
for j, text := range result.paragraphs {
|
||||||
|
vectors = append(vectors, &models.VectorRow{
|
||||||
|
Embeddings: result.embeddings[j],
|
||||||
|
RawText: text,
|
||||||
|
Slug: fmt.Sprintf("%s_%d_%d", filename, result.batchIndex+1, j),
|
||||||
|
FileName: filename,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write all vectors in a single transaction
|
||||||
|
if err := r.storage.WriteVectors(vectors); err != nil {
|
||||||
|
r.logger.Error("failed to write vectors batch to DB", "error", err, "batch", result.batchIndex+1, "size", len(vectors))
|
||||||
|
r.sendStatusNonBlocking(ErrRAGStatus)
|
||||||
|
return fmt.Errorf("failed to write vectors batch: %w", err)
|
||||||
|
}
|
||||||
|
r.logger.Debug("wrote batch to db", "batch", result.batchIndex+1, "size", len(result.paragraphs))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *RAG) LineToVector(line string) ([]float32, error) {
|
func (r *RAG) LineToVector(line string) ([]float32, error) {
|
||||||
|
r.resetIdleTimer()
|
||||||
return r.embedder.Embed(line)
|
return r.embedder.Embed(line)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *RAG) SearchEmb(emb *models.EmbeddingResp) ([]models.VectorRow, error) {
|
func (r *RAG) searchEmb(emb *models.EmbeddingResp, limit int) ([]models.VectorRow, error) {
|
||||||
return r.storage.SearchClosest(emb.Embedding)
|
r.resetIdleTimer()
|
||||||
|
return r.storage.SearchClosest(emb.Embedding, limit)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *RAG) searchKeyword(query string, limit int) ([]models.VectorRow, error) {
|
||||||
|
r.resetIdleTimer()
|
||||||
|
sanitized := sanitizeFTSQuery(query)
|
||||||
|
return r.storage.SearchKeyword(sanitized, limit)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *RAG) ListLoaded() ([]string, error) {
|
func (r *RAG) ListLoaded() ([]string, error) {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
return r.storage.ListFiles()
|
return r.storage.ListFiles()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *RAG) RemoveFile(filename string) error {
|
func (r *RAG) RemoveFile(filename string) error {
|
||||||
|
r.mu.Lock()
|
||||||
|
defer r.mu.Unlock()
|
||||||
|
r.resetIdleTimer()
|
||||||
return r.storage.RemoveEmbByFileName(filename)
|
return r.storage.RemoveEmbByFileName(filename)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -211,10 +562,13 @@ func (r *RAG) RefineQuery(query string) string {
|
|||||||
return original
|
return original
|
||||||
}
|
}
|
||||||
query = strings.ToLower(query)
|
query = strings.ToLower(query)
|
||||||
for _, stopWord := range stopWords {
|
words := strings.Fields(query)
|
||||||
wordPattern := `\b` + stopWord + `\b`
|
if len(words) >= 3 {
|
||||||
re := regexp.MustCompile(wordPattern)
|
for _, stopWord := range stopWords {
|
||||||
query = re.ReplaceAllString(query, "")
|
wordPattern := `\b` + stopWord + `\b`
|
||||||
|
re := regexp.MustCompile(wordPattern)
|
||||||
|
query = re.ReplaceAllString(query, "")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
query = strings.TrimSpace(query)
|
query = strings.TrimSpace(query)
|
||||||
if len(query) < 5 {
|
if len(query) < 5 {
|
||||||
@@ -246,7 +600,7 @@ func (r *RAG) extractImportantPhrases(query string) string {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if isImportant || len(word) > 3 {
|
if isImportant || len(word) >= 3 {
|
||||||
important = append(important, word)
|
important = append(important, word)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -265,6 +619,36 @@ func (r *RAG) GenerateQueryVariations(query string) []string {
|
|||||||
if len(parts) == 0 {
|
if len(parts) == 0 {
|
||||||
return variations
|
return variations
|
||||||
}
|
}
|
||||||
|
// Get loaded filenames to filter out filename terms
|
||||||
|
filenames, err := r.storage.ListFiles()
|
||||||
|
if err == nil && len(filenames) > 0 {
|
||||||
|
// Convert to lowercase for case-insensitive matching
|
||||||
|
lowerFilenames := make([]string, len(filenames))
|
||||||
|
for i, f := range filenames {
|
||||||
|
lowerFilenames[i] = strings.ToLower(f)
|
||||||
|
}
|
||||||
|
filteredParts := make([]string, 0, len(parts))
|
||||||
|
for _, part := range parts {
|
||||||
|
partLower := strings.ToLower(part)
|
||||||
|
skip := false
|
||||||
|
for _, fn := range lowerFilenames {
|
||||||
|
if strings.Contains(fn, partLower) || strings.Contains(partLower, fn) {
|
||||||
|
skip = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !skip {
|
||||||
|
filteredParts = append(filteredParts, part)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If filteredParts not empty and different from original, add filtered query
|
||||||
|
if len(filteredParts) > 0 && len(filteredParts) != len(parts) {
|
||||||
|
filteredQuery := strings.Join(filteredParts, " ")
|
||||||
|
if len(filteredQuery) >= 5 {
|
||||||
|
variations = append(variations, filteredQuery)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
if len(parts) >= 2 {
|
if len(parts) >= 2 {
|
||||||
trimmed := strings.Join(parts[:len(parts)-1], " ")
|
trimmed := strings.Join(parts[:len(parts)-1], " ")
|
||||||
if len(trimmed) >= 5 {
|
if len(trimmed) >= 5 {
|
||||||
@@ -328,9 +712,14 @@ func (r *RAG) RerankResults(results []models.VectorRow, query string) []models.V
|
|||||||
})
|
})
|
||||||
unique := make([]models.VectorRow, 0)
|
unique := make([]models.VectorRow, 0)
|
||||||
seen := make(map[string]bool)
|
seen := make(map[string]bool)
|
||||||
|
fileCounts := make(map[string]int)
|
||||||
for i := range scored {
|
for i := range scored {
|
||||||
if !seen[scored[i].row.Slug] {
|
if !seen[scored[i].row.Slug] {
|
||||||
|
if fileCounts[scored[i].row.FileName] >= 2 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
seen[scored[i].row.Slug] = true
|
seen[scored[i].row.Slug] = true
|
||||||
|
fileCounts[scored[i].row.FileName]++
|
||||||
unique = append(unique, scored[i].row)
|
unique = append(unique, scored[i].row)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -341,6 +730,9 @@ func (r *RAG) RerankResults(results []models.VectorRow, query string) []models.V
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (r *RAG) SynthesizeAnswer(results []models.VectorRow, query string) (string, error) {
|
func (r *RAG) SynthesizeAnswer(results []models.VectorRow, query string) (string, error) {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
|
r.resetIdleTimer()
|
||||||
if len(results) == 0 {
|
if len(results) == 0 {
|
||||||
return "No relevant information found in the vector database.", nil
|
return "No relevant information found in the vector database.", nil
|
||||||
}
|
}
|
||||||
@@ -369,7 +761,7 @@ func (r *RAG) SynthesizeAnswer(results []models.VectorRow, query string) (string
|
|||||||
Embedding: emb,
|
Embedding: emb,
|
||||||
Index: 0,
|
Index: 0,
|
||||||
}
|
}
|
||||||
topResults, err := r.SearchEmb(embResp)
|
topResults, err := r.searchEmb(embResp, 1)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
r.logger.Error("failed to search for synthesis context", "error", err)
|
r.logger.Error("failed to search for synthesis context", "error", err)
|
||||||
return "", err
|
return "", err
|
||||||
@@ -396,9 +788,14 @@ func truncateString(s string, maxLen int) string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (r *RAG) Search(query string, limit int) ([]models.VectorRow, error) {
|
func (r *RAG) Search(query string, limit int) ([]models.VectorRow, error) {
|
||||||
|
r.mu.RLock()
|
||||||
|
defer r.mu.RUnlock()
|
||||||
|
r.resetIdleTimer()
|
||||||
refined := r.RefineQuery(query)
|
refined := r.RefineQuery(query)
|
||||||
variations := r.GenerateQueryVariations(refined)
|
variations := r.GenerateQueryVariations(refined)
|
||||||
allResults := make([]models.VectorRow, 0)
|
|
||||||
|
// Collect embedding search results from all variations
|
||||||
|
var embResults []models.VectorRow
|
||||||
seen := make(map[string]bool)
|
seen := make(map[string]bool)
|
||||||
for _, q := range variations {
|
for _, q := range variations {
|
||||||
emb, err := r.LineToVector(q)
|
emb, err := r.LineToVector(q)
|
||||||
@@ -406,29 +803,78 @@ func (r *RAG) Search(query string, limit int) ([]models.VectorRow, error) {
|
|||||||
r.logger.Error("failed to embed query variation", "error", err, "query", q)
|
r.logger.Error("failed to embed query variation", "error", err, "query", q)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
embResp := &models.EmbeddingResp{
|
embResp := &models.EmbeddingResp{
|
||||||
Embedding: emb,
|
Embedding: emb,
|
||||||
Index: 0,
|
Index: 0,
|
||||||
}
|
}
|
||||||
|
results, err := r.searchEmb(embResp, limit*2) // Get more candidates
|
||||||
results, err := r.SearchEmb(embResp)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
r.logger.Error("failed to search embeddings", "error", err, "query", q)
|
r.logger.Error("failed to search embeddings", "error", err, "query", q)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, row := range results {
|
for _, row := range results {
|
||||||
if !seen[row.Slug] {
|
if !seen[row.Slug] {
|
||||||
seen[row.Slug] = true
|
seen[row.Slug] = true
|
||||||
allResults = append(allResults, row)
|
embResults = append(embResults, row)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
reranked := r.RerankResults(allResults, query)
|
// Sort embedding results by distance (lower is better)
|
||||||
if len(reranked) > limit {
|
sort.Slice(embResults, func(i, j int) bool {
|
||||||
reranked = reranked[:limit]
|
return embResults[i].Distance < embResults[j].Distance
|
||||||
|
})
|
||||||
|
|
||||||
|
// Perform keyword search
|
||||||
|
kwResults, err := r.searchKeyword(refined, limit*2)
|
||||||
|
if err != nil {
|
||||||
|
r.logger.Warn("keyword search failed, using only embeddings", "error", err)
|
||||||
|
kwResults = nil
|
||||||
}
|
}
|
||||||
|
// Sort keyword results by distance (already sorted by BM25 score)
|
||||||
|
// kwResults already sorted by distance (lower is better)
|
||||||
|
|
||||||
|
// Combine using Reciprocal Rank Fusion (RRF)
|
||||||
|
const rrfK = 60
|
||||||
|
type scoredRow struct {
|
||||||
|
row models.VectorRow
|
||||||
|
score float64
|
||||||
|
}
|
||||||
|
scoreMap := make(map[string]float64)
|
||||||
|
// Add embedding results
|
||||||
|
for rank, row := range embResults {
|
||||||
|
score := 1.0 / (float64(rank) + rrfK)
|
||||||
|
scoreMap[row.Slug] += score
|
||||||
|
}
|
||||||
|
// Add keyword results
|
||||||
|
for rank, row := range kwResults {
|
||||||
|
score := 1.0 / (float64(rank) + rrfK)
|
||||||
|
scoreMap[row.Slug] += score
|
||||||
|
// Ensure row exists in combined results
|
||||||
|
if _, exists := seen[row.Slug]; !exists {
|
||||||
|
embResults = append(embResults, row)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Create slice of scored rows
|
||||||
|
scoredRows := make([]scoredRow, 0, len(embResults))
|
||||||
|
for _, row := range embResults {
|
||||||
|
score := scoreMap[row.Slug]
|
||||||
|
scoredRows = append(scoredRows, scoredRow{row: row, score: score})
|
||||||
|
}
|
||||||
|
// Sort by descending RRF score
|
||||||
|
sort.Slice(scoredRows, func(i, j int) bool {
|
||||||
|
return scoredRows[i].score > scoredRows[j].score
|
||||||
|
})
|
||||||
|
// Take top limit
|
||||||
|
if len(scoredRows) > limit {
|
||||||
|
scoredRows = scoredRows[:limit]
|
||||||
|
}
|
||||||
|
// Convert back to VectorRow
|
||||||
|
finalResults := make([]models.VectorRow, len(scoredRows))
|
||||||
|
for i, sr := range scoredRows {
|
||||||
|
finalResults[i] = sr.row
|
||||||
|
}
|
||||||
|
// Apply reranking heuristics
|
||||||
|
reranked := r.RerankResults(finalResults, query)
|
||||||
return reranked, nil
|
return reranked, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -437,16 +883,58 @@ var (
|
|||||||
ragOnce sync.Once
|
ragOnce sync.Once
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func (r *RAG) FallbackMessage() string {
|
||||||
|
return r.fallbackMsg
|
||||||
|
}
|
||||||
|
|
||||||
func Init(c *config.Config, l *slog.Logger, s storage.FullRepo) error {
|
func Init(c *config.Config, l *slog.Logger, s storage.FullRepo) error {
|
||||||
|
var err error
|
||||||
ragOnce.Do(func() {
|
ragOnce.Do(func() {
|
||||||
if c == nil || l == nil || s == nil {
|
if c == nil || l == nil || s == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
ragInstance = New(l, s, c)
|
ragInstance, err = New(l, s, c)
|
||||||
})
|
})
|
||||||
return nil
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func GetInstance() *RAG {
|
func GetInstance() *RAG {
|
||||||
return ragInstance
|
return ragInstance
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (r *RAG) resetIdleTimer() {
|
||||||
|
r.idleMu.Lock()
|
||||||
|
defer r.idleMu.Unlock()
|
||||||
|
if r.idleTimer != nil {
|
||||||
|
r.idleTimer.Stop()
|
||||||
|
}
|
||||||
|
r.idleTimer = time.AfterFunc(r.idleTimeout, func() {
|
||||||
|
r.freeONNXMemory()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *RAG) freeONNXMemory() {
|
||||||
|
r.mu.Lock()
|
||||||
|
defer r.mu.Unlock()
|
||||||
|
if onnx, ok := r.embedder.(*ONNXEmbedder); ok {
|
||||||
|
if err := onnx.Destroy(); err != nil {
|
||||||
|
r.logger.Error("failed to free ONNX memory", "error", err)
|
||||||
|
} else {
|
||||||
|
r.logger.Info("freed ONNX VRAM after idle timeout")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *RAG) Destroy() {
|
||||||
|
r.mu.Lock()
|
||||||
|
defer r.mu.Unlock()
|
||||||
|
if r.idleTimer != nil {
|
||||||
|
r.idleTimer.Stop()
|
||||||
|
r.idleTimer = nil
|
||||||
|
}
|
||||||
|
if onnx, ok := r.embedder.(*ONNXEmbedder); ok {
|
||||||
|
if err := onnx.Destroy(); err != nil {
|
||||||
|
r.logger.Error("failed to destroy ONNX embedder", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
232
rag/storage.go
232
rag/storage.go
@@ -1,6 +1,7 @@
|
|||||||
package rag
|
package rag
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"database/sql"
|
||||||
"encoding/binary"
|
"encoding/binary"
|
||||||
"fmt"
|
"fmt"
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
@@ -62,6 +63,17 @@ func (vs *VectorStorage) WriteVector(row *models.VectorRow) error {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
embeddingSize := len(row.Embeddings)
|
||||||
|
// Start transaction
|
||||||
|
tx, err := vs.sqlxDB.Beginx()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
// Serialize the embeddings to binary
|
// Serialize the embeddings to binary
|
||||||
serializedEmbeddings := SerializeVector(row.Embeddings)
|
serializedEmbeddings := SerializeVector(row.Embeddings)
|
||||||
@@ -69,10 +81,102 @@ func (vs *VectorStorage) WriteVector(row *models.VectorRow) error {
|
|||||||
"INSERT INTO %s (embeddings, slug, raw_text, filename) VALUES (?, ?, ?, ?)",
|
"INSERT INTO %s (embeddings, slug, raw_text, filename) VALUES (?, ?, ?, ?)",
|
||||||
tableName,
|
tableName,
|
||||||
)
|
)
|
||||||
if _, err := vs.sqlxDB.Exec(query, serializedEmbeddings, row.Slug, row.RawText, row.FileName); err != nil {
|
if _, err := tx.Exec(query, serializedEmbeddings, row.Slug, row.RawText, row.FileName); err != nil {
|
||||||
vs.logger.Error("failed to write vector", "error", err, "slug", row.Slug)
|
vs.logger.Error("failed to write vector", "error", err, "slug", row.Slug)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
// Insert into FTS table
|
||||||
|
ftsQuery := `INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size) VALUES (?, ?, ?, ?)`
|
||||||
|
if _, err := tx.Exec(ftsQuery, row.Slug, row.RawText, row.FileName, embeddingSize); err != nil {
|
||||||
|
vs.logger.Error("failed to write to FTS table", "error", err, "slug", row.Slug)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = tx.Commit()
|
||||||
|
if err != nil {
|
||||||
|
vs.logger.Error("failed to commit transaction", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteVectors stores multiple embedding vectors in a single transaction
|
||||||
|
func (vs *VectorStorage) WriteVectors(rows []*models.VectorRow) error {
|
||||||
|
if len(rows) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// SQLite has limit of 999 parameters per statement, each row uses 4 parameters
|
||||||
|
const maxBatchSize = 200 // 200 * 4 = 800 < 999
|
||||||
|
if len(rows) > maxBatchSize {
|
||||||
|
// Process in chunks
|
||||||
|
for i := 0; i < len(rows); i += maxBatchSize {
|
||||||
|
end := i + maxBatchSize
|
||||||
|
if end > len(rows) {
|
||||||
|
end = len(rows)
|
||||||
|
}
|
||||||
|
if err := vs.WriteVectors(rows[i:end]); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// All rows should have same embedding size (same model)
|
||||||
|
firstSize := len(rows[0].Embeddings)
|
||||||
|
for i, row := range rows {
|
||||||
|
if len(row.Embeddings) != firstSize {
|
||||||
|
return fmt.Errorf("embedding size mismatch: row %d has size %d, expected %d", i, len(row.Embeddings), firstSize)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tableName, err := vs.getTableName(rows[0].Embeddings)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Start transaction
|
||||||
|
tx, err := vs.sqlxDB.Beginx()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Build batch insert for embeddings table
|
||||||
|
embeddingPlaceholders := make([]string, 0, len(rows))
|
||||||
|
embeddingArgs := make([]any, 0, len(rows)*4)
|
||||||
|
for _, row := range rows {
|
||||||
|
embeddingPlaceholders = append(embeddingPlaceholders, "(?, ?, ?, ?)")
|
||||||
|
embeddingArgs = append(embeddingArgs, SerializeVector(row.Embeddings), row.Slug, row.RawText, row.FileName)
|
||||||
|
}
|
||||||
|
embeddingQuery := fmt.Sprintf(
|
||||||
|
"INSERT INTO %s (embeddings, slug, raw_text, filename) VALUES %s",
|
||||||
|
tableName,
|
||||||
|
strings.Join(embeddingPlaceholders, ", "),
|
||||||
|
)
|
||||||
|
if _, err := tx.Exec(embeddingQuery, embeddingArgs...); err != nil {
|
||||||
|
vs.logger.Error("failed to write vectors batch", "error", err, "batch_size", len(rows))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Build batch insert for FTS table
|
||||||
|
ftsPlaceholders := make([]string, 0, len(rows))
|
||||||
|
ftsArgs := make([]any, 0, len(rows)*4)
|
||||||
|
embeddingSize := len(rows[0].Embeddings)
|
||||||
|
for _, row := range rows {
|
||||||
|
ftsPlaceholders = append(ftsPlaceholders, "(?, ?, ?, ?)")
|
||||||
|
ftsArgs = append(ftsArgs, row.Slug, row.RawText, row.FileName, embeddingSize)
|
||||||
|
}
|
||||||
|
ftsQuery := "INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size) VALUES " +
|
||||||
|
strings.Join(ftsPlaceholders, ", ")
|
||||||
|
if _, err := tx.Exec(ftsQuery, ftsArgs...); err != nil {
|
||||||
|
vs.logger.Error("failed to write FTS batch", "error", err, "batch_size", len(rows))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = tx.Commit()
|
||||||
|
if err != nil {
|
||||||
|
vs.logger.Error("failed to commit transaction", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
vs.logger.Debug("wrote vectors batch", "batch_size", len(rows))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -98,30 +202,25 @@ func (vs *VectorStorage) getTableName(emb []float32) (string, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// SearchClosest finds vectors closest to the query vector using efficient cosine similarity calculation
|
// SearchClosest finds vectors closest to the query vector using efficient cosine similarity calculation
|
||||||
func (vs *VectorStorage) SearchClosest(query []float32) ([]models.VectorRow, error) {
|
func (vs *VectorStorage) SearchClosest(query []float32, limit int) ([]models.VectorRow, error) {
|
||||||
|
if limit <= 0 {
|
||||||
|
limit = 10
|
||||||
|
}
|
||||||
tableName, err := vs.getTableName(query)
|
tableName, err := vs.getTableName(query)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// For better performance, instead of loading all vectors at once,
|
|
||||||
// we'll implement batching and potentially add L2 distance-based pre-filtering
|
|
||||||
// since cosine similarity is related to L2 distance for normalized vectors
|
|
||||||
|
|
||||||
querySQL := "SELECT embeddings, slug, raw_text, filename FROM " + tableName
|
querySQL := "SELECT embeddings, slug, raw_text, filename FROM " + tableName
|
||||||
rows, err := vs.sqlxDB.Query(querySQL)
|
rows, err := vs.sqlxDB.Query(querySQL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer rows.Close()
|
defer rows.Close()
|
||||||
|
|
||||||
// Use a min-heap or simple slice to keep track of top 3 closest vectors
|
|
||||||
type SearchResult struct {
|
type SearchResult struct {
|
||||||
vector models.VectorRow
|
vector models.VectorRow
|
||||||
distance float32
|
distance float32
|
||||||
}
|
}
|
||||||
var topResults []SearchResult
|
var topResults []SearchResult
|
||||||
// Process vectors one by one to avoid loading everything into memory
|
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var (
|
var (
|
||||||
embeddingsBlob []byte
|
embeddingsBlob []byte
|
||||||
@@ -132,12 +231,9 @@ func (vs *VectorStorage) SearchClosest(query []float32) ([]models.VectorRow, err
|
|||||||
vs.logger.Error("failed to scan row", "error", err)
|
vs.logger.Error("failed to scan row", "error", err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
storedEmbeddings := DeserializeVector(embeddingsBlob)
|
storedEmbeddings := DeserializeVector(embeddingsBlob)
|
||||||
|
|
||||||
// Calculate cosine similarity (returns value between -1 and 1, where 1 is most similar)
|
|
||||||
similarity := cosineSimilarity(query, storedEmbeddings)
|
similarity := cosineSimilarity(query, storedEmbeddings)
|
||||||
distance := 1 - similarity // Convert to distance where 0 is most similar
|
distance := 1 - similarity
|
||||||
|
|
||||||
result := SearchResult{
|
result := SearchResult{
|
||||||
vector: models.VectorRow{
|
vector: models.VectorRow{
|
||||||
@@ -149,20 +245,14 @@ func (vs *VectorStorage) SearchClosest(query []float32) ([]models.VectorRow, err
|
|||||||
distance: distance,
|
distance: distance,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add to top results and maintain only top 3
|
|
||||||
topResults = append(topResults, result)
|
topResults = append(topResults, result)
|
||||||
|
|
||||||
// Sort and keep only top 3
|
|
||||||
sort.Slice(topResults, func(i, j int) bool {
|
sort.Slice(topResults, func(i, j int) bool {
|
||||||
return topResults[i].distance < topResults[j].distance
|
return topResults[i].distance < topResults[j].distance
|
||||||
})
|
})
|
||||||
|
if len(topResults) > limit {
|
||||||
if len(topResults) > 3 {
|
topResults = topResults[:limit]
|
||||||
topResults = topResults[:3] // Keep only closest 3
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert back to VectorRow slice
|
|
||||||
results := make([]models.VectorRow, 0, len(topResults))
|
results := make([]models.VectorRow, 0, len(topResults))
|
||||||
for _, result := range topResults {
|
for _, result := range topResults {
|
||||||
result.vector.Distance = result.distance
|
result.vector.Distance = result.distance
|
||||||
@@ -171,6 +261,100 @@ func (vs *VectorStorage) SearchClosest(query []float32) ([]models.VectorRow, err
|
|||||||
return results, nil
|
return results, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetVectorBySlug retrieves a vector row by its slug
|
||||||
|
func (vs *VectorStorage) GetVectorBySlug(slug string) (*models.VectorRow, error) {
|
||||||
|
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
||||||
|
for _, size := range embeddingSizes {
|
||||||
|
table := fmt.Sprintf("embeddings_%d", size)
|
||||||
|
query := fmt.Sprintf("SELECT embeddings, slug, raw_text, filename FROM %s WHERE slug = ?", table)
|
||||||
|
row := vs.sqlxDB.QueryRow(query, slug)
|
||||||
|
var (
|
||||||
|
embeddingsBlob []byte
|
||||||
|
retrievedSlug, rawText, fileName string
|
||||||
|
)
|
||||||
|
if err := row.Scan(&embeddingsBlob, &retrievedSlug, &rawText, &fileName); err != nil {
|
||||||
|
// No row in this table, continue to next size
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
storedEmbeddings := DeserializeVector(embeddingsBlob)
|
||||||
|
return &models.VectorRow{
|
||||||
|
Embeddings: storedEmbeddings,
|
||||||
|
Slug: retrievedSlug,
|
||||||
|
RawText: rawText,
|
||||||
|
FileName: fileName,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("vector with slug %s not found", slug)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchKeyword performs full-text search using FTS5
|
||||||
|
func (vs *VectorStorage) SearchKeyword(query string, limit int) ([]models.VectorRow, error) {
|
||||||
|
// Use FTS5 bm25 ranking. bm25 returns negative values where more negative is better.
|
||||||
|
// We'll order by bm25 (ascending) and limit.
|
||||||
|
ftsQuery := `SELECT slug, raw_text, filename, bm25(fts_embeddings) as score
|
||||||
|
FROM fts_embeddings
|
||||||
|
WHERE fts_embeddings MATCH ?
|
||||||
|
ORDER BY score
|
||||||
|
LIMIT ?`
|
||||||
|
|
||||||
|
// Try original query first
|
||||||
|
rows, err := vs.sqlxDB.Query(ftsQuery, query, limit)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("FTS search failed: %w", err)
|
||||||
|
}
|
||||||
|
results, err := vs.scanRows(rows)
|
||||||
|
rows.Close()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no results and query contains multiple terms, try OR fallback
|
||||||
|
if len(results) == 0 && strings.Contains(query, " ") && !strings.Contains(strings.ToUpper(query), " OR ") {
|
||||||
|
// Build OR query: term1 OR term2 OR term3
|
||||||
|
terms := strings.Fields(query)
|
||||||
|
if len(terms) > 1 {
|
||||||
|
orQuery := strings.Join(terms, " OR ")
|
||||||
|
rows, err := vs.sqlxDB.Query(ftsQuery, orQuery, limit)
|
||||||
|
if err != nil {
|
||||||
|
// Return original empty results rather than error
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
orResults, err := vs.scanRows(rows)
|
||||||
|
rows.Close()
|
||||||
|
if err == nil {
|
||||||
|
results = orResults
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// scanRows converts SQL rows to VectorRow slice
|
||||||
|
func (vs *VectorStorage) scanRows(rows *sql.Rows) ([]models.VectorRow, error) {
|
||||||
|
var results []models.VectorRow
|
||||||
|
for rows.Next() {
|
||||||
|
var slug, rawText, fileName string
|
||||||
|
var score float64
|
||||||
|
if err := rows.Scan(&slug, &rawText, &fileName, &score); err != nil {
|
||||||
|
vs.logger.Error("failed to scan FTS row", "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Convert BM25 score to distance-like metric (lower is better)
|
||||||
|
// BM25 is negative, more negative is better. We'll normalize to positive distance.
|
||||||
|
distance := float32(-score) // Make positive (since score is negative)
|
||||||
|
if distance < 0 {
|
||||||
|
distance = 0
|
||||||
|
}
|
||||||
|
results = append(results, models.VectorRow{
|
||||||
|
Slug: slug,
|
||||||
|
RawText: rawText,
|
||||||
|
FileName: fileName,
|
||||||
|
Distance: distance,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
// ListFiles returns a list of all loaded files
|
// ListFiles returns a list of all loaded files
|
||||||
func (vs *VectorStorage) ListFiles() ([]string, error) {
|
func (vs *VectorStorage) ListFiles() ([]string, error) {
|
||||||
fileLists := make([][]string, 0)
|
fileLists := make([][]string, 0)
|
||||||
@@ -215,6 +399,10 @@ func (vs *VectorStorage) ListFiles() ([]string, error) {
|
|||||||
// RemoveEmbByFileName removes all embeddings associated with a specific filename
|
// RemoveEmbByFileName removes all embeddings associated with a specific filename
|
||||||
func (vs *VectorStorage) RemoveEmbByFileName(filename string) error {
|
func (vs *VectorStorage) RemoveEmbByFileName(filename string) error {
|
||||||
var errors []string
|
var errors []string
|
||||||
|
// Delete from FTS table first
|
||||||
|
if _, err := vs.sqlxDB.Exec("DELETE FROM fts_embeddings WHERE filename = ?", filename); err != nil {
|
||||||
|
errors = append(errors, err.Error())
|
||||||
|
}
|
||||||
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
||||||
for _, size := range embeddingSizes {
|
for _, size := range embeddingSizes {
|
||||||
table := fmt.Sprintf("embeddings_%d", size)
|
table := fmt.Sprintf("embeddings_%d", size)
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ func historyToSJSON(msgs []models.RoleMsg) (string, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func exportChat() error {
|
func exportChat() error {
|
||||||
data, err := json.MarshalIndent(chatBody.Messages, "", " ")
|
data, err := json.MarshalIndent(chatBody.GetMessages(), "", " ")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -54,7 +54,7 @@ func importChat(filename string) error {
|
|||||||
if _, ok := chatMap[activeChatName]; !ok {
|
if _, ok := chatMap[activeChatName]; !ok {
|
||||||
addNewChat(activeChatName)
|
addNewChat(activeChatName)
|
||||||
}
|
}
|
||||||
chatBody.Messages = messages
|
chatBody.SetMessages(messages)
|
||||||
cfg.AssistantRole = messages[1].Role
|
cfg.AssistantRole = messages[1].Role
|
||||||
if cfg.AssistantRole == cfg.UserRole {
|
if cfg.AssistantRole == cfg.UserRole {
|
||||||
cfg.AssistantRole = messages[2].Role
|
cfg.AssistantRole = messages[2].Role
|
||||||
@@ -168,8 +168,3 @@ func copyToClipboard(text string) error {
|
|||||||
cmd.Stdin = strings.NewReader(text)
|
cmd.Stdin = strings.NewReader(text)
|
||||||
return cmd.Run()
|
return cmd.Run()
|
||||||
}
|
}
|
||||||
|
|
||||||
func notifyUser(topic, message string) error {
|
|
||||||
cmd := exec.Command("notify-send", topic, message)
|
|
||||||
return cmd.Run()
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -10,16 +10,18 @@ import (
|
|||||||
//go:embed migrations/*
|
//go:embed migrations/*
|
||||||
var migrationsFS embed.FS
|
var migrationsFS embed.FS
|
||||||
|
|
||||||
func (p *ProviderSQL) Migrate() {
|
func (p *ProviderSQL) Migrate() error {
|
||||||
// Get the embedded filesystem
|
// Get the embedded filesystem
|
||||||
migrationsDir, err := fs.Sub(migrationsFS, "migrations")
|
migrationsDir, err := fs.Sub(migrationsFS, "migrations")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.logger.Error("Failed to get embedded migrations directory;", "error", err)
|
p.logger.Error("Failed to get embedded migrations directory;", "error", err)
|
||||||
|
return fmt.Errorf("failed to get embedded migrations directory: %w", err)
|
||||||
}
|
}
|
||||||
// List all .up.sql files
|
// List all .up.sql files
|
||||||
files, err := migrationsFS.ReadDir("migrations")
|
files, err := migrationsFS.ReadDir("migrations")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.logger.Error("Failed to read migrations directory;", "error", err)
|
p.logger.Error("Failed to read migrations directory;", "error", err)
|
||||||
|
return fmt.Errorf("failed to read migrations directory: %w", err)
|
||||||
}
|
}
|
||||||
// Execute each .up.sql file
|
// Execute each .up.sql file
|
||||||
for _, file := range files {
|
for _, file := range files {
|
||||||
@@ -27,11 +29,12 @@ func (p *ProviderSQL) Migrate() {
|
|||||||
err := p.executeMigration(migrationsDir, file.Name())
|
err := p.executeMigration(migrationsDir, file.Name())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.logger.Error("Failed to execute migration %s: %v", file.Name(), err)
|
p.logger.Error("Failed to execute migration %s: %v", file.Name(), err)
|
||||||
panic(err)
|
return fmt.Errorf("failed to execute migration %s: %w", file.Name(), err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
p.logger.Debug("All migrations executed successfully!")
|
p.logger.Debug("All migrations executed successfully!")
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *ProviderSQL) executeMigration(migrationsDir fs.FS, fileName string) error {
|
func (p *ProviderSQL) executeMigration(migrationsDir fs.FS, fileName string) error {
|
||||||
|
|||||||
2
storage/migrations/003_add_fts.down.sql
Normal file
2
storage/migrations/003_add_fts.down.sql
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
-- Drop FTS5 virtual table
|
||||||
|
DROP TABLE IF EXISTS fts_embeddings;
|
||||||
15
storage/migrations/003_add_fts.up.sql
Normal file
15
storage/migrations/003_add_fts.up.sql
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
-- Create FTS5 virtual table for full-text search
|
||||||
|
CREATE VIRTUAL TABLE IF NOT EXISTS fts_embeddings USING fts5(
|
||||||
|
slug UNINDEXED,
|
||||||
|
raw_text,
|
||||||
|
filename UNINDEXED,
|
||||||
|
embedding_size UNINDEXED,
|
||||||
|
tokenize='porter unicode61' -- Use porter stemmer and unicode61 tokenizer
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Create triggers to maintain FTS table when embeddings are inserted/deleted
|
||||||
|
-- Note: We'll handle inserts/deletes programmatically for simplicity
|
||||||
|
-- but triggers could be added here if needed.
|
||||||
|
|
||||||
|
-- Indexes for performance (FTS5 manages its own indexes)
|
||||||
|
-- No additional indexes needed for FTS5 virtual table.
|
||||||
2
storage/migrations/004_populate_fts.down.sql
Normal file
2
storage/migrations/004_populate_fts.down.sql
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
-- Clear FTS table (optional)
|
||||||
|
DELETE FROM fts_embeddings;
|
||||||
26
storage/migrations/004_populate_fts.up.sql
Normal file
26
storage/migrations/004_populate_fts.up.sql
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
-- Populate FTS table with existing embeddings
|
||||||
|
DELETE FROM fts_embeddings;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 384 FROM embeddings_384;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 768 FROM embeddings_768;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 1024 FROM embeddings_1024;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 1536 FROM embeddings_1536;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 2048 FROM embeddings_2048;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 3072 FROM embeddings_3072;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 4096 FROM embeddings_4096;
|
||||||
|
|
||||||
|
INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 5120 FROM embeddings_5120;
|
||||||
@@ -102,8 +102,27 @@ func NewProviderSQL(dbPath string, logger *slog.Logger) FullRepo {
|
|||||||
logger.Error("failed to open db connection", "error", err)
|
logger.Error("failed to open db connection", "error", err)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
// Enable WAL mode for better concurrency and performance
|
||||||
|
if _, err := db.Exec("PRAGMA journal_mode = WAL;"); err != nil {
|
||||||
|
logger.Warn("failed to enable WAL mode", "error", err)
|
||||||
|
}
|
||||||
|
if _, err := db.Exec("PRAGMA synchronous = NORMAL;"); err != nil {
|
||||||
|
logger.Warn("failed to set synchronous mode", "error", err)
|
||||||
|
}
|
||||||
|
// Increase cache size for better performance
|
||||||
|
if _, err := db.Exec("PRAGMA cache_size = -2000;"); err != nil {
|
||||||
|
logger.Warn("failed to set cache size", "error", err)
|
||||||
|
}
|
||||||
|
// Log actual journal mode for debugging
|
||||||
|
var journalMode string
|
||||||
|
if err := db.QueryRow("PRAGMA journal_mode;").Scan(&journalMode); err == nil {
|
||||||
|
logger.Debug("SQLite journal mode", "mode", journalMode)
|
||||||
|
}
|
||||||
p := ProviderSQL{db: db, logger: logger}
|
p := ProviderSQL{db: db, logger: logger}
|
||||||
p.Migrate()
|
if err := p.Migrate(); err != nil {
|
||||||
|
logger.Error("migration failed, app cannot start", "error", err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import (
|
|||||||
"encoding/binary"
|
"encoding/binary"
|
||||||
"fmt"
|
"fmt"
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
|
"sort"
|
||||||
"unsafe"
|
"unsafe"
|
||||||
|
|
||||||
"github.com/jmoiron/sqlx"
|
"github.com/jmoiron/sqlx"
|
||||||
@@ -11,7 +12,7 @@ import (
|
|||||||
|
|
||||||
type VectorRepo interface {
|
type VectorRepo interface {
|
||||||
WriteVector(*models.VectorRow) error
|
WriteVector(*models.VectorRow) error
|
||||||
SearchClosest(q []float32) ([]models.VectorRow, error)
|
SearchClosest(q []float32, limit int) ([]models.VectorRow, error)
|
||||||
ListFiles() ([]string, error)
|
ListFiles() ([]string, error)
|
||||||
RemoveEmbByFileName(filename string) error
|
RemoveEmbByFileName(filename string) error
|
||||||
DB() *sqlx.DB
|
DB() *sqlx.DB
|
||||||
@@ -79,7 +80,7 @@ func (p ProviderSQL) WriteVector(row *models.VectorRow) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p ProviderSQL) SearchClosest(q []float32) ([]models.VectorRow, error) {
|
func (p ProviderSQL) SearchClosest(q []float32, limit int) ([]models.VectorRow, error) {
|
||||||
tableName, err := fetchTableName(q)
|
tableName, err := fetchTableName(q)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -94,7 +95,7 @@ func (p ProviderSQL) SearchClosest(q []float32) ([]models.VectorRow, error) {
|
|||||||
vector models.VectorRow
|
vector models.VectorRow
|
||||||
distance float32
|
distance float32
|
||||||
}
|
}
|
||||||
var topResults []SearchResult
|
var allResults []SearchResult
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var (
|
var (
|
||||||
embeddingsBlob []byte
|
embeddingsBlob []byte
|
||||||
@@ -119,28 +120,19 @@ func (p ProviderSQL) SearchClosest(q []float32) ([]models.VectorRow, error) {
|
|||||||
},
|
},
|
||||||
distance: distance,
|
distance: distance,
|
||||||
}
|
}
|
||||||
|
allResults = append(allResults, result)
|
||||||
// Add to top results and maintain only top results
|
}
|
||||||
topResults = append(topResults, result)
|
// Sort by distance
|
||||||
|
sort.Slice(allResults, func(i, j int) bool {
|
||||||
// Sort and keep only top results
|
return allResults[i].distance < allResults[j].distance
|
||||||
// We'll keep the top 3 closest vectors
|
})
|
||||||
if len(topResults) > 3 {
|
// Truncate to limit
|
||||||
// Simple sort and truncate to maintain only 3 best matches
|
if len(allResults) > limit {
|
||||||
for i := 0; i < len(topResults); i++ {
|
allResults = allResults[:limit]
|
||||||
for j := i + 1; j < len(topResults); j++ {
|
|
||||||
if topResults[i].distance > topResults[j].distance {
|
|
||||||
topResults[i], topResults[j] = topResults[j], topResults[i]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
topResults = topResults[:3]
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert back to VectorRow slice
|
// Convert back to VectorRow slice
|
||||||
results := make([]models.VectorRow, len(topResults))
|
results := make([]models.VectorRow, len(allResults))
|
||||||
for i, result := range topResults {
|
for i, result := range allResults {
|
||||||
result.vector.Distance = result.distance
|
result.vector.Distance = result.distance
|
||||||
results[i] = result.vector
|
results[i] = result.vector
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
{
|
|
||||||
"sys_prompt": "A game of cluedo. Players are {{user}}, {{char}}, {{char2}};\n\nrooms: hall, lounge, dinning room kitchen, ballroom, conservatory, billiard room, library, study;\nweapons: candlestick, dagger, lead pipe, revolver, rope, spanner;\npeople: miss Scarlett, colonel Mustard, mrs. White, reverend Green, mrs. Peacock, professor Plum;\n\nA murder happened in a mansion with 9 rooms. Victim is dr. Black.\nPlayers goal is to find out who commited a murder, in what room and with what weapon.\nWeapons, people and rooms not involved in murder are distributed between players (as cards) by tool agent.\nThe objective of the game is to deduce the details of the murder. There are six characters, six murder weapons, and nine rooms, leaving the players with 324 possibilities. As soon as a player enters a room, they may make a suggestion as to the details, naming a suspect, the room they are in, and the weapon. For example: \"I suspect Professor Plum, in the Dining Room, with the candlestick\".\nOnce a player makes a suggestion, the others are called upon to disprove it.\nBefore the player's move, tool agent will remind that players their cards. There are two types of moves: making a suggestion (suggestion_move) and disproving other player suggestion (evidence_move);\nIn this version player wins when the correct details are named in the suggestion_move.\n\n<example_game>\n{{user}}:\nlet's start a game of cluedo!\ntool: cards of {{char}} are 'LEAD PIPE', 'BALLROOM', 'CONSERVATORY', 'STUDY', 'Mrs. White'; suggestion_move;\n{{char}}:\n(putting miss Scarlet into the Hall with the Revolver) \"I suspect miss Scarlett, in the Hall, with the revolver.\"\ntool: cards of {{char2}} are 'SPANNER', 'DAGGER', 'Professor Plum', 'LIBRARY', 'Mrs. Peacock'; evidence_move;\n{{char2}}:\n\"No objections.\" (no cards matching the suspicion of {{char}})\ntool: cards of {{user}} are 'Colonel Mustard', 'Miss Scarlett', 'DINNING ROOM', 'CANDLESTICK', 'HALL'; evidence_move;\n{{user}}:\n\"I object. 
Miss Scarlett is innocent.\" (shows card with 'Miss Scarlett')\ntool: cards of {{char2}} are 'SPANNER', 'DAGGER', 'Professor Plum', 'LIBRARY', 'Mrs. Peacock'; suggestion_move;\n{{char2}}:\n*So it was not Miss Scarlett, good to know.*\n(moves Mrs. White to the Billiard Room) \"It might have been Mrs. White, in the Billiard Room, with the Revolver.\"\ntool: cards of {{user}} are 'Colonel Mustard', 'Miss Scarlett', 'DINNING ROOM', 'CANDLESTICK', 'HALL'; evidence_move;\n{{user}}:\n(no matching cards for the assumption of {{char2}}) \"Sounds possible to me.\"\ntool: cards of {{char}} are 'LEAD PIPE', 'BALLROOM', 'CONSERVATORY', 'STUDY', 'Mrs. White'; evidence_move;\n{{char}}:\n(shows Mrs. White card) \"No. Was not Mrs. White\"\ntool: cards of {{user}} are 'Colonel Mustard', 'Miss Scarlett', 'DINNING ROOM', 'CANDLESTICK', 'HALL'; suggestion_move;\n{{user}}:\n*So not Mrs. White...* (moves Reverend Green into the Billiard Room) \"I suspect Reverend Green, in the Billiard Room, with the Revolver.\"\ntool: Correct. It was Reverend Green in the Billiard Room, with the revolver. {{user}} wins.\n</example_game>",
|
|
||||||
"role": "CluedoPlayer",
|
|
||||||
"role2": "CluedoEnjoyer",
|
|
||||||
"filepath": "sysprompts/cluedo.json",
|
|
||||||
"first_msg": "Hey guys! Want to play cluedo?"
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"sys_prompt": "You are an expert software engineering assistant. Your goal is to help users with coding tasks, debugging, refactoring, and software development.\n\n## Core Principles\n1. **Security First**: Never expose secrets, keys, or credentials. Never commit sensitive data.\n2. **No Git Actions**: You can READ git info (status, log, diff) for context, but NEVER perform git actions (commit, add, push, checkout, reset, rm, etc.). Let the user handle all git operations.\n3. **Explore Before Execute**: Always understand the codebase structure before making changes.\n4. **Follow Conventions**: Match existing code style, patterns, and frameworks used in the project.\n5. **Be Concise**: Minimize output tokens while maintaining quality. Avoid unnecessary explanations.\n\n## Workflow for Complex Tasks\nFor multi-step tasks, ALWAYS use the todo system to track progress:\n\n1. **Create Todo List**: At the start of complex tasks, use `todo_create` to break down work into actionable items.\n2. **Update Progress**: Mark items as `in_progress` when working on them, and `completed` when done.\n3. 
**Check Status**: Use `todo_read` to review your progress.\n\nExample workflow:\n- User: \"Add user authentication to this app\"\n- You: Create todos: [\"Analyze existing auth structure\", \"Check frameworks in use\", \"Implement auth middleware\", \"Add login endpoints\", \"Test implementation\"]\n\n## Task Execution Flow\n\n### Phase 1: Exploration (Always First)\n- Use `file_list` to understand directory structure (path defaults to FilePickerDir if not specified)\n- Use `file_read` to examine relevant files (paths are relative to FilePickerDir unless starting with `/`)\n- Use `execute_command` with `grep`/`find` to search for patterns\n- Check `README` or documentation files\n- Identify: frameworks, conventions, testing approach\n- **Git reads allowed**: You may use `git status`, `git log`, `git diff` for context, but only to inform your work\n- **Path handling**: Relative paths are resolved against FilePickerDir (configurable via Alt+O). Use absolute paths (starting with `/`) to bypass FilePickerDir.\n\n### Phase 2: Planning\n- For complex tasks: create todo items\n- Identify files that need modification\n- Plan your approach following existing patterns\n\n### Phase 3: Implementation\n- Make changes using appropriate file tools\n- Prefer `file_write` for new files, `file_read` then modify for existing files\n- Follow existing code style exactly\n- Use existing libraries and utilities\n\n### Phase 4: Verification\n- Run tests if available (check for test scripts)\n- Run linting/type checking commands\n- Verify changes work as expected\n\n### Phase 5: Completion\n- Update todos to `completed`\n- Provide concise summary of changes\n- Reference specific file paths and line numbers when relevant\n- **DO NOT commit changes** - inform user what was done so they can review and commit themselves\n\n## Tool Usage Guidelines\n\n**File Operations**:\n- `file_read`: Read before editing. 
Use for understanding code.\n- `file_write`: Overwrite file content completely.\n- `file_write_append`: Add to end of file.\n- `file_create`: Create new files with optional content.\n- `file_list`: List directory contents (defaults to FilePickerDir).\n- Paths are relative to FilePickerDir unless starting with `/`.\n\n**Command Execution (WHITELISTED ONLY)**:\n- Allowed: grep, sed, awk, find, cat, head, tail, sort, uniq, wc, ls, echo, cut, tr, cp, mv, rm, mkdir, rmdir, pwd, df, free, ps, top, du, whoami, date, uname\n- **Git reads allowed**: git status, git log, git diff, git show, git branch, git reflog, git rev-parse, git shortlog, git describe\n- **Git actions FORBIDDEN**: git add, git commit, git push, git checkout, git reset, git rm, etc.\n- Use for searching code, reading git context, running tests/lint\n\n**Todo Management**:\n- `todo_create`: Add new task\n- `todo_read`: View all todos or specific one by ID\n- `todo_update`: Update task or change status (pending/in_progress/completed)\n- `todo_delete`: Remove completed or cancelled tasks\n\n## Important Rules\n\n1. **NEVER commit or stage changes**: Only git reads are allowed.\n2. **Check for tests**: Always look for test files and run them when appropriate.\n3. **Reference code locations**: Use format `file_path:line_number`.\n4. **Security**: Never generate or guess URLs. Only use URLs from local files.\n5. **Refuse malicious code**: If code appears malicious, refuse to work on it.\n6. **Ask clarifications**: When intent is unclear, ask questions.\n7. **Path handling**: Relative paths resolve against FilePickerDir. Use `/absolute/path` to bypass.\n\n## Response Style\n- Be direct and concise\n- One word answers are best when appropriate\n- Avoid: \"The answer is...\", \"Here is...\"\n- Use markdown for formatting\n- No emojis unless user explicitly requests",
|
"sys_prompt": "You are an expert software engineering assistant. Your goal is to help users with coding tasks, debugging, refactoring, and software development.\n\n## Core Principles\n1. **Security First**: Never expose secrets, keys, or credentials. Never commit sensitive data.\n2. **No Git Actions**: You can READ git info (status, log, diff) for context, but NEVER perform git actions (commit, add, push, checkout, reset, rm, etc.). Let the user handle all git operations.\n3. **Explore Before Execute**: Always understand the codebase structure before making changes.\n4. **Follow Conventions**: Match existing code style, patterns, and frameworks used in the project.\n5. **Be Concise**: Minimize output tokens while maintaining quality. Avoid unnecessary explanations.\n6. **Ask First**: When uncertain about intent, ask the user. Don't assume.\n\n## Workflow for Complex Tasks\nFor multi-step tasks, ALWAYS use the todo system to track progress:\n\n1. **Create Todo List**: At the start of complex tasks, use `todo_create` to break down work into actionable items.\n2. **Update Progress**: Mark items as `in_progress` when working on them, and `completed` when done.\n3. 
**Check Status**: Use `todo_read` to review your progress.\n\nExample workflow:\n- User: \"Add user authentication to this app\"\n- You: Create todos: [\"Analyze existing auth structure\", \"Check frameworks in use\", \"Implement auth middleware\", \"Add login endpoints\", \"Test implementation\"]\n\n## Task Execution Flow\n\n### Phase 1: Exploration (Always First)\n- Use `file_list` to understand directory structure (path defaults to FilePickerDir if not specified)\n- Use `file_read` to examine relevant files (paths are relative to FilePickerDir unless starting with `/`)\n- Use `execute_command` with `grep`/`find` to search for patterns\n- Check README, Makefile, package.json, or similar for build/test commands\n- Identify: frameworks, conventions, testing approach, lint/typecheck commands\n- **Git reads allowed**: You may use `git status`, `git log`, `git diff` for context, but only to inform your work\n- **Path handling**: Relative paths resolve against FilePickerDir; absolute paths (starting with `/`) bypass it\n\n### Phase 2: Planning\n- For complex tasks: create todo items\n- Identify files that need modification\n- Plan your approach following existing patterns\n\n### Phase 3: Implementation\n- Make changes using appropriate file tools\n- Prefer `file_write` for new files, `file_read` then edit for existing files\n- Follow existing code style exactly\n- Use existing libraries and utilities\n\n### Phase 4: Verification\n- Run tests if available (check for test scripts in README/Makefile)\n- Run linting/type checking commands\n- Verify changes work as expected\n\n### Phase 5: Completion\n- Update todos to `completed`\n- Provide concise summary of changes\n- Reference specific file paths and line numbers when relevant\n- **DO NOT commit changes** - inform user what was done so they can review and commit themselves\n\n## Command Execution\n- Use `execute_command` with a single string containing command and arguments (e.g., `go run main.go`, `ls -la`, `cd 
/tmp`)\n- Use `cd /path` to change the working directory for file operations",
|
||||||
"role": "CodingAssistant",
|
"role": "CodingAssistant",
|
||||||
"filepath": "sysprompts/coding_assistant.json",
|
"filepath": "sysprompts/coding_assistant.json",
|
||||||
"first_msg": "Hello! I'm your coding assistant. I can help you with software engineering tasks like writing code, debugging, refactoring, and exploring codebases. I work best when you give me specific tasks, and for complex work, I'll create a todo list to track my progress. What would you like to work on?"
|
"first_msg": "Hello! I'm your coding assistant. Give me a specific task and I'll get started. For complex work, I'll track progress with todos."
|
||||||
}
|
}
|
||||||
|
|||||||
107
tables.go
107
tables.go
@@ -128,8 +128,8 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
|
|||||||
pages.RemovePage(historyPage)
|
pages.RemovePage(historyPage)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
chatBody.Messages = history
|
chatBody.SetMessages(history)
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
activeChatName = selectedChat
|
activeChatName = selectedChat
|
||||||
pages.RemovePage(historyPage)
|
pages.RemovePage(historyPage)
|
||||||
return
|
return
|
||||||
@@ -147,76 +147,67 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
|
|||||||
if err := store.RemoveChat(sc.ID); err != nil {
|
if err := store.RemoveChat(sc.ID); err != nil {
|
||||||
logger.Error("failed to remove chat from db", "chat_id", sc.ID, "chat_name", sc.Name)
|
logger.Error("failed to remove chat from db", "chat_id", sc.ID, "chat_name", sc.Name)
|
||||||
}
|
}
|
||||||
if err := notifyUser("chat deleted", selectedChat+" was deleted"); err != nil {
|
showToast("chat deleted", selectedChat+" was deleted")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
// load last chat
|
// load last chat
|
||||||
chatBody.Messages = loadOldChatOrGetNew()
|
chatBody.SetMessages(loadOldChatOrGetNew())
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
pages.RemovePage(historyPage)
|
pages.RemovePage(historyPage)
|
||||||
return
|
return
|
||||||
case "update card":
|
case "update card":
|
||||||
// save updated card
|
// save updated card
|
||||||
fi := strings.Index(selectedChat, "_")
|
fi := strings.Index(selectedChat, "_")
|
||||||
agentName := selectedChat[fi+1:]
|
agentName := selectedChat[fi+1:]
|
||||||
cc, ok := sysMap[agentName]
|
cc := GetCardByRole(agentName)
|
||||||
if !ok {
|
if cc == nil {
|
||||||
logger.Warn("no such card", "agent", agentName)
|
logger.Warn("no such card", "agent", agentName)
|
||||||
//no:lint
|
showToast("error", "no such card: "+agentName)
|
||||||
if err := notifyUser("error", "no such card: "+agentName); err != nil {
|
|
||||||
logger.Warn("failed ot notify", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// if chatBody.Messages[0].Role != "system" || chatBody.Messages[1].Role != agentName {
|
if msg0, ok := chatBody.GetMessageAt(0); ok {
|
||||||
// if err := notifyUser("error", "unexpected chat structure; card: "+agentName); err != nil {
|
cc.SysPrompt = msg0.Content
|
||||||
// logger.Warn("failed ot notify", "error", err)
|
}
|
||||||
// }
|
if msg1, ok := chatBody.GetMessageAt(1); ok {
|
||||||
// return
|
cc.FirstMsg = msg1.Content
|
||||||
// }
|
}
|
||||||
// change sys_prompt + first msg
|
|
||||||
cc.SysPrompt = chatBody.Messages[0].Content
|
|
||||||
cc.FirstMsg = chatBody.Messages[1].Content
|
|
||||||
if err := pngmeta.WriteToPng(cc.ToSpec(cfg.UserRole), cc.FilePath, cc.FilePath); err != nil {
|
if err := pngmeta.WriteToPng(cc.ToSpec(cfg.UserRole), cc.FilePath, cc.FilePath); err != nil {
|
||||||
logger.Error("failed to write charcard",
|
logger.Error("failed to write charcard", "error", err)
|
||||||
"error", err)
|
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
case "move sysprompt onto 1st msg":
|
case "move sysprompt onto 1st msg":
|
||||||
chatBody.Messages[1].Content = chatBody.Messages[0].Content + chatBody.Messages[1].Content
|
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||||
chatBody.Messages[0].Content = rpDefenitionSysMsg
|
if len(cb.Messages) >= 2 {
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
cb.Messages[1].Content = cb.Messages[0].Content + cb.Messages[1].Content
|
||||||
|
cb.Messages[0].Content = rpDefenitionSysMsg
|
||||||
|
}
|
||||||
|
})
|
||||||
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
activeChatName = selectedChat
|
activeChatName = selectedChat
|
||||||
pages.RemovePage(historyPage)
|
pages.RemovePage(historyPage)
|
||||||
return
|
return
|
||||||
case "new_chat_from_card":
|
case "new_chat_from_card":
|
||||||
// Reread card from file and start fresh chat
|
|
||||||
fi := strings.Index(selectedChat, "_")
|
fi := strings.Index(selectedChat, "_")
|
||||||
agentName := selectedChat[fi+1:]
|
agentName := selectedChat[fi+1:]
|
||||||
cc, ok := sysMap[agentName]
|
cc := GetCardByRole(agentName)
|
||||||
if !ok {
|
if cc == nil {
|
||||||
logger.Warn("no such card", "agent", agentName)
|
logger.Warn("no such card", "agent", agentName)
|
||||||
if err := notifyUser("error", "no such card: "+agentName); err != nil {
|
showToast("error", "no such card: "+agentName)
|
||||||
logger.Warn("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Reload card from disk
|
|
||||||
newCard, err := pngmeta.ReadCard(cc.FilePath, cfg.UserRole)
|
newCard, err := pngmeta.ReadCard(cc.FilePath, cfg.UserRole)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to reload charcard", "path", cc.FilePath, "error", err)
|
logger.Error("failed to reload charcard", "path", cc.FilePath, "error", err)
|
||||||
newCard, err = pngmeta.ReadCardJson(cc.FilePath)
|
newCard, err = pngmeta.ReadCardJson(cc.FilePath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to reload charcard", "path", cc.FilePath, "error", err)
|
logger.Error("failed to reload charcard", "path", cc.FilePath, "error", err)
|
||||||
if err := notifyUser("error", "failed to reload card: "+cc.FilePath); err != nil {
|
showToast("error", "failed to reload card: "+cc.FilePath)
|
||||||
logger.Warn("failed to notify", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Update sysMap with fresh card data
|
if newCard.ID == "" {
|
||||||
sysMap[agentName] = newCard
|
newCard.ID = models.ComputeCardID(newCard.Role, newCard.FilePath)
|
||||||
// fetching sysprompt and first message anew from the card
|
}
|
||||||
|
sysMap[newCard.ID] = newCard
|
||||||
|
roleToID[newCard.Role] = newCard.ID
|
||||||
startNewChat(false)
|
startNewChat(false)
|
||||||
pages.RemovePage(historyPage)
|
pages.RemovePage(historyPage)
|
||||||
return
|
return
|
||||||
@@ -457,13 +448,13 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
|
|||||||
go func() {
|
go func() {
|
||||||
if err := ragger.LoadRAG(fpath); err != nil {
|
if err := ragger.LoadRAG(fpath); err != nil {
|
||||||
logger.Error("failed to embed file", "chat", fpath, "error", err)
|
logger.Error("failed to embed file", "chat", fpath, "error", err)
|
||||||
_ = notifyUser("RAG", "failed to embed file; error: "+err.Error())
|
showToast("RAG", "failed to embed file; error: "+err.Error())
|
||||||
app.QueueUpdate(func() {
|
app.QueueUpdate(func() {
|
||||||
pages.RemovePage(RAGPage)
|
pages.RemovePage(RAGPage)
|
||||||
})
|
})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
_ = notifyUser("RAG", "file loaded successfully")
|
showToast("RAG", "file loaded successfully")
|
||||||
app.QueueUpdate(func() {
|
app.QueueUpdate(func() {
|
||||||
pages.RemovePage(RAGPage)
|
pages.RemovePage(RAGPage)
|
||||||
})
|
})
|
||||||
@@ -474,13 +465,13 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
|
|||||||
go func() {
|
go func() {
|
||||||
if err := ragger.RemoveFile(f.name); err != nil {
|
if err := ragger.RemoveFile(f.name); err != nil {
|
||||||
logger.Error("failed to unload file from RAG", "filename", f.name, "error", err)
|
logger.Error("failed to unload file from RAG", "filename", f.name, "error", err)
|
||||||
_ = notifyUser("RAG", "failed to unload file; error: "+err.Error())
|
showToast("RAG", "failed to unload file; error: "+err.Error())
|
||||||
app.QueueUpdate(func() {
|
app.QueueUpdate(func() {
|
||||||
pages.RemovePage(RAGPage)
|
pages.RemovePage(RAGPage)
|
||||||
})
|
})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
_ = notifyUser("RAG", "file unloaded successfully")
|
showToast("RAG", "file unloaded successfully")
|
||||||
app.QueueUpdate(func() {
|
app.QueueUpdate(func() {
|
||||||
pages.RemovePage(RAGPage)
|
pages.RemovePage(RAGPage)
|
||||||
})
|
})
|
||||||
@@ -492,9 +483,7 @@ func makeRAGTable(fileList []string, loadedFiles []string) *tview.Flex {
|
|||||||
logger.Error("failed to delete file", "filename", fpath, "error", err)
|
logger.Error("failed to delete file", "filename", fpath, "error", err)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if err := notifyUser("chat deleted", fpath+" was deleted"); err != nil {
|
showToast("chat deleted", fpath+" was deleted")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
default:
|
default:
|
||||||
pages.RemovePage(RAGPage)
|
pages.RemovePage(RAGPage)
|
||||||
@@ -529,8 +518,8 @@ func makeAgentTable(agentList []string) *tview.Table {
|
|||||||
SetSelectable(false))
|
SetSelectable(false))
|
||||||
case 1:
|
case 1:
|
||||||
if actions[c-1] == "filepath" {
|
if actions[c-1] == "filepath" {
|
||||||
cc, ok := sysMap[agentList[r]]
|
cc := GetCardByRole(agentList[r])
|
||||||
if !ok {
|
if cc == nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
chatActTable.SetCell(r, c,
|
chatActTable.SetCell(r, c,
|
||||||
@@ -582,7 +571,7 @@ func makeAgentTable(agentList []string) *tview.Table {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
// replace textview
|
// replace textview
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
// sysModal.ClearButtons()
|
// sysModal.ClearButtons()
|
||||||
@@ -603,9 +592,7 @@ func makeAgentTable(agentList []string) *tview.Table {
|
|||||||
if err := store.RemoveChat(sc.ID); err != nil {
|
if err := store.RemoveChat(sc.ID); err != nil {
|
||||||
logger.Error("failed to remove chat from db", "chat_id", sc.ID, "chat_name", sc.Name)
|
logger.Error("failed to remove chat from db", "chat_id", sc.ID, "chat_name", sc.Name)
|
||||||
}
|
}
|
||||||
if err := notifyUser("chat deleted", selected+" was deleted"); err != nil {
|
showToast("chat deleted", selected+" was deleted")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
pages.RemovePage(agentPage)
|
pages.RemovePage(agentPage)
|
||||||
return
|
return
|
||||||
default:
|
default:
|
||||||
@@ -676,13 +663,9 @@ func makeCodeBlockTable(codeBlocks []string) *tview.Table {
|
|||||||
switch tc.Text {
|
switch tc.Text {
|
||||||
case "copy":
|
case "copy":
|
||||||
if err := copyToClipboard(selected); err != nil {
|
if err := copyToClipboard(selected); err != nil {
|
||||||
if err := notifyUser("error", err.Error()); err != nil {
|
showToast("error", err.Error())
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err := notifyUser("copied", selected); err != nil {
|
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
}
|
||||||
|
showToast("copied", selected)
|
||||||
pages.RemovePage(codeBlockPage)
|
pages.RemovePage(codeBlockPage)
|
||||||
app.SetFocus(textArea)
|
app.SetFocus(textArea)
|
||||||
return
|
return
|
||||||
@@ -757,7 +740,7 @@ func makeImportChatTable(filenames []string) *tview.Table {
|
|||||||
colorText()
|
colorText()
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
// redraw the text in text area
|
// redraw the text in text area
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
pages.RemovePage(historyPage)
|
pages.RemovePage(historyPage)
|
||||||
app.SetFocus(textArea)
|
app.SetFocus(textArea)
|
||||||
return
|
return
|
||||||
@@ -775,9 +758,7 @@ func makeImportChatTable(filenames []string) *tview.Table {
|
|||||||
if err := store.RemoveChat(sc.ID); err != nil {
|
if err := store.RemoveChat(sc.ID); err != nil {
|
||||||
logger.Error("failed to remove chat from db", "chat_id", sc.ID, "chat_name", sc.Name)
|
logger.Error("failed to remove chat from db", "chat_id", sc.ID, "chat_name", sc.Name)
|
||||||
}
|
}
|
||||||
if err := notifyUser("chat deleted", selected+" was deleted"); err != nil {
|
showToast("chat deleted", selected+" was deleted")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
pages.RemovePage(historyPage)
|
pages.RemovePage(historyPage)
|
||||||
return
|
return
|
||||||
default:
|
default:
|
||||||
|
|||||||
653
tools_playwright.go
Normal file
653
tools_playwright.go
Normal file
@@ -0,0 +1,653 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"gf-lt/models"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/playwright-community/playwright-go"
|
||||||
|
)
|
||||||
|
|
||||||
|
var browserToolSysMsg = `
|
||||||
|
Additional browser automation tools (Playwright):
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"name": "pw_start",
|
||||||
|
"args": [],
|
||||||
|
"when_to_use": "start a browser instance before doing any browser automation. Must be called first."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_stop",
|
||||||
|
"args": [],
|
||||||
|
"when_to_use": "stop the browser instance when done with automation."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_is_running",
|
||||||
|
"args": [],
|
||||||
|
"when_to_use": "check if browser is currently running."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_navigate",
|
||||||
|
"args": ["url"],
|
||||||
|
"when_to_use": "open a specific URL in the web browser."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_click",
|
||||||
|
"args": ["selector", "index"],
|
||||||
|
"when_to_use": "click on an element on the current webpage. Use 'index' for multiple matches (default 0)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_fill",
|
||||||
|
"args": ["selector", "text", "index"],
|
||||||
|
"when_to_use": "type text into an input field. Use 'index' for multiple matches (default 0)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_extract_text",
|
||||||
|
"args": ["selector"],
|
||||||
|
"when_to_use": "extract text content from the page or specific elements. Use selector 'body' for all page text."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_screenshot",
|
||||||
|
"args": ["selector", "full_page"],
|
||||||
|
"when_to_use": "take a screenshot of the page or a specific element. Returns a file path to the image. Use to verify actions or inspect visual state."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_screenshot_and_view",
|
||||||
|
"args": ["selector", "full_page"],
|
||||||
|
"when_to_use": "take a screenshot and return the image for viewing. Use to visually verify page state."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_wait_for_selector",
|
||||||
|
"args": ["selector", "timeout"],
|
||||||
|
"when_to_use": "wait for an element to appear on the page before proceeding with further actions."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_drag",
|
||||||
|
"args": ["x1", "y1", "x2", "y2"],
|
||||||
|
"when_to_use": "drag the mouse from point (x1,y1) to (x2,y2)."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_click_at",
|
||||||
|
"args": ["x", "y"],
|
||||||
|
"when_to_use": "click at specific X,Y coordinates on the page. Use when you know the exact position."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_get_html",
|
||||||
|
"args": ["selector"],
|
||||||
|
"when_to_use": "get the HTML content of the page or a specific element. Use to understand page structure or extract raw HTML."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_get_dom",
|
||||||
|
"args": ["selector"],
|
||||||
|
"when_to_use": "get a structured DOM representation with tag, attributes, text, and children. Use to inspect element hierarchy and properties."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pw_search_elements",
|
||||||
|
"args": ["text", "selector"],
|
||||||
|
"when_to_use": "search for elements by text content or CSS selector. Returns matching elements with their tags, text, and HTML."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
`
|
||||||
|
|
||||||
|
var (
|
||||||
|
pw *playwright.Playwright
|
||||||
|
browser playwright.Browser
|
||||||
|
browserStarted bool
|
||||||
|
browserStartMu sync.Mutex
|
||||||
|
page playwright.Page
|
||||||
|
)
|
||||||
|
|
||||||
|
func pwShutDown() error {
|
||||||
|
if pw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
pwStop(nil)
|
||||||
|
return pw.Stop()
|
||||||
|
}
|
||||||
|
|
||||||
|
func installPW() error {
|
||||||
|
err := playwright.Install(&playwright.RunOptions{Verbose: false})
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("playwright not available", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkPlaywright() error {
|
||||||
|
var err error
|
||||||
|
pw, err = playwright.Run()
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("playwright not available", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwStart(args map[string]string) []byte {
|
||||||
|
browserStartMu.Lock()
|
||||||
|
defer browserStartMu.Unlock()
|
||||||
|
if browserStarted {
|
||||||
|
return []byte(`{"error": "Browser already started"}`)
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
browser, err = pw.Chromium.Launch(playwright.BrowserTypeLaunchOptions{
|
||||||
|
Headless: playwright.Bool(!cfg.PlaywrightDebug),
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to launch browser: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
page, err = browser.NewPage()
|
||||||
|
if err != nil {
|
||||||
|
browser.Close()
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to create page: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
browserStarted = true
|
||||||
|
return []byte(`{"success": true, "message": "Browser started"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwStop(args map[string]string) []byte {
|
||||||
|
browserStartMu.Lock()
|
||||||
|
defer browserStartMu.Unlock()
|
||||||
|
if !browserStarted {
|
||||||
|
return []byte(`{"success": true, "message": "Browser was not running"}`)
|
||||||
|
}
|
||||||
|
if page != nil {
|
||||||
|
page.Close()
|
||||||
|
page = nil
|
||||||
|
}
|
||||||
|
if browser != nil {
|
||||||
|
browser.Close()
|
||||||
|
browser = nil
|
||||||
|
}
|
||||||
|
browserStarted = false
|
||||||
|
return []byte(`{"success": true, "message": "Browser stopped"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwIsRunning(args map[string]string) []byte {
|
||||||
|
if browserStarted {
|
||||||
|
return []byte(`{"running": true, "message": "Browser is running"}`)
|
||||||
|
}
|
||||||
|
return []byte(`{"running": false, "message": "Browser is not running"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwNavigate(args map[string]string) []byte {
|
||||||
|
url, ok := args["url"]
|
||||||
|
if !ok || url == "" {
|
||||||
|
return []byte(`{"error": "url not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
_, err := page.Goto(url)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to navigate: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
title, _ := page.Title()
|
||||||
|
pageURL := page.URL()
|
||||||
|
return []byte(fmt.Sprintf(`{"success": true, "title": "%s", "url": "%s"}`, title, pageURL))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwClick(args map[string]string) []byte {
|
||||||
|
selector, ok := args["selector"]
|
||||||
|
if !ok || selector == "" {
|
||||||
|
return []byte(`{"error": "selector not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
index := 0
|
||||||
|
if args["index"] != "" {
|
||||||
|
if i, err := strconv.Atoi(args["index"]); err != nil {
|
||||||
|
logger.Warn("failed to parse index", "value", args["index"], "error", err)
|
||||||
|
} else {
|
||||||
|
index = i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if index >= count {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "Element not found at index %d (found %d elements)"}`, index, count))
|
||||||
|
}
|
||||||
|
err = locator.Nth(index).Click()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to click: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(`{"success": true, "message": "Clicked element"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwFill(args map[string]string) []byte {
|
||||||
|
selector, ok := args["selector"]
|
||||||
|
if !ok || selector == "" {
|
||||||
|
return []byte(`{"error": "selector not provided"}`)
|
||||||
|
}
|
||||||
|
text := args["text"]
|
||||||
|
if text == "" {
|
||||||
|
text = ""
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
index := 0
|
||||||
|
if args["index"] != "" {
|
||||||
|
if i, err := strconv.Atoi(args["index"]); err != nil {
|
||||||
|
logger.Warn("failed to parse index", "value", args["index"], "error", err)
|
||||||
|
} else {
|
||||||
|
index = i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if index >= count {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "Element not found at index %d"}`, index))
|
||||||
|
}
|
||||||
|
err = locator.Nth(index).Fill(text)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to fill: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(`{"success": true, "message": "Filled input"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwExtractText(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
if selector == "" {
|
||||||
|
selector = "body"
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"error": "No elements found"}`)
|
||||||
|
}
|
||||||
|
if selector == "body" {
|
||||||
|
text, err := page.Locator("body").TextContent()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get text: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"text": "%s"}`, text))
|
||||||
|
}
|
||||||
|
var texts []string
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
text, err := locator.Nth(i).TextContent()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
texts = append(texts, text)
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"text": "%s"}`, joinLines(texts)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// joinLines concatenates the given strings with a single newline between
// consecutive elements. A nil or empty slice yields "".
//
// Fix: the hand-rolled strings.Builder loop was an exact re-implementation
// of strings.Join; use the standard library instead.
func joinLines(lines []string) string {
	return strings.Join(lines, "\n")
}
|
||||||
|
|
||||||
|
func pwScreenshot(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
fullPage := args["full_page"] == "true"
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
path := fmt.Sprintf("/tmp/pw_screenshot_%d.png", os.Getpid())
|
||||||
|
var err error
|
||||||
|
if selector != "" && selector != "body" {
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
_, err = locator.Screenshot(playwright.LocatorScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
_, err = page.Screenshot(playwright.PageScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
FullPage: playwright.Bool(fullPage),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to take screenshot: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"path": "%s"}`, path))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwScreenshotAndView(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
fullPage := args["full_page"] == "true"
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
path := fmt.Sprintf("/tmp/pw_screenshot_%d.png", os.Getpid())
|
||||||
|
var err error
|
||||||
|
if selector != "" && selector != "body" {
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
_, err = locator.Screenshot(playwright.LocatorScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
_, err = page.Screenshot(playwright.PageScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
FullPage: playwright.Bool(fullPage),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to take screenshot: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
dataURL, err := models.CreateImageURLFromPath(path)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to create image URL: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
resp := models.MultimodalToolResp{
|
||||||
|
Type: "multimodal_content",
|
||||||
|
Parts: []map[string]string{
|
||||||
|
{"type": "text", "text": "Screenshot saved: " + path},
|
||||||
|
{"type": "image_url", "url": dataURL},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
jsonResult, err := json.Marshal(resp)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to marshal result: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return jsonResult
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwWaitForSelector(args map[string]string) []byte {
|
||||||
|
selector, ok := args["selector"]
|
||||||
|
if !ok || selector == "" {
|
||||||
|
return []byte(`{"error": "selector not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
timeout := 30000
|
||||||
|
if args["timeout"] != "" {
|
||||||
|
if t, err := strconv.Atoi(args["timeout"]); err != nil {
|
||||||
|
logger.Warn("failed to parse timeout", "value", args["timeout"], "error", err)
|
||||||
|
} else {
|
||||||
|
timeout = t
|
||||||
|
}
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
err := locator.WaitFor(playwright.LocatorWaitForOptions{
|
||||||
|
Timeout: playwright.Float(float64(timeout)),
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "element not found: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(`{"success": true, "message": "Element found"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwDrag(args map[string]string) []byte {
|
||||||
|
x1, ok := args["x1"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "x1 not provided"}`)
|
||||||
|
}
|
||||||
|
y1, ok := args["y1"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "y1 not provided"}`)
|
||||||
|
}
|
||||||
|
x2, ok := args["x2"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "x2 not provided"}`)
|
||||||
|
}
|
||||||
|
y2, ok := args["y2"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "y2 not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
var fx1, fy1, fx2, fy2 float64
|
||||||
|
if parsedX1, err := strconv.ParseFloat(x1, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse x1", "value", x1, "error", err)
|
||||||
|
} else {
|
||||||
|
fx1 = parsedX1
|
||||||
|
}
|
||||||
|
if parsedY1, err := strconv.ParseFloat(y1, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse y1", "value", y1, "error", err)
|
||||||
|
} else {
|
||||||
|
fy1 = parsedY1
|
||||||
|
}
|
||||||
|
if parsedX2, err := strconv.ParseFloat(x2, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse x2", "value", x2, "error", err)
|
||||||
|
} else {
|
||||||
|
fx2 = parsedX2
|
||||||
|
}
|
||||||
|
if parsedY2, err := strconv.ParseFloat(y2, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse y2", "value", y2, "error", err)
|
||||||
|
} else {
|
||||||
|
fy2 = parsedY2
|
||||||
|
}
|
||||||
|
mouse := page.Mouse()
|
||||||
|
err := mouse.Move(fx1, fy1)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to move mouse: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Down()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to mouse down: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Move(fx2, fy2)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to move mouse: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Up()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to mouse up: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"success": true, "message": "Dragged from (%s,%s) to (%s,%s)"}`, x1, y1, x2, y2))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwClickAt(args map[string]string) []byte {
|
||||||
|
x, ok := args["x"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "x not provided"}`)
|
||||||
|
}
|
||||||
|
y, ok := args["y"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "y not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
fx, err := strconv.ParseFloat(x, 64)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to parse x: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
fy, err := strconv.ParseFloat(y, 64)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to parse y: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
mouse := page.Mouse()
|
||||||
|
err = mouse.Click(fx, fy)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to click: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"success": true, "message": "Clicked at (%s,%s)"}`, x, y))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwGetHTML(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
if selector == "" {
|
||||||
|
selector = "body"
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"error": "No elements found"}`)
|
||||||
|
}
|
||||||
|
html, err := locator.First().InnerHTML()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get HTML: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"html": %s}`, jsonString(html)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// DOMElement is a JSON-serializable snapshot of a single DOM node,
// produced by elementToDOM/buildDOMTree and returned by pwGetDOM.
// All fields are omitted from JSON when empty.
type DOMElement struct {
	// Tag is the lower-cased element name (e.g. "div", "a").
	Tag string `json:"tag,omitempty"`
	// Attributes maps attribute names to their string values.
	Attributes map[string]string `json:"attributes,omitempty"`
	// Text is the node's text content, if readable.
	Text string `json:"text,omitempty"`
	// Children holds nested elements collected by buildDOMTree.
	Children []DOMElement `json:"children,omitempty"`
	// Selector is reserved for a selector identifying this element;
	// NOTE(review): it is never populated by the builders visible in
	// this file — confirm whether any caller sets it.
	Selector string `json:"selector,omitempty"`
	// InnerHTML is the element's inner HTML markup, if readable.
	InnerHTML string `json:"innerHTML,omitempty"`
}
|
||||||
|
|
||||||
|
func buildDOMTree(locator playwright.Locator) ([]DOMElement, error) {
|
||||||
|
var results []DOMElement
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
el := locator.Nth(i)
|
||||||
|
dom, err := elementToDOM(el)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
results = append(results, dom)
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// elementToDOM converts a single located element into a DOMElement
// snapshot. Every sub-step (tag name, attributes, text, inner HTML,
// children) is best-effort: a failure simply leaves the corresponding
// field empty. As written, the function never returns a non-nil error.
func elementToDOM(el playwright.Locator) (DOMElement, error) {
	dom := DOMElement{}
	// The DOM reports nodeName upper-case; normalize to lower-case tags.
	tag, err := el.Evaluate(`el => el.nodeName`, nil)
	if err == nil {
		dom.Tag = strings.ToLower(fmt.Sprintf("%v", tag))
	}
	// Collect all attributes in a single JS round-trip.
	attributes := make(map[string]string)
	attrs, err := el.Evaluate(`el => {
		let attrs = {};
		for (let i = 0; i < el.attributes.length; i++) {
			let attr = el.attributes[i];
			attrs[attr.name] = attr.value;
		}
		return attrs;
	}`, nil)
	if err == nil {
		// Evaluate returns map[string]any; keep only string values.
		if amap, ok := attrs.(map[string]any); ok {
			for k, v := range amap {
				if vs, ok := v.(string); ok {
					attributes[k] = vs
				}
			}
		}
	}
	if len(attributes) > 0 {
		dom.Attributes = attributes
	}
	text, err := el.TextContent()
	if err == nil && text != "" {
		dom.Text = text
	}
	innerHTML, err := el.InnerHTML()
	if err == nil && innerHTML != "" {
		dom.InnerHTML = innerHTML
	}
	// NOTE(review): el.Count() counts matches of this locator (normally 1),
	// not the element's children, so this gate effectively means "the
	// locator matched something". Also, el.Locator("*") matches ALL
	// descendants, not just direct children, so each descendant is
	// re-visited at every ancestor level — on deep trees this duplicates
	// nodes in Children and can explode in cost. Confirm intent before
	// relying on the Children field.
	childCount, _ := el.Count()
	if childCount > 0 {
		childrenLocator := el.Locator("*")
		children, err := buildDOMTree(childrenLocator)
		if err == nil && len(children) > 0 {
			dom.Children = children
		}
	}
	return dom, nil
}
|
||||||
|
|
||||||
|
func pwGetDOM(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
if selector == "" {
|
||||||
|
selector = "body"
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"error": "No elements found"}`)
|
||||||
|
}
|
||||||
|
dom, err := elementToDOM(locator.First())
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get DOM: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
data, err := json.Marshal(dom)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to marshal DOM: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"dom": %s}`, string(data)))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwSearchElements(args map[string]string) []byte {
|
||||||
|
text := args["text"]
|
||||||
|
selector := args["selector"]
|
||||||
|
if text == "" && selector == "" {
|
||||||
|
return []byte(`{"error": "text or selector not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
var locator playwright.Locator
|
||||||
|
if text != "" {
|
||||||
|
locator = page.GetByText(text)
|
||||||
|
} else {
|
||||||
|
locator = page.Locator(selector)
|
||||||
|
}
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to search elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"elements": []}`)
|
||||||
|
}
|
||||||
|
var results []map[string]string
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
el := locator.Nth(i)
|
||||||
|
tag, _ := el.Evaluate(`el => el.nodeName`, nil)
|
||||||
|
text, _ := el.TextContent()
|
||||||
|
html, _ := el.InnerHTML()
|
||||||
|
results = append(results, map[string]string{
|
||||||
|
"index": strconv.Itoa(i),
|
||||||
|
"tag": strings.ToLower(fmt.Sprintf("%v", tag)),
|
||||||
|
"text": text,
|
||||||
|
"html": html,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
data, err := json.Marshal(results)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to marshal results: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"elements": %s}`, string(data)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// jsonString returns s encoded as a JSON string literal (double-quoted,
// with all necessary escaping).
func jsonString(s string) string {
	encoded, _ := json.Marshal(s) // marshaling a plain string cannot fail
	return string(encoded)
}
|
||||||
388
tui.go
388
tui.go
@@ -10,6 +10,7 @@ import (
|
|||||||
"path"
|
"path"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/gdamore/tcell/v2"
|
"github.com/gdamore/tcell/v2"
|
||||||
"github.com/rivo/tview"
|
"github.com/rivo/tview"
|
||||||
@@ -21,7 +22,6 @@ func isFullScreenPageActive() bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
app *tview.Application
|
|
||||||
pages *tview.Pages
|
pages *tview.Pages
|
||||||
textArea *tview.TextArea
|
textArea *tview.TextArea
|
||||||
editArea *tview.TextArea
|
editArea *tview.TextArea
|
||||||
@@ -29,12 +29,17 @@ var (
|
|||||||
statusLineWidget *tview.TextView
|
statusLineWidget *tview.TextView
|
||||||
helpView *tview.TextView
|
helpView *tview.TextView
|
||||||
flex *tview.Flex
|
flex *tview.Flex
|
||||||
|
bottomFlex *tview.Flex
|
||||||
|
notificationWidget *tview.TextView
|
||||||
imgView *tview.Image
|
imgView *tview.Image
|
||||||
defaultImage = "sysprompts/llama.png"
|
defaultImage = "sysprompts/llama.png"
|
||||||
indexPickWindow *tview.InputField
|
indexPickWindow *tview.InputField
|
||||||
renameWindow *tview.InputField
|
renameWindow *tview.InputField
|
||||||
roleEditWindow *tview.InputField
|
roleEditWindow *tview.InputField
|
||||||
shellInput *tview.InputField
|
shellInput *tview.InputField
|
||||||
|
confirmModal *tview.Modal
|
||||||
|
toastTimer *time.Timer
|
||||||
|
confirmPageName = "confirm"
|
||||||
fullscreenMode bool
|
fullscreenMode bool
|
||||||
positionVisible bool = true
|
positionVisible bool = true
|
||||||
scrollToEndEnabled bool = true
|
scrollToEndEnabled bool = true
|
||||||
@@ -135,6 +140,90 @@ func setShellMode(enabled bool) {
|
|||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// showToast displays a temporary notification in the bottom-right corner.
|
||||||
|
// It auto-hides after 3 seconds.
|
||||||
|
func showToast(title, message string) {
|
||||||
|
sanitize := func(s string, maxLen int) string {
|
||||||
|
sanitized := strings.Map(func(r rune) rune {
|
||||||
|
if r < 32 && r != '\t' {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}, s)
|
||||||
|
if len(sanitized) > maxLen {
|
||||||
|
sanitized = sanitized[:maxLen-3] + "..."
|
||||||
|
}
|
||||||
|
return sanitized
|
||||||
|
}
|
||||||
|
title = sanitize(title, 50)
|
||||||
|
message = sanitize(message, 197)
|
||||||
|
if toastTimer != nil {
|
||||||
|
toastTimer.Stop()
|
||||||
|
}
|
||||||
|
// show blocking notification to not mess up flex
|
||||||
|
if fullscreenMode {
|
||||||
|
notification := tview.NewTextView().
|
||||||
|
SetTextAlign(tview.AlignCenter).
|
||||||
|
SetDynamicColors(true).
|
||||||
|
SetRegions(true).
|
||||||
|
SetText(fmt.Sprintf("[yellow]%s[-]\n", message)).
|
||||||
|
SetChangedFunc(func() {
|
||||||
|
app.Draw()
|
||||||
|
})
|
||||||
|
notification.SetTitleAlign(tview.AlignLeft).
|
||||||
|
SetBorder(true).
|
||||||
|
SetTitle(title)
|
||||||
|
// Wrap it in a full‑screen Flex to position it in the top‑right corner.
|
||||||
|
// Outer Flex (row) pushes content to the top; inner Flex (column) pushes to the right.
|
||||||
|
background := tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
|
AddItem(nil, 0, 1, false). // top spacer
|
||||||
|
AddItem(tview.NewFlex().SetDirection(tview.FlexColumn).
|
||||||
|
AddItem(nil, 0, 1, false). // left spacer
|
||||||
|
AddItem(notification, 40, 1, true), // notification width 40
|
||||||
|
5, 1, false) // notification height 5
|
||||||
|
// Generate a unique page name (e.g., using timestamp) to allow multiple toasts.
|
||||||
|
pageName := fmt.Sprintf("toast-%d", time.Now().UnixNano())
|
||||||
|
pages.AddPage(pageName, background, true, true)
|
||||||
|
// Auto‑dismiss after 2 seconds, since blocking is more annoying
|
||||||
|
time.AfterFunc(2*time.Second, func() {
|
||||||
|
app.QueueUpdateDraw(func() {
|
||||||
|
if pages.HasPage(pageName) {
|
||||||
|
pages.RemovePage(pageName)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
notificationWidget.SetTitle(title)
|
||||||
|
notificationWidget.SetText(fmt.Sprintf("[yellow]%s[-]", message))
|
||||||
|
go func() {
|
||||||
|
app.QueueUpdateDraw(func() {
|
||||||
|
flex.RemoveItem(bottomFlex)
|
||||||
|
flex.RemoveItem(statusLineWidget)
|
||||||
|
bottomFlex = tview.NewFlex().SetDirection(tview.FlexColumn).
|
||||||
|
AddItem(textArea, 0, 1, true).
|
||||||
|
AddItem(notificationWidget, 40, 1, false)
|
||||||
|
flex.AddItem(bottomFlex, 0, 10, true)
|
||||||
|
if positionVisible {
|
||||||
|
flex.AddItem(statusLineWidget, 0, 2, false)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}()
|
||||||
|
toastTimer = time.AfterFunc(3*time.Second, func() {
|
||||||
|
app.QueueUpdateDraw(func() {
|
||||||
|
flex.RemoveItem(bottomFlex)
|
||||||
|
flex.RemoveItem(statusLineWidget)
|
||||||
|
bottomFlex = tview.NewFlex().SetDirection(tview.FlexColumn).
|
||||||
|
AddItem(textArea, 0, 1, true).
|
||||||
|
AddItem(notificationWidget, 0, 0, false)
|
||||||
|
flex.AddItem(bottomFlex, 0, 10, true)
|
||||||
|
if positionVisible {
|
||||||
|
flex.AddItem(statusLineWidget, 0, 2, false)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
// Start background goroutine to update model color cache
|
// Start background goroutine to update model color cache
|
||||||
startModelColorUpdater()
|
startModelColorUpdater()
|
||||||
@@ -184,7 +273,7 @@ func init() {
|
|||||||
shellHistoryPos = -1
|
shellHistoryPos = -1
|
||||||
}
|
}
|
||||||
// Handle Tab key for @ file completion
|
// Handle Tab key for @ file completion
|
||||||
if event.Key() == tcell.KeyTab {
|
if event.Key() == tcell.KeyTab && shellMode {
|
||||||
currentText := shellInput.GetText()
|
currentText := shellInput.GetText()
|
||||||
atIndex := strings.LastIndex(currentText, "@")
|
atIndex := strings.LastIndex(currentText, "@")
|
||||||
if atIndex >= 0 {
|
if atIndex >= 0 {
|
||||||
@@ -195,6 +284,39 @@ func init() {
|
|||||||
}
|
}
|
||||||
return event
|
return event
|
||||||
})
|
})
|
||||||
|
confirmModal = tview.NewModal().
|
||||||
|
SetText("You are trying to send an empty message.\nIt makes sense if the last message in the chat is from you.\nAre you sure?").
|
||||||
|
AddButtons([]string{"Yes", "No"}).
|
||||||
|
SetButtonBackgroundColor(tcell.ColorBlack).
|
||||||
|
SetButtonTextColor(tcell.ColorWhite).
|
||||||
|
SetDoneFunc(func(buttonIndex int, buttonLabel string) {
|
||||||
|
if buttonLabel == "Yes" {
|
||||||
|
persona := cfg.UserRole
|
||||||
|
if cfg.WriteNextMsgAs != "" {
|
||||||
|
persona = cfg.WriteNextMsgAs
|
||||||
|
}
|
||||||
|
chatRoundChan <- &models.ChatRoundReq{Role: persona, UserMsg: ""}
|
||||||
|
} // In both Yes and No, go back to the main page
|
||||||
|
pages.SwitchToPage("main") // or whatever your main page is named
|
||||||
|
})
|
||||||
|
confirmModal.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyRune {
|
||||||
|
switch event.Rune() {
|
||||||
|
case 'y', 'Y':
|
||||||
|
persona := cfg.UserRole
|
||||||
|
if cfg.WriteNextMsgAs != "" {
|
||||||
|
persona = cfg.WriteNextMsgAs
|
||||||
|
}
|
||||||
|
chatRoundChan <- &models.ChatRoundReq{Role: persona, UserMsg: ""}
|
||||||
|
pages.SwitchToPage("main")
|
||||||
|
return nil
|
||||||
|
case 'n', 'N', 'x', 'X':
|
||||||
|
pages.SwitchToPage("main")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
textArea = tview.NewTextArea().
|
textArea = tview.NewTextArea().
|
||||||
SetPlaceholder("input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message.")
|
SetPlaceholder("input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message.")
|
||||||
textArea.SetBorder(true).SetTitle("input")
|
textArea.SetBorder(true).SetTitle("input")
|
||||||
@@ -202,12 +324,26 @@ func init() {
|
|||||||
SetDynamicColors(true).
|
SetDynamicColors(true).
|
||||||
SetRegions(true).
|
SetRegions(true).
|
||||||
SetChangedFunc(func() {
|
SetChangedFunc(func() {
|
||||||
|
// INFO:
|
||||||
|
// https://github.com/rivo/tview/wiki/Concurrency#event-handlers
|
||||||
|
// although already called by default per tview specs
|
||||||
|
// calling it explicitly makes text streaming to look more smooth
|
||||||
app.Draw()
|
app.Draw()
|
||||||
})
|
})
|
||||||
|
notificationWidget = tview.NewTextView().
|
||||||
|
SetTextAlign(tview.AlignCenter).
|
||||||
|
SetDynamicColors(true).
|
||||||
|
SetRegions(true).
|
||||||
|
SetChangedFunc(func() {
|
||||||
|
})
|
||||||
|
notificationWidget.SetBorder(true).SetTitle("notification")
|
||||||
|
bottomFlex = tview.NewFlex().SetDirection(tview.FlexColumn).
|
||||||
|
AddItem(textArea, 0, 1, true).
|
||||||
|
AddItem(notificationWidget, 0, 0, false)
|
||||||
//
|
//
|
||||||
flex = tview.NewFlex().SetDirection(tview.FlexRow).
|
flex = tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
AddItem(textView, 0, 40, false).
|
AddItem(textView, 0, 40, false).
|
||||||
AddItem(textArea, 0, 10, true) // Restore original height
|
AddItem(bottomFlex, 0, 10, true)
|
||||||
if positionVisible {
|
if positionVisible {
|
||||||
flex.AddItem(statusLineWidget, 0, 2, false)
|
flex.AddItem(statusLineWidget, 0, 2, false)
|
||||||
}
|
}
|
||||||
@@ -219,7 +355,7 @@ func init() {
|
|||||||
searchResults = nil // Clear search results
|
searchResults = nil // Clear search results
|
||||||
searchResultLengths = nil // Clear search result lengths
|
searchResultLengths = nil // Clear search result lengths
|
||||||
originalTextForSearch = ""
|
originalTextForSearch = ""
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys)) // Reset text without search regions
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys)) // Reset text without search regions
|
||||||
colorText() // Apply normal chat coloring
|
colorText() // Apply normal chat coloring
|
||||||
} else {
|
} else {
|
||||||
// Original logic if no search is active
|
// Original logic if no search is active
|
||||||
@@ -276,10 +412,14 @@ func init() {
|
|||||||
// y += h / 2
|
// y += h / 2
|
||||||
// return x, y, w, h
|
// return x, y, w, h
|
||||||
// })
|
// })
|
||||||
|
notificationWidget.SetDrawFunc(func(screen tcell.Screen, x, y, w, h int) (int, int, int, int) {
|
||||||
|
y += h / 2
|
||||||
|
return x, y, w, h
|
||||||
|
})
|
||||||
// Initially set up flex without search bar
|
// Initially set up flex without search bar
|
||||||
flex = tview.NewFlex().SetDirection(tview.FlexRow).
|
flex = tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
AddItem(textView, 0, 40, false).
|
AddItem(textView, 0, 40, false).
|
||||||
AddItem(textArea, 0, 10, true) // Restore original height
|
AddItem(bottomFlex, 0, 10, true)
|
||||||
if positionVisible {
|
if positionVisible {
|
||||||
flex.AddItem(statusLineWidget, 0, 2, false)
|
flex.AddItem(statusLineWidget, 0, 2, false)
|
||||||
}
|
}
|
||||||
@@ -292,15 +432,15 @@ func init() {
|
|||||||
defer colorText()
|
defer colorText()
|
||||||
editedMsg := editArea.GetText()
|
editedMsg := editArea.GetText()
|
||||||
if editedMsg == "" {
|
if editedMsg == "" {
|
||||||
if err := notifyUser("edit", "no edit provided"); err != nil {
|
showToast("edit", "no edit provided")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
pages.RemovePage(editMsgPage)
|
pages.RemovePage(editMsgPage)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
chatBody.Messages[selectedIndex].SetText(editedMsg)
|
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||||
|
cb.Messages[selectedIndex].SetText(editedMsg)
|
||||||
|
})
|
||||||
// change textarea
|
// change textarea
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
pages.RemovePage(editMsgPage)
|
pages.RemovePage(editMsgPage)
|
||||||
editMode = false
|
editMode = false
|
||||||
return nil
|
return nil
|
||||||
@@ -324,15 +464,15 @@ func init() {
|
|||||||
case tcell.KeyEnter:
|
case tcell.KeyEnter:
|
||||||
newRole := roleEditWindow.GetText()
|
newRole := roleEditWindow.GetText()
|
||||||
if newRole == "" {
|
if newRole == "" {
|
||||||
if err := notifyUser("edit", "no role provided"); err != nil {
|
showToast("edit", "no role provided")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
pages.RemovePage(roleEditPage)
|
pages.RemovePage(roleEditPage)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if selectedIndex >= 0 && selectedIndex < len(chatBody.Messages) {
|
if selectedIndex >= 0 && selectedIndex < chatBody.GetMessageCount() {
|
||||||
chatBody.Messages[selectedIndex].Role = newRole
|
chatBody.WithLock(func(cb *models.ChatBody) {
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
cb.Messages[selectedIndex].Role = newRole
|
||||||
|
})
|
||||||
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
pages.RemovePage(roleEditPage)
|
pages.RemovePage(roleEditPage)
|
||||||
}
|
}
|
||||||
@@ -353,9 +493,7 @@ func init() {
|
|||||||
siInt, err := strconv.Atoi(si)
|
siInt, err := strconv.Atoi(si)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to convert provided index", "error", err, "si", si)
|
logger.Error("failed to convert provided index", "error", err, "si", si)
|
||||||
if err := notifyUser("cancel", "no index provided, copying user input"); err != nil {
|
showToast("cancel", "no index provided, copying user input")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
if err := copyToClipboard(textArea.GetText()); err != nil {
|
if err := copyToClipboard(textArea.GetText()); err != nil {
|
||||||
logger.Error("failed to copy to clipboard", "error", err)
|
logger.Error("failed to copy to clipboard", "error", err)
|
||||||
}
|
}
|
||||||
@@ -363,19 +501,17 @@ func init() {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
selectedIndex = siInt
|
selectedIndex = siInt
|
||||||
if len(chatBody.Messages)-1 < selectedIndex || selectedIndex < 0 {
|
if chatBody.GetMessageCount()-1 < selectedIndex || selectedIndex < 0 {
|
||||||
msg := "chosen index is out of bounds, will copy user input"
|
msg := "chosen index is out of bounds, will copy user input"
|
||||||
logger.Warn(msg, "index", selectedIndex)
|
logger.Warn(msg, "index", selectedIndex)
|
||||||
if err := notifyUser("error", msg); err != nil {
|
showToast("error", msg)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
if err := copyToClipboard(textArea.GetText()); err != nil {
|
if err := copyToClipboard(textArea.GetText()); err != nil {
|
||||||
logger.Error("failed to copy to clipboard", "error", err)
|
logger.Error("failed to copy to clipboard", "error", err)
|
||||||
}
|
}
|
||||||
hideIndexBar() // Hide overlay instead of removing page directly
|
hideIndexBar() // Hide overlay instead of removing page directly
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
m := chatBody.Messages[selectedIndex]
|
m := chatBody.GetMessages()[selectedIndex]
|
||||||
switch {
|
switch {
|
||||||
case roleEditMode:
|
case roleEditMode:
|
||||||
hideIndexBar() // Hide overlay first
|
hideIndexBar() // Hide overlay first
|
||||||
@@ -394,9 +530,7 @@ func init() {
|
|||||||
}
|
}
|
||||||
previewLen := min(30, len(msgText))
|
previewLen := min(30, len(msgText))
|
||||||
notification := fmt.Sprintf("msg '%s' was copied to the clipboard", msgText[:previewLen])
|
notification := fmt.Sprintf("msg '%s' was copied to the clipboard", msgText[:previewLen])
|
||||||
if err := notifyUser("copied", notification); err != nil {
|
showToast("copied", notification)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
hideIndexBar() // Hide overlay after copying
|
hideIndexBar() // Hide overlay after copying
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
@@ -428,9 +562,7 @@ func init() {
|
|||||||
logger.Error("failed to upsert chat", "error", err, "chat", currentChat)
|
logger.Error("failed to upsert chat", "error", err, "chat", currentChat)
|
||||||
}
|
}
|
||||||
notification := fmt.Sprintf("renamed chat to '%s'", activeChatName)
|
notification := fmt.Sprintf("renamed chat to '%s'", activeChatName)
|
||||||
if err := notifyUser("renamed", notification); err != nil {
|
showToast("renamed", notification)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return event
|
return event
|
||||||
})
|
})
|
||||||
@@ -446,7 +578,7 @@ func init() {
|
|||||||
searchResults = nil
|
searchResults = nil
|
||||||
searchResultLengths = nil
|
searchResultLengths = nil
|
||||||
originalTextForSearch = ""
|
originalTextForSearch = ""
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
return
|
return
|
||||||
} else {
|
} else {
|
||||||
@@ -504,7 +636,7 @@ func init() {
|
|||||||
//
|
//
|
||||||
textArea.SetMovedFunc(updateStatusLine)
|
textArea.SetMovedFunc(updateStatusLine)
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
if scrollToEndEnabled {
|
if scrollToEndEnabled {
|
||||||
textView.ScrollToEnd()
|
textView.ScrollToEnd()
|
||||||
@@ -518,7 +650,7 @@ func init() {
|
|||||||
if event.Key() == tcell.KeyRune && event.Rune() == '5' && event.Modifiers()&tcell.ModAlt != 0 {
|
if event.Key() == tcell.KeyRune && event.Rune() == '5' && event.Modifiers()&tcell.ModAlt != 0 {
|
||||||
// switch cfg.ShowSys
|
// switch cfg.ShowSys
|
||||||
cfg.ShowSys = !cfg.ShowSys
|
cfg.ShowSys = !cfg.ShowSys
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
}
|
}
|
||||||
if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 {
|
if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 {
|
||||||
@@ -540,9 +672,7 @@ func init() {
|
|||||||
if scrollToEndEnabled {
|
if scrollToEndEnabled {
|
||||||
status = "enabled"
|
status = "enabled"
|
||||||
}
|
}
|
||||||
if err := notifyUser("autoscroll", "Auto-scrolling "+status); err != nil {
|
showToast("autoscroll", "Auto-scrolling "+status)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
}
|
}
|
||||||
// Handle Alt+7 to toggle injectRole
|
// Handle Alt+7 to toggle injectRole
|
||||||
@@ -553,29 +683,25 @@ func init() {
|
|||||||
// Handle Alt+T to toggle thinking block visibility
|
// Handle Alt+T to toggle thinking block visibility
|
||||||
if event.Key() == tcell.KeyRune && event.Rune() == 't' && event.Modifiers()&tcell.ModAlt != 0 {
|
if event.Key() == tcell.KeyRune && event.Rune() == 't' && event.Modifiers()&tcell.ModAlt != 0 {
|
||||||
thinkingCollapsed = !thinkingCollapsed
|
thinkingCollapsed = !thinkingCollapsed
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
status := "expanded"
|
status := "expanded"
|
||||||
if thinkingCollapsed {
|
if thinkingCollapsed {
|
||||||
status = "collapsed"
|
status = "collapsed"
|
||||||
}
|
}
|
||||||
if err := notifyUser("thinking", "Thinking blocks "+status); err != nil {
|
showToast("thinking", "Thinking blocks "+status)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
// Handle Ctrl+T to toggle tool call/response visibility
|
// Handle Ctrl+T to toggle tool call/response visibility
|
||||||
if event.Key() == tcell.KeyCtrlT {
|
if event.Key() == tcell.KeyCtrlT {
|
||||||
toolCollapsed = !toolCollapsed
|
toolCollapsed = !toolCollapsed
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
colorText()
|
colorText()
|
||||||
status := "expanded"
|
status := "expanded"
|
||||||
if toolCollapsed {
|
if toolCollapsed {
|
||||||
status = "collapsed"
|
status = "collapsed"
|
||||||
}
|
}
|
||||||
if err := notifyUser("tools", "Tool calls/responses "+status); err != nil {
|
showToast("tools", "Tool calls/responses "+status)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if event.Key() == tcell.KeyRune && event.Rune() == 'i' && event.Modifiers()&tcell.ModAlt != 0 {
|
if event.Key() == tcell.KeyRune && event.Rune() == 'i' && event.Modifiers()&tcell.ModAlt != 0 {
|
||||||
@@ -595,9 +721,7 @@ func init() {
|
|||||||
// Check if there are no chats for this agent
|
// Check if there are no chats for this agent
|
||||||
if len(chatList) == 0 {
|
if len(chatList) == 0 {
|
||||||
notification := "no chats found for agent: " + cfg.AssistantRole
|
notification := "no chats found for agent: " + cfg.AssistantRole
|
||||||
if err := notifyUser("info", notification); err != nil {
|
showToast("info", notification)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
chatMap := make(map[string]models.Chat)
|
chatMap := make(map[string]models.Chat)
|
||||||
@@ -614,16 +738,14 @@ func init() {
|
|||||||
}
|
}
|
||||||
if event.Key() == tcell.KeyF2 && !botRespMode {
|
if event.Key() == tcell.KeyF2 && !botRespMode {
|
||||||
// regen last msg
|
// regen last msg
|
||||||
if len(chatBody.Messages) == 0 {
|
if chatBody.GetMessageCount() == 0 {
|
||||||
if err := notifyUser("info", "no messages to regenerate"); err != nil {
|
showToast("info", "no messages to regenerate")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
|
chatBody.TruncateMessages(chatBody.GetMessageCount() - 1)
|
||||||
// there is no case where user msg is regenerated
|
// there is no case where user msg is regenerated
|
||||||
// lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
|
// lastRole := chatBody.GetMessages()[chatBody.GetMessageCount()-1].Role
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
// go chatRound("", cfg.UserRole, textView, true, false)
|
// go chatRound("", cfg.UserRole, textView, true, false)
|
||||||
if cfg.TTS_ENABLED {
|
if cfg.TTS_ENABLED {
|
||||||
TTSDoneChan <- true
|
TTSDoneChan <- true
|
||||||
@@ -642,14 +764,12 @@ func init() {
|
|||||||
colorText()
|
colorText()
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if len(chatBody.Messages) == 0 {
|
if chatBody.GetMessageCount() == 0 {
|
||||||
if err := notifyUser("info", "no messages to delete"); err != nil {
|
showToast("info", "no messages to delete")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
|
chatBody.TruncateMessages(chatBody.GetMessageCount() - 1)
|
||||||
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
textView.SetText(chatToText(chatBody.GetMessages(), cfg.ShowSys))
|
||||||
if cfg.TTS_ENABLED {
|
if cfg.TTS_ENABLED {
|
||||||
TTSDoneChan <- true
|
TTSDoneChan <- true
|
||||||
}
|
}
|
||||||
@@ -691,21 +811,20 @@ func init() {
|
|||||||
if event.Key() == tcell.KeyF6 {
|
if event.Key() == tcell.KeyF6 {
|
||||||
interruptResp = true
|
interruptResp = true
|
||||||
botRespMode = false
|
botRespMode = false
|
||||||
|
toolRunningMode = false
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if event.Key() == tcell.KeyF7 {
|
if event.Key() == tcell.KeyF7 {
|
||||||
// copy msg to clipboard
|
// copy msg to clipboard
|
||||||
editMode = false
|
editMode = false
|
||||||
m := chatBody.Messages[len(chatBody.Messages)-1]
|
m := chatBody.GetMessages()[chatBody.GetMessageCount()-1]
|
||||||
msgText := m.GetText()
|
msgText := m.GetText()
|
||||||
if err := copyToClipboard(msgText); err != nil {
|
if err := copyToClipboard(msgText); err != nil {
|
||||||
logger.Error("failed to copy to clipboard", "error", err)
|
logger.Error("failed to copy to clipboard", "error", err)
|
||||||
}
|
}
|
||||||
previewLen := min(30, len(msgText))
|
previewLen := min(30, len(msgText))
|
||||||
notification := fmt.Sprintf("msg '%s' was copied to the clipboard", msgText[:previewLen])
|
notification := fmt.Sprintf("msg '%s' was copied to the clipboard", msgText[:previewLen])
|
||||||
if err := notifyUser("copied", notification); err != nil {
|
showToast("copied", notification)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if event.Key() == tcell.KeyF8 {
|
if event.Key() == tcell.KeyF8 {
|
||||||
@@ -719,9 +838,7 @@ func init() {
|
|||||||
text := textView.GetText(false)
|
text := textView.GetText(false)
|
||||||
cb := codeBlockRE.FindAllString(text, -1)
|
cb := codeBlockRE.FindAllString(text, -1)
|
||||||
if len(cb) == 0 {
|
if len(cb) == 0 {
|
||||||
if err := notifyUser("notify", "no code blocks in chat"); err != nil {
|
showToast("notify", "no code blocks in chat")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
table := makeCodeBlockTable(cb)
|
table := makeCodeBlockTable(cb)
|
||||||
@@ -736,9 +853,7 @@ func init() {
|
|||||||
// read files in chat_exports
|
// read files in chat_exports
|
||||||
filelist, err := os.ReadDir(exportDir)
|
filelist, err := os.ReadDir(exportDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if err := notifyUser("failed to load exports", err.Error()); err != nil {
|
showToast("failed to load exports", err.Error())
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
fli := []string{}
|
fli := []string{}
|
||||||
@@ -768,9 +883,7 @@ func init() {
|
|||||||
logger.Error("failed to export chat;", "error", err, "chat_name", activeChatName)
|
logger.Error("failed to export chat;", "error", err, "chat_name", activeChatName)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if err := notifyUser("exported chat", "chat: "+activeChatName+" was exported"); err != nil {
|
showToast("exported chat", "chat: "+activeChatName+" was exported")
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if event.Key() == tcell.KeyCtrlP {
|
if event.Key() == tcell.KeyCtrlP {
|
||||||
@@ -809,9 +922,7 @@ func init() {
|
|||||||
labels, err := initSysCards()
|
labels, err := initSysCards()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to read sys dir", "error", err)
|
logger.Error("failed to read sys dir", "error", err)
|
||||||
if err := notifyUser("error", "failed to read: "+cfg.SysDir); err != nil {
|
showToast("error", "failed to read: "+cfg.SysDir)
|
||||||
logger.Debug("failed to notify user", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
at := makeAgentTable(labels)
|
at := makeAgentTable(labels)
|
||||||
@@ -824,6 +935,7 @@ func init() {
|
|||||||
if event.Key() == tcell.KeyCtrlK {
|
if event.Key() == tcell.KeyCtrlK {
|
||||||
// add message from tools
|
// add message from tools
|
||||||
cfg.ToolUse = !cfg.ToolUse
|
cfg.ToolUse = !cfg.ToolUse
|
||||||
|
updateToolCapabilities()
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -835,21 +947,27 @@ func init() {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to open attached image", "path", lastImg, "error", err)
|
logger.Error("failed to open attached image", "path", lastImg, "error", err)
|
||||||
// Fall back to showing agent image
|
// Fall back to showing agent image
|
||||||
loadImage()
|
if err := loadImage(); err != nil {
|
||||||
|
logger.Warn("failed to load agent image", "error", err)
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
defer file.Close()
|
defer file.Close()
|
||||||
img, _, err := image.Decode(file)
|
img, _, err := image.Decode(file)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to decode attached image", "path", lastImg, "error", err)
|
logger.Error("failed to decode attached image", "path", lastImg, "error", err)
|
||||||
// Fall back to showing agent image
|
// Fall back to showing agent image
|
||||||
loadImage()
|
if err := loadImage(); err != nil {
|
||||||
|
logger.Warn("failed to load agent image", "error", err)
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
imgView.SetImage(img)
|
imgView.SetImage(img)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// No attached image, show agent image as before
|
// No attached image, show agent image as before
|
||||||
loadImage()
|
if err := loadImage(); err != nil {
|
||||||
|
logger.Warn("failed to load agent image", "error", err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
pages.AddPage(imgPage, imgView, true, true)
|
pages.AddPage(imgPage, imgView, true, true)
|
||||||
return nil
|
return nil
|
||||||
@@ -861,9 +979,7 @@ func init() {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
msg := "failed to inference user speech; error:" + err.Error()
|
msg := "failed to inference user speech; error:" + err.Error()
|
||||||
logger.Error(msg)
|
logger.Error(msg)
|
||||||
if err := notifyUser("stt error", msg); err != nil {
|
showToast("stt error", msg)
|
||||||
logger.Error("failed to notify user", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
if userSpeech != "" {
|
if userSpeech != "" {
|
||||||
@@ -885,10 +1001,10 @@ func init() {
|
|||||||
TTSDoneChan <- true
|
TTSDoneChan <- true
|
||||||
}
|
}
|
||||||
if event.Key() == tcell.KeyRune && event.Rune() == '0' && event.Modifiers()&tcell.ModAlt != 0 && cfg.TTS_ENABLED {
|
if event.Key() == tcell.KeyRune && event.Rune() == '0' && event.Modifiers()&tcell.ModAlt != 0 && cfg.TTS_ENABLED {
|
||||||
if len(chatBody.Messages) > 0 {
|
if chatBody.GetMessageCount() > 0 {
|
||||||
// Stop any currently playing TTS first
|
// Stop any currently playing TTS first
|
||||||
TTSDoneChan <- true
|
TTSDoneChan <- true
|
||||||
lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
|
lastMsg := chatBody.GetMessages()[chatBody.GetMessageCount()-1]
|
||||||
cleanedText := models.CleanText(lastMsg.GetText())
|
cleanedText := models.CleanText(lastMsg.GetText())
|
||||||
if cleanedText != "" {
|
if cleanedText != "" {
|
||||||
// nolint: errcheck
|
// nolint: errcheck
|
||||||
@@ -900,7 +1016,7 @@ func init() {
|
|||||||
if event.Key() == tcell.KeyCtrlW {
|
if event.Key() == tcell.KeyCtrlW {
|
||||||
// INFO: continue bot/text message
|
// INFO: continue bot/text message
|
||||||
// without new role
|
// without new role
|
||||||
lastRole := chatBody.Messages[len(chatBody.Messages)-1].Role
|
lastRole := chatBody.GetMessages()[chatBody.GetMessageCount()-1].Role
|
||||||
// go chatRound("", lastRole, textView, false, true)
|
// go chatRound("", lastRole, textView, false, true)
|
||||||
chatRoundChan <- &models.ChatRoundReq{Role: lastRole, Resume: true}
|
chatRoundChan <- &models.ChatRoundReq{Role: lastRole, Resume: true}
|
||||||
return nil
|
return nil
|
||||||
@@ -921,6 +1037,17 @@ func init() {
|
|||||||
showBotRoleSelectionPopup()
|
showBotRoleSelectionPopup()
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
// INFO: shutdown
|
||||||
|
if event.Key() == tcell.KeyCtrlC {
|
||||||
|
logger.Info("caught Ctrl+C via tcell event")
|
||||||
|
go func() {
|
||||||
|
if err := pwShutDown(); err != nil {
|
||||||
|
logger.Error("shutdown failed", "err", err)
|
||||||
|
}
|
||||||
|
app.Stop()
|
||||||
|
}()
|
||||||
|
return nil // swallow the event
|
||||||
|
}
|
||||||
if event.Key() == tcell.KeyCtrlG {
|
if event.Key() == tcell.KeyCtrlG {
|
||||||
// cfg.RAGDir is the directory with files to use with RAG
|
// cfg.RAGDir is the directory with files to use with RAG
|
||||||
// rag load
|
// rag load
|
||||||
@@ -932,26 +1059,20 @@ func init() {
|
|||||||
// Create the RAG directory if it doesn't exist
|
// Create the RAG directory if it doesn't exist
|
||||||
if mkdirErr := os.MkdirAll(cfg.RAGDir, 0755); mkdirErr != nil {
|
if mkdirErr := os.MkdirAll(cfg.RAGDir, 0755); mkdirErr != nil {
|
||||||
logger.Error("failed to create RAG directory", "dir", cfg.RAGDir, "error", mkdirErr)
|
logger.Error("failed to create RAG directory", "dir", cfg.RAGDir, "error", mkdirErr)
|
||||||
if notifyerr := notifyUser("failed to create RAG directory", mkdirErr.Error()); notifyerr != nil {
|
showToast("failed to create RAG directory", mkdirErr.Error())
|
||||||
logger.Error("failed to send notification", "error", notifyerr)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
// Now try to read the directory again after creating it
|
// Now try to read the directory again after creating it
|
||||||
files, err = os.ReadDir(cfg.RAGDir)
|
files, err = os.ReadDir(cfg.RAGDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to read dir after creating it", "dir", cfg.RAGDir, "error", err)
|
logger.Error("failed to read dir after creating it", "dir", cfg.RAGDir, "error", err)
|
||||||
if notifyerr := notifyUser("failed to read RAG directory", err.Error()); notifyerr != nil {
|
showToast("failed to read RAG directory", err.Error())
|
||||||
logger.Error("failed to send notification", "error", notifyerr)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Other error (permissions, etc.)
|
// Other error (permissions, etc.)
|
||||||
logger.Error("failed to read dir", "dir", cfg.RAGDir, "error", err)
|
logger.Error("failed to read dir", "dir", cfg.RAGDir, "error", err)
|
||||||
if notifyerr := notifyUser("failed to open RAG files dir", err.Error()); notifyerr != nil {
|
showToast("failed to open RAG files dir", err.Error())
|
||||||
logger.Error("failed to send notification", "error", notifyerr)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -981,9 +1102,7 @@ func init() {
|
|||||||
if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '9' {
|
if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '9' {
|
||||||
// Warm up (load) the currently selected model
|
// Warm up (load) the currently selected model
|
||||||
go warmUpModel()
|
go warmUpModel()
|
||||||
if err := notifyUser("model warmup", "loading model: "+chatBody.Model); err != nil {
|
showToast("model warmup", "loading model: "+chatBody.GetModel())
|
||||||
logger.Debug("failed to notify user", "error", err)
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
// cannot send msg in editMode or botRespMode
|
// cannot send msg in editMode or botRespMode
|
||||||
@@ -997,41 +1116,54 @@ func init() {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
msgText := textArea.GetText()
|
msgText := textArea.GetText()
|
||||||
|
nl := "\n\n" // keep empty lines between messages
|
||||||
|
prevText := textView.GetText(true)
|
||||||
|
persona := cfg.UserRole
|
||||||
|
// strings.LastIndex()
|
||||||
|
// newline is not needed is prev msg ends with one
|
||||||
|
if strings.HasSuffix(prevText, nl) {
|
||||||
|
nl = ""
|
||||||
|
} else if strings.HasSuffix(prevText, "\n") {
|
||||||
|
nl = "\n" // only one newline, add another
|
||||||
|
}
|
||||||
if msgText != "" {
|
if msgText != "" {
|
||||||
nl := "\n\n" // keep empty lines between messages
|
// as what char user sends msg?
|
||||||
prevText := textView.GetText(true)
|
if cfg.WriteNextMsgAs != "" {
|
||||||
persona := cfg.UserRole
|
persona = cfg.WriteNextMsgAs
|
||||||
// strings.LastIndex()
|
|
||||||
// newline is not needed is prev msg ends with one
|
|
||||||
if strings.HasSuffix(prevText, nl) {
|
|
||||||
nl = ""
|
|
||||||
} else if strings.HasSuffix(prevText, "\n") {
|
|
||||||
nl = "\n" // only one newline, add another
|
|
||||||
}
|
}
|
||||||
if msgText != "" {
|
// check if plain text
|
||||||
// as what char user sends msg?
|
if !injectRole {
|
||||||
if cfg.WriteNextMsgAs != "" {
|
matches := roleRE.FindStringSubmatch(msgText)
|
||||||
persona = cfg.WriteNextMsgAs
|
if len(matches) > 1 {
|
||||||
|
persona = matches[1]
|
||||||
|
msgText = strings.TrimLeft(msgText[len(matches[0]):], " ")
|
||||||
}
|
}
|
||||||
// check if plain text
|
|
||||||
if !injectRole {
|
|
||||||
matches := roleRE.FindStringSubmatch(msgText)
|
|
||||||
if len(matches) > 1 {
|
|
||||||
persona = matches[1]
|
|
||||||
msgText = strings.TrimLeft(msgText[len(matches[0]):], " ")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// add user icon before user msg
|
|
||||||
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
|
|
||||||
nl, len(chatBody.Messages), persona, msgText)
|
|
||||||
textArea.SetText("", true)
|
|
||||||
if scrollToEndEnabled {
|
|
||||||
textView.ScrollToEnd()
|
|
||||||
}
|
|
||||||
colorText()
|
|
||||||
}
|
}
|
||||||
// go chatRound(msgText, persona, textView, false, false)
|
// add user icon before user msg
|
||||||
chatRoundChan <- &models.ChatRoundReq{Role: persona, UserMsg: msgText}
|
fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
|
||||||
|
nl, chatBody.GetMessageCount(), persona, msgText)
|
||||||
|
textArea.SetText("", true)
|
||||||
|
if scrollToEndEnabled {
|
||||||
|
textView.ScrollToEnd()
|
||||||
|
}
|
||||||
|
colorText()
|
||||||
|
} else {
|
||||||
|
pages.AddPage(confirmPageName, confirmModal, true, true)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// go chatRound(msgText, persona, textView, false, false)
|
||||||
|
chatRoundChan <- &models.ChatRoundReq{Role: persona, UserMsg: msgText}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyTab && !shellMode {
|
||||||
|
currentF := app.GetFocus()
|
||||||
|
if currentF == textArea {
|
||||||
|
currentText := textArea.GetText()
|
||||||
|
atIndex := strings.LastIndex(currentText, "@")
|
||||||
|
if atIndex >= 0 {
|
||||||
|
filter := currentText[atIndex+1:]
|
||||||
|
showTextAreaFileCompletionPopup(filter)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user