Compare commits
204 Commits
feat/char-
...
feat/cli
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
326a1a4d09 | ||
|
|
0b7f621a75 | ||
|
|
46dbb7b981 | ||
|
|
ef0940daa8 | ||
|
|
3d44686a51 | ||
|
|
df04d8c21c | ||
|
|
7c8697f48e | ||
|
|
7f8bbefb05 | ||
|
|
2c9c36e2c6 | ||
|
|
e476575334 | ||
|
|
7e346b5e19 | ||
|
|
1396b3eb05 | ||
|
|
619b19cb46 | ||
|
|
26377702d3 | ||
|
|
fdcaa6c5e2 | ||
|
|
77c365959d | ||
|
|
f4fcb85570 | ||
|
|
92acfb7ed4 | ||
|
|
2901208c80 | ||
|
|
4cfe2fe37f | ||
|
|
648035b194 | ||
|
|
13773bcc97 | ||
|
|
d9b820c9c4 | ||
|
|
adc4dea644 | ||
|
|
6456cb0922 | ||
|
|
e77dbb3160 | ||
|
|
3013f21a75 | ||
|
|
bb57be95c6 | ||
|
|
528d4210fc | ||
|
|
54b4cccf92 | ||
|
|
c2c90f6d2b | ||
|
|
94769225cf | ||
|
|
0e42a6f069 | ||
|
|
a1b5f9cdc5 | ||
|
|
e74ff8c03f | ||
|
|
b6e802c12e | ||
|
|
c0d5db29a5 | ||
|
|
6ed96c9bd3 | ||
|
|
b5f0eabeea | ||
|
|
e0201886f8 | ||
|
|
5b175c12a6 | ||
|
|
c200c9328c | ||
|
|
23cb8f2578 | ||
|
|
4f0bce50c5 | ||
|
|
bf655a1087 | ||
|
|
c8f00198d6 | ||
|
|
c5a24b2a3f | ||
|
|
0f0c43f327 | ||
|
|
0e55e44f62 | ||
|
|
014e297ae3 | ||
|
|
5f273681df | ||
|
|
17b68bc21f | ||
|
|
edfd43c52a | ||
|
|
62ec55505c | ||
|
|
f9866bcf5a | ||
|
|
822cc48834 | ||
|
|
4ef0a21511 | ||
|
|
d2caebdb4f | ||
|
|
e1f2a8cd7b | ||
|
|
efc92d884c | ||
|
|
ac8c8bb055 | ||
|
|
c2c107c786 | ||
|
|
c2757653a3 | ||
|
|
4bd6883966 | ||
|
|
7c56e27dbe | ||
|
|
fbc955ca37 | ||
|
|
c65c11bcfb | ||
|
|
04f1fd464b | ||
|
|
6e9c453ee0 | ||
|
|
645b7351a8 | ||
|
|
57088565bd | ||
|
|
4b6769e531 | ||
|
|
2687f38d00 | ||
|
|
d144ee76d9 | ||
|
|
abcaad6609 | ||
|
|
50ce0200af | ||
|
|
58ccd63f4a | ||
|
|
3611d7eb59 | ||
|
|
8974d2f52c | ||
|
|
6b0d03f2d6 | ||
|
|
fb4deb1161 | ||
|
|
0e5d37666f | ||
|
|
093103bdd7 | ||
|
|
6c9a1ba56b | ||
|
|
93ecfc8a34 | ||
|
|
0c9c590d8f | ||
|
|
d130254e88 | ||
|
|
6e7a063300 | ||
|
|
c05b93299c | ||
|
|
cad1bd46c1 | ||
|
|
4bddce3700 | ||
|
|
fcc71987bf | ||
|
|
8458edf5a8 | ||
|
|
07b06bb0d3 | ||
|
|
3389b1d83b | ||
|
|
4f6000a43a | ||
|
|
9ba46b40cc | ||
|
|
5bb456272e | ||
|
|
8999f48fb9 | ||
|
|
b2f280a7f1 | ||
|
|
65cbd5d6a6 | ||
|
|
caac1d397a | ||
|
|
742f1ca838 | ||
|
|
e36bade353 | ||
|
|
01d8bcdbf5 | ||
|
|
f6a395bce9 | ||
|
|
dc34c63256 | ||
|
|
cdfccf9a24 | ||
|
|
1f112259d2 | ||
|
|
a505ffaaa9 | ||
|
|
32be271aa3 | ||
|
|
133ec27938 | ||
|
|
d79760a289 | ||
|
|
2580360f91 | ||
|
|
fe4dd0c982 | ||
|
|
83f99d3577 | ||
|
|
e521434073 | ||
|
|
916c5d3904 | ||
|
|
5b1cbb46fa | ||
|
|
1fcab8365e | ||
|
|
c855c30ae2 | ||
|
|
915b029d2c | ||
|
|
b599e1ab38 | ||
|
|
0d94734090 | ||
|
|
a0ff384b81 | ||
|
|
09b5e0d08f | ||
|
|
7d51c5d0f3 | ||
|
|
b97cd67d72 | ||
|
|
888c9fec65 | ||
|
|
4f07994bdc | ||
|
|
776fd7a2c4 | ||
|
|
9c6b0dc1fa | ||
|
|
9f51bd3853 | ||
|
|
b386c1181f | ||
|
|
b8e7649e69 | ||
|
|
6664c1a0fc | ||
|
|
e0c3fe554f | ||
|
|
40943ff4d3 | ||
|
|
6c03a1a277 | ||
|
|
27288e2aaa | ||
|
|
1c728ec7a7 | ||
|
|
78059083c2 | ||
|
|
34cd4ac141 | ||
|
|
343366b12d | ||
|
|
978369eeaa | ||
|
|
c39e1c267d | ||
|
|
9af21895c6 | ||
|
|
e3bd6f219f | ||
|
|
ae62c2c8d8 | ||
|
|
04db7c2f01 | ||
|
|
3d889e70b5 | ||
|
|
ef53e9bebe | ||
|
|
a546bfe596 | ||
|
|
23c21f87bb | ||
|
|
850ca103e5 | ||
|
|
b7b5fcbf79 | ||
|
|
1e13c7796d | ||
|
|
9a727b21ad | ||
|
|
beb944c390 | ||
|
|
5844dd1494 | ||
|
|
84c4010213 | ||
|
|
86260e218c | ||
|
|
2c694e2b2b | ||
|
|
66ccb7a732 | ||
|
|
deece322ef | ||
|
|
e7c8fef32d | ||
|
|
eedda0ec4b | ||
|
|
96ffbd5cf5 | ||
|
|
85b11fa9ff | ||
|
|
1675af98d4 | ||
|
|
61a0ddfdfd | ||
|
|
26ab5c59e3 | ||
|
|
35cc8c068f | ||
|
|
27fdec1361 | ||
|
|
76827a71cc | ||
|
|
3a9a7dbe99 | ||
|
|
d3361c13c5 | ||
|
|
7c1a8b0122 | ||
|
|
eeca909b65 | ||
|
|
b18d96ac13 | ||
|
|
b861b92e5d | ||
|
|
17f0afac80 | ||
|
|
931b646c30 | ||
|
|
f560ecf70b | ||
|
|
f40f09390b | ||
|
|
5548991f5c | ||
|
|
c12311da99 | ||
|
|
7d18a9d77e | ||
|
|
b67ae1be98 | ||
|
|
372e49199b | ||
|
|
d6d4f09f8d | ||
|
|
475936fb1b | ||
|
|
fa846225ee | ||
|
|
7b2fa04391 | ||
|
|
c83779b479 | ||
|
|
43b0fe3739 | ||
|
|
1b36ef938e | ||
|
|
987d5842a4 | ||
|
|
10b665813e | ||
|
|
8c3c2b9b23 | ||
|
|
e42eb96371 | ||
|
|
46a33baabb | ||
|
|
875de679cf | ||
|
|
3b542421e3 |
7
.gitignore
vendored
7
.gitignore
vendored
@@ -1,12 +1,13 @@
|
|||||||
*.txt
|
|
||||||
*.json
|
*.json
|
||||||
testlog
|
testlog
|
||||||
history/
|
history/
|
||||||
*.db
|
*.db
|
||||||
|
*.db-shm
|
||||||
|
*.db-wal
|
||||||
config.toml
|
config.toml
|
||||||
sysprompts/*
|
sysprompts/*
|
||||||
!sysprompts/cluedo.json
|
|
||||||
!sysprompts/alice_bob_carl.json
|
!sysprompts/alice_bob_carl.json
|
||||||
|
!sysprompts/coding_assistant.json
|
||||||
history_bak/
|
history_bak/
|
||||||
.aider*
|
.aider*
|
||||||
tags
|
tags
|
||||||
@@ -15,3 +16,5 @@ gflt
|
|||||||
chat_exports/*.json
|
chat_exports/*.json
|
||||||
ragimport
|
ragimport
|
||||||
.env
|
.env
|
||||||
|
onnx/
|
||||||
|
*.log
|
||||||
|
|||||||
130
Makefile
130
Makefile
@@ -1,5 +1,4 @@
|
|||||||
.PHONY: setconfig run lint setup-whisper build-whisper download-whisper-model docker-up docker-down docker-logs noextra-run noextra-server
|
.PHONY: setconfig run lint lintall install-linters setup-whisper build-whisper download-whisper-model docker-up docker-down docker-logs noextra-run installdelve checkdelve fetch-onnx install-onnx-deps
|
||||||
|
|
||||||
|
|
||||||
run: setconfig
|
run: setconfig
|
||||||
go build -tags extra -o gf-lt && ./gf-lt
|
go build -tags extra -o gf-lt && ./gf-lt
|
||||||
@@ -10,28 +9,134 @@ build-debug:
|
|||||||
debug: build-debug
|
debug: build-debug
|
||||||
dlv exec --headless --accept-multiclient --listen=:2345 ./gf-lt
|
dlv exec --headless --accept-multiclient --listen=:2345 ./gf-lt
|
||||||
|
|
||||||
server: setconfig
|
|
||||||
go build -tags extra -o gf-lt && ./gf-lt -port 3333
|
|
||||||
|
|
||||||
noextra-run: setconfig
|
noextra-run: setconfig
|
||||||
go build -tags '!extra' -o gf-lt && ./gf-lt
|
go build -tags '!extra' -o gf-lt && ./gf-lt
|
||||||
|
|
||||||
noextra-server: setconfig
|
|
||||||
go build -tags '!extra' -o gf-lt && ./gf-lt -port 3333
|
|
||||||
|
|
||||||
setconfig:
|
setconfig:
|
||||||
find config.toml &>/dev/null || cp config.example.toml config.toml
|
find config.toml &>/dev/null || cp config.example.toml config.toml
|
||||||
|
|
||||||
|
installdelve:
|
||||||
|
go install github.com/go-delve/delve/cmd/dlv@latest
|
||||||
|
|
||||||
|
checkdelve:
|
||||||
|
which dlv &>/dev/null || installdelve
|
||||||
|
|
||||||
|
install-linters: ## Install additional linters (noblanks)
|
||||||
|
go install github.com/GrailFinder/noblanks-linter/cmd/noblanks@latest
|
||||||
|
|
||||||
lint: ## Run linters. Use make install-linters first.
|
lint: ## Run linters. Use make install-linters first.
|
||||||
golangci-lint run -c .golangci.yml ./...
|
golangci-lint run -c .golangci.yml ./...
|
||||||
|
|
||||||
|
lintall: lint
|
||||||
|
noblanks ./...
|
||||||
|
|
||||||
|
fetch-onnx:
|
||||||
|
mkdir -p onnx/embedgemma && curl -o onnx/embedgemma/config.json -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/config.json && curl -o onnx/embedgemma/tokenizer.json -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/tokenizer.json && curl -o onnx/embedgemma/model_q4.onnx -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/onnx/model_q4.onnx && curl -o onnx/embedgemma/model_q4.onnx_data -L https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/resolve/main/onnx/model_q4.onnx_data?download=true
|
||||||
|
|
||||||
|
install-onnx-deps: ## Install ONNX Runtime with CUDA support (or CPU fallback)
|
||||||
|
@echo "=== ONNX Runtime Installer ===" && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Checking for existing ONNX Runtime..." && \
|
||||||
|
if ldconfig -p 2>/dev/null | grep -q libonnxruntime.so.1; then \
|
||||||
|
echo "ONNX Runtime is already installed:" && \
|
||||||
|
ldconfig -p 2>/dev/null | grep libonnxruntime && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Skipping installation. To reinstall, remove existing libs first:" && \
|
||||||
|
echo " sudo rm -f /usr/local/lib/libonnxruntime*.so*" && \
|
||||||
|
exit 0; \
|
||||||
|
fi && \
|
||||||
|
echo "No ONNX Runtime found. Proceeding with installation..." && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Detecting CUDA version..." && \
|
||||||
|
HAS_CUDA=0 && \
|
||||||
|
if command -v nvidia-smi >/dev/null 2>&1; then \
|
||||||
|
CUDA_INFO=$$(nvidia-smi --query-gpu=driver_version --format=csv,noheader 2>/dev/null | head -1) && \
|
||||||
|
if [ -n "$$CUDA_INFO" ]; then \
|
||||||
|
echo "Found NVIDIA GPU with driver: $$CUDA_INFO" && \
|
||||||
|
HAS_CUDA=1; \
|
||||||
|
else \
|
||||||
|
echo "NVIDIA driver found but could not detect CUDA version"; \
|
||||||
|
fi; \
|
||||||
|
else \
|
||||||
|
echo "No NVIDIA GPU detected (nvidia-smi not found)"; \
|
||||||
|
fi && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Determining ONNX Runtime version..." && \
|
||||||
|
ARCH=$$(uname -m) && \
|
||||||
|
if [ "$$ARCH" = "x86_64" ]; then \
|
||||||
|
ONNX_ARCH="x64"; \
|
||||||
|
elif [ "$$ARCH" = "aarch64" ] || [ "$$ARCH" = "arm64" ]; then \
|
||||||
|
ONNX_ARCH="aarch64"; \
|
||||||
|
else \
|
||||||
|
echo "Unsupported architecture: $$ARCH" && \
|
||||||
|
exit 1; \
|
||||||
|
fi && \
|
||||||
|
echo "Detected architecture: $$ARCH (ONNX runtime: $$ONNX_ARCH)" && \
|
||||||
|
if [ "$$HAS_CUDA" = "1" ]; then \
|
||||||
|
echo "Installing ONNX Runtime with CUDA support..."; \
|
||||||
|
ONNX_VERSION="1.24.2"; \
|
||||||
|
else \
|
||||||
|
echo "Installing ONNX Runtime (CPU version)..."; \
|
||||||
|
ONNX_VERSION="1.24.2"; \
|
||||||
|
fi && \
|
||||||
|
FILENAME="onnxruntime-linux-$${ONNX_ARCH}-${ONNX_VERSION}.tgz" && \
|
||||||
|
URL="https://github.com/microsoft/onnxruntime/releases/download/v$${ONNX_VERSION}/$${FILENAME}" && \
|
||||||
|
echo "Downloading $${URL}..." && \
|
||||||
|
mkdir -p /tmp/onnx-install && \
|
||||||
|
curl -L -o /tmp/onnx-install/$${FILENAME} "$${URL}" || { \
|
||||||
|
echo "Failed to download ONNX Runtime v$${ONNX_VERSION}. Trying v1.18.0..." && \
|
||||||
|
ONNX_VERSION="1.18.0" && \
|
||||||
|
FILENAME="onnxruntime-linux-$${ONNX_ARCH}-${ONNX_VERSION}.tgz" && \
|
||||||
|
URL="https://github.com/microsoft/onnxruntime/releases/download/v$${ONNX_VERSION}/$${FILENAME}" && \
|
||||||
|
curl -L -o /tmp/onnx-install/$${FILENAME} "$${URL}" || { \
|
||||||
|
echo "ERROR: Failed to download ONNX Runtime from GitHub" && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Please install manually:" && \
|
||||||
|
echo " 1. Go to https://github.com/microsoft/onnxruntime/releases" && \
|
||||||
|
echo " 2. Download onnxruntime-linux-$${ONNX_ARCH}-VERSION.tgz" && \
|
||||||
|
echo " 3. Extract and copy to /usr/local/lib:" && \
|
||||||
|
echo " tar -xzf onnxruntime-linux-$${ONNX_ARCH}-VERSION.tgz" && \
|
||||||
|
echo " sudo cp -r onnxruntime-linux-$${ONNX_ARCH}-VERSION/lib/* /usr/local/lib/" && \
|
||||||
|
echo " sudo ldconfig" && \
|
||||||
|
exit 1; \
|
||||||
|
}; \
|
||||||
|
} && \
|
||||||
|
echo "Extracting..." && \
|
||||||
|
cd /tmp/onnx-install && tar -xzf $${FILENAME} && \
|
||||||
|
echo "Installing to /usr/local/lib..." && \
|
||||||
|
ONNX_DIR=$$(find /tmp/onnx-install -maxdepth 1 -type d -name "onnxruntime-linux-*") && \
|
||||||
|
if [ -d "$${ONNX_DIR}/lib" ]; then \
|
||||||
|
cp -r $${ONNX_DIR}/lib/* /usr/local/lib/ 2>/dev/null || sudo cp -r $${ONNX_DIR}/lib/* /usr/local/lib/; \
|
||||||
|
else \
|
||||||
|
echo "ERROR: Could not find lib directory in extracted archive" && \
|
||||||
|
exit 1; \
|
||||||
|
fi && \
|
||||||
|
echo "Updating library cache..." && \
|
||||||
|
sudo ldconfig 2>/dev/null || ldconfig && \
|
||||||
|
echo "" && \
|
||||||
|
echo "=== Installation complete! ===" && \
|
||||||
|
echo "" && \
|
||||||
|
echo "Installed libraries:" && \
|
||||||
|
ldconfig -p | grep libonnxruntime || echo "(libraries may require logout/relogin to appear)" && \
|
||||||
|
echo "" && \
|
||||||
|
if [ "$$HAS_CUDA" = "1" ]; then \
|
||||||
|
echo "NOTE: CUDA-enabled ONNX Runtime installed."; \
|
||||||
|
echo "Ensure you also have CUDA libraries installed:"; \
|
||||||
|
echo " - libcudnn, libcublas, libcurand"; \
|
||||||
|
else \
|
||||||
|
echo "NOTE: CPU-only ONNX Runtime installed."; \
|
||||||
|
echo "For GPU support, install CUDA and re-run this script."; \
|
||||||
|
fi && \
|
||||||
|
rm -rf /tmp/onnx-install
|
||||||
|
|
||||||
# Whisper STT Setup (in batteries directory)
|
# Whisper STT Setup (in batteries directory)
|
||||||
setup-whisper: build-whisper download-whisper-model
|
setup-whisper: build-whisper download-whisper-model
|
||||||
|
|
||||||
build-whisper: ## Build whisper.cpp from source in batteries directory
|
build-whisper: ## Build whisper.cpp from source in batteries directory
|
||||||
@echo "Building whisper.cpp from source in batteries directory..."
|
@echo "Building whisper.cpp from source in batteries directory..."
|
||||||
@if [ ! -d "batteries/whisper.cpp" ]; then \
|
@if [ ! -f "batteries/whisper.cpp/CMakeLists.txt" ]; then \
|
||||||
echo "Cloning whisper.cpp repository to batteries directory..."; \
|
echo "Cloning whisper.cpp repository to batteries directory..."; \
|
||||||
|
rm -rf batteries/whisper.cpp; \
|
||||||
git clone https://github.com/ggml-org/whisper.cpp.git batteries/whisper.cpp; \
|
git clone https://github.com/ggml-org/whisper.cpp.git batteries/whisper.cpp; \
|
||||||
fi
|
fi
|
||||||
cd batteries/whisper.cpp && cmake -B build -DGGML_CUDA=ON -DWHISPER_SDL2=ON; cmake --build build --config Release -j 8
|
cd batteries/whisper.cpp && cmake -B build -DGGML_CUDA=ON -DWHISPER_SDL2=ON; cmake --build build --config Release -j 8
|
||||||
@@ -39,11 +144,10 @@ build-whisper: ## Build whisper.cpp from source in batteries directory
|
|||||||
|
|
||||||
download-whisper-model: ## Download Whisper model for STT in batteries directory
|
download-whisper-model: ## Download Whisper model for STT in batteries directory
|
||||||
@echo "Downloading Whisper model for STT..."
|
@echo "Downloading Whisper model for STT..."
|
||||||
@if [ ! -d "batteries/whisper.cpp" ]; then \
|
@if [ ! -d "batteries/whisper.cpp/models" ]; then \
|
||||||
echo "Please run 'make setup-whisper' first to clone the repository."; \
|
mkdir -p "batteries/whisper.cpp/models"; \
|
||||||
exit 1; \
|
|
||||||
fi
|
fi
|
||||||
@cd batteries/whisper.cpp && bash ./models/download-ggml-model.sh large-v3-turbo-q5_0
|
curl -o batteries/whisper.cpp/models/ggml-large-v3-turbo-q5_0.bin -L "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q5_0.bin?download=true"
|
||||||
@echo "Whisper model downloaded successfully!"
|
@echo "Whisper model downloaded successfully!"
|
||||||
|
|
||||||
# Docker targets for STT/TTS services (in batteries directory)
|
# Docker targets for STT/TTS services (in batteries directory)
|
||||||
|
|||||||
14
README.md
14
README.md
@@ -8,10 +8,21 @@ made with use of [tview](https://github.com/rivo/tview)
|
|||||||
- tts/stt (run make commands to get deps);
|
- tts/stt (run make commands to get deps);
|
||||||
- image input;
|
- image input;
|
||||||
- function calls (function calls are implemented natively, to avoid calling outside sources);
|
- function calls (function calls are implemented natively, to avoid calling outside sources);
|
||||||
|
- [character specific context (unique feature)](docs/char-specific-context.md)
|
||||||
|
|
||||||
|
|
||||||
|
#### showcase on youtube
|
||||||
|
[](https://youtu.be/WCS4Xc902F8 "gf-lt showcase")
|
||||||
|
|
||||||
#### how it looks
|
#### how it looks
|
||||||

|

|
||||||
|
|
||||||
|
|
||||||
|
#### dependencies
|
||||||
|
- make
|
||||||
|
- go
|
||||||
|
- ffmpeg (extra)
|
||||||
|
|
||||||
#### how to install
|
#### how to install
|
||||||
(requires golang)
|
(requires golang)
|
||||||
clone the project
|
clone the project
|
||||||
@@ -27,7 +38,8 @@ make noextra-run
|
|||||||
```
|
```
|
||||||
|
|
||||||
#### keybinds
|
#### keybinds
|
||||||
while running you can press f12 for list of keys;
|
- use `insert` button to paste text from the clipboard to the text area, instead of shift+insert (might freeze the program);
|
||||||
|
- press f12 for list of keys;
|
||||||

|

|
||||||
|
|
||||||
#### setting up config
|
#### setting up config
|
||||||
|
|||||||
@@ -4,11 +4,12 @@ package agent
|
|||||||
// ones who do their own tools calls
|
// ones who do their own tools calls
|
||||||
// ones that works only with the output
|
// ones that works only with the output
|
||||||
|
|
||||||
// A: main chat -> agent (handles everything: tool + processing)
|
// A: main chat -> agent (handles everything: tool + processing), supports tool chaining
|
||||||
// B: main chat -> tool -> agent (process tool output)
|
// B: main chat -> tool -> agent (process tool output)
|
||||||
|
|
||||||
// AgenterA gets a task "find out weather in london"
|
// AgenterA gets a task like "go to the webpage, login and take a screenshot (tell me what you see)"
|
||||||
// proceeds to make tool calls on its own
|
// proceeds to make a plan and executes it.
|
||||||
|
// returns with final result or an error
|
||||||
type AgenterA interface {
|
type AgenterA interface {
|
||||||
ProcessTask(task string) []byte
|
ProcessTask(task string) []byte
|
||||||
}
|
}
|
||||||
@@ -38,8 +39,3 @@ func RegisterA(toolNames []string, a AgenterA) {
|
|||||||
func Get(toolName string) AgenterB {
|
func Get(toolName string) AgenterB {
|
||||||
return RegistryB[toolName]
|
return RegistryB[toolName]
|
||||||
}
|
}
|
||||||
|
|
||||||
// Register is a convenience wrapper for RegisterB.
|
|
||||||
func Register(toolName string, a AgenterB) {
|
|
||||||
RegisterB(toolName, a)
|
|
||||||
}
|
|
||||||
|
|||||||
119
agent/pw_agent.go
Normal file
119
agent/pw_agent.go
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
package agent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"gf-lt/models"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PWAgent: is AgenterA type agent (enclosed with tool chaining)
|
||||||
|
// sysprompt explain tools and how to plan for execution
|
||||||
|
type PWAgent struct {
|
||||||
|
*AgentClient
|
||||||
|
sysprompt string
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewPWAgent creates a PWAgent with the given client and system prompt
|
||||||
|
func NewPWAgent(client *AgentClient, sysprompt string) *PWAgent {
|
||||||
|
return &PWAgent{AgentClient: client, sysprompt: sysprompt}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTools sets the tools available to the agent
|
||||||
|
func (a *PWAgent) SetTools(tools []models.Tool) {
|
||||||
|
a.tools = tools
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *PWAgent) ProcessTask(task string) []byte {
|
||||||
|
req, err := a.FormFirstMsg(a.sysprompt, task)
|
||||||
|
if err != nil {
|
||||||
|
a.Log().Error("PWAgent failed to process the request", "error", err)
|
||||||
|
return []byte("PWAgent failed to process the request; err: " + err.Error())
|
||||||
|
}
|
||||||
|
toolCallLimit := 10
|
||||||
|
for i := 0; i < toolCallLimit; i++ {
|
||||||
|
resp, err := a.LLMRequest(req)
|
||||||
|
if err != nil {
|
||||||
|
a.Log().Error("failed to process the request", "error", err)
|
||||||
|
return []byte("failed to process the request; err: " + err.Error())
|
||||||
|
}
|
||||||
|
execTool, toolCallID, hasToolCall := findToolCall(resp)
|
||||||
|
if !hasToolCall {
|
||||||
|
return resp
|
||||||
|
}
|
||||||
|
|
||||||
|
a.setToolCallOnLastMessage(resp, toolCallID)
|
||||||
|
|
||||||
|
toolResp := string(execTool())
|
||||||
|
req, err = a.FormMsgWithToolCallID(toolResp, toolCallID)
|
||||||
|
if err != nil {
|
||||||
|
a.Log().Error("failed to form next message", "error", err)
|
||||||
|
return []byte("failed to form next message; err: " + err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *PWAgent) setToolCallOnLastMessage(resp []byte, toolCallID string) {
|
||||||
|
if toolCallID == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var genericResp map[string]interface{}
|
||||||
|
if err := json.Unmarshal(resp, &genericResp); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var name string
|
||||||
|
var args map[string]string
|
||||||
|
if choices, ok := genericResp["choices"].([]interface{}); ok && len(choices) > 0 {
|
||||||
|
if firstChoice, ok := choices[0].(map[string]interface{}); ok {
|
||||||
|
if message, ok := firstChoice["message"].(map[string]interface{}); ok {
|
||||||
|
if toolCalls, ok := message["tool_calls"].([]interface{}); ok && len(toolCalls) > 0 {
|
||||||
|
if tc, ok := toolCalls[0].(map[string]interface{}); ok {
|
||||||
|
if fn, ok := tc["function"].(map[string]interface{}); ok {
|
||||||
|
name, _ = fn["name"].(string)
|
||||||
|
argsStr, _ := fn["arguments"].(string)
|
||||||
|
_ = json.Unmarshal([]byte(argsStr), &args)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if name == "" {
|
||||||
|
content, _ := genericResp["content"].(string)
|
||||||
|
name = extractToolNameFromText(content)
|
||||||
|
}
|
||||||
|
lastIdx := len(a.chatBody.Messages) - 1
|
||||||
|
if lastIdx >= 0 {
|
||||||
|
a.chatBody.Messages[lastIdx].ToolCallID = toolCallID
|
||||||
|
if name != "" {
|
||||||
|
argsJSON, _ := json.Marshal(args)
|
||||||
|
a.chatBody.Messages[lastIdx].ToolCall = &models.ToolCall{
|
||||||
|
ID: toolCallID,
|
||||||
|
Name: name,
|
||||||
|
Args: string(argsJSON),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractToolNameFromText(text string) string {
|
||||||
|
jsStr := toolCallRE.FindString(text)
|
||||||
|
if jsStr == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
jsStr = strings.TrimSpace(jsStr)
|
||||||
|
jsStr = strings.TrimPrefix(jsStr, "__tool_call__")
|
||||||
|
jsStr = strings.TrimSuffix(jsStr, "__tool_call__")
|
||||||
|
jsStr = strings.TrimSpace(jsStr)
|
||||||
|
start := strings.Index(jsStr, "{")
|
||||||
|
end := strings.LastIndex(jsStr, "}")
|
||||||
|
if start == -1 || end == -1 || end <= start {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
jsStr = jsStr[start : end+1]
|
||||||
|
var fc models.FuncCall
|
||||||
|
if err := json.Unmarshal([]byte(jsStr), &fc); err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return fc.Name
|
||||||
|
}
|
||||||
338
agent/pw_tools.go
Normal file
338
agent/pw_tools.go
Normal file
@@ -0,0 +1,338 @@
|
|||||||
|
package agent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gf-lt/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ToolFunc func(map[string]string) []byte
|
||||||
|
|
||||||
|
var pwToolMap = make(map[string]ToolFunc)
|
||||||
|
|
||||||
|
func RegisterPWTool(name string, fn ToolFunc) {
|
||||||
|
pwToolMap[name] = fn
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetPWTools() []models.Tool {
|
||||||
|
return pwTools
|
||||||
|
}
|
||||||
|
|
||||||
|
var pwTools = []models.Tool{
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_start",
|
||||||
|
Description: "Start a Playwright browser instance. Must be called first before any other browser automation. Uses headless mode by default.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_stop",
|
||||||
|
Description: "Stop the Playwright browser instance. Call when done with browser automation.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_is_running",
|
||||||
|
Description: "Check if Playwright browser is currently running.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_navigate",
|
||||||
|
Description: "Navigate to a URL in the browser.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{"url"},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"url": {Type: "string", Description: "URL to navigate to"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_click",
|
||||||
|
Description: "Click on an element on the current webpage. Use 'index' for multiple matches (default 0).",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{"selector"},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector for the element"},
|
||||||
|
"index": {Type: "integer", Description: "Index for multiple matches (default 0)"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_fill",
|
||||||
|
Description: "Type text into an input field. Use 'index' for multiple matches (default 0).",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{"selector", "text"},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector for the input element"},
|
||||||
|
"text": {Type: "string", Description: "Text to type into the field"},
|
||||||
|
"index": {Type: "integer", Description: "Index for multiple matches (default 0)"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_extract_text",
|
||||||
|
Description: "Extract text content from the page or specific elements. Use selector 'body' for all page text.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector (default 'body' for all page text)"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_screenshot",
|
||||||
|
Description: "Take a screenshot of the page or a specific element. Returns a file path to the image.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector for element to screenshot"},
|
||||||
|
"full_page": {Type: "boolean", Description: "Capture full page (default false)"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_screenshot_and_view",
|
||||||
|
Description: "Take a screenshot and return the image for viewing. Use to visually verify page state.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector for element to screenshot"},
|
||||||
|
"full_page": {Type: "boolean", Description: "Capture full page (default false)"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_wait_for_selector",
|
||||||
|
Description: "Wait for an element to appear on the page before proceeding.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{"selector"},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector to wait for"},
|
||||||
|
"timeout": {Type: "integer", Description: "Timeout in milliseconds (default 30000)"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_drag",
|
||||||
|
Description: "Drag the mouse from point (x1,y1) to (x2,y2).",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{"x1", "y1", "x2", "y2"},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"x1": {Type: "number", Description: "Starting X coordinate"},
|
||||||
|
"y1": {Type: "number", Description: "Starting Y coordinate"},
|
||||||
|
"x2": {Type: "number", Description: "Ending X coordinate"},
|
||||||
|
"y2": {Type: "number", Description: "Ending Y coordinate"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_click_at",
|
||||||
|
Description: "Click at specific X,Y coordinates on the page.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{"x", "y"},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"x": {Type: "number", Description: "X coordinate"},
|
||||||
|
"y": {Type: "number", Description: "Y coordinate"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_get_html",
|
||||||
|
Description: "Get the HTML content of the page or a specific element.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector (default 'body')"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_get_dom",
|
||||||
|
Description: "Get a structured DOM representation with tag, attributes, text, and children.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"selector": {Type: "string", Description: "CSS selector (default 'body')"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "function",
|
||||||
|
Function: models.ToolFunc{
|
||||||
|
Name: "pw_search_elements",
|
||||||
|
Description: "Search for elements by text content or CSS selector.",
|
||||||
|
Parameters: models.ToolFuncParams{
|
||||||
|
Type: "object",
|
||||||
|
Required: []string{},
|
||||||
|
Properties: map[string]models.ToolArgProps{
|
||||||
|
"text": {Type: "string", Description: "Text content to search for"},
|
||||||
|
"selector": {Type: "string", Description: "CSS selector to search for"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var toolCallRE = regexp.MustCompile(`__tool_call__(.+?)__tool_call__`)
|
||||||
|
|
||||||
|
type ParsedToolCall struct {
|
||||||
|
ID string
|
||||||
|
Name string
|
||||||
|
Args map[string]string
|
||||||
|
}
|
||||||
|
|
||||||
|
func findToolCall(resp []byte) (func() []byte, string, bool) {
|
||||||
|
var genericResp map[string]interface{}
|
||||||
|
if err := json.Unmarshal(resp, &genericResp); err != nil {
|
||||||
|
return findToolCallFromText(string(resp))
|
||||||
|
}
|
||||||
|
if choices, ok := genericResp["choices"].([]interface{}); ok && len(choices) > 0 {
|
||||||
|
if firstChoice, ok := choices[0].(map[string]interface{}); ok {
|
||||||
|
if message, ok := firstChoice["message"].(map[string]interface{}); ok {
|
||||||
|
if toolCalls, ok := message["tool_calls"].([]interface{}); ok && len(toolCalls) > 0 {
|
||||||
|
return parseOpenAIToolCall(toolCalls)
|
||||||
|
}
|
||||||
|
if content, ok := message["content"].(string); ok {
|
||||||
|
return findToolCallFromText(content)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if text, ok := firstChoice["text"].(string); ok {
|
||||||
|
return findToolCallFromText(text)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if content, ok := genericResp["content"].(string); ok {
|
||||||
|
return findToolCallFromText(content)
|
||||||
|
}
|
||||||
|
return findToolCallFromText(string(resp))
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseOpenAIToolCall(toolCalls []interface{}) (func() []byte, string, bool) {
|
||||||
|
if len(toolCalls) == 0 {
|
||||||
|
return nil, "", false
|
||||||
|
}
|
||||||
|
tc := toolCalls[0].(map[string]interface{})
|
||||||
|
id, _ := tc["id"].(string)
|
||||||
|
function, _ := tc["function"].(map[string]interface{})
|
||||||
|
name, _ := function["name"].(string)
|
||||||
|
argsStr, _ := function["arguments"].(string)
|
||||||
|
var args map[string]string
|
||||||
|
if err := json.Unmarshal([]byte(argsStr), &args); err != nil {
|
||||||
|
return func() []byte {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to parse arguments: %v"}`, err))
|
||||||
|
}, id, true
|
||||||
|
}
|
||||||
|
return func() []byte {
|
||||||
|
fn, ok := pwToolMap[name]
|
||||||
|
if !ok {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "tool %s not found"}`, name))
|
||||||
|
}
|
||||||
|
return fn(args)
|
||||||
|
}, id, true
|
||||||
|
}
|
||||||
|
|
||||||
|
func findToolCallFromText(text string) (func() []byte, string, bool) {
|
||||||
|
jsStr := toolCallRE.FindString(text)
|
||||||
|
if jsStr == "" {
|
||||||
|
return nil, "", false
|
||||||
|
}
|
||||||
|
jsStr = strings.TrimSpace(jsStr)
|
||||||
|
jsStr = strings.TrimPrefix(jsStr, "__tool_call__")
|
||||||
|
jsStr = strings.TrimSuffix(jsStr, "__tool_call__")
|
||||||
|
jsStr = strings.TrimSpace(jsStr)
|
||||||
|
start := strings.Index(jsStr, "{")
|
||||||
|
end := strings.LastIndex(jsStr, "}")
|
||||||
|
if start == -1 || end == -1 || end <= start {
|
||||||
|
return func() []byte {
|
||||||
|
return []byte(`{"error": "no valid JSON found in tool call"}`)
|
||||||
|
}, "", true
|
||||||
|
}
|
||||||
|
jsStr = jsStr[start : end+1]
|
||||||
|
var fc models.FuncCall
|
||||||
|
if err := json.Unmarshal([]byte(jsStr), &fc); err != nil {
|
||||||
|
return func() []byte {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to parse tool call: %v}`, err))
|
||||||
|
}, "", true
|
||||||
|
}
|
||||||
|
if fc.ID == "" {
|
||||||
|
fc.ID = "call_" + generateToolCallID()
|
||||||
|
}
|
||||||
|
return func() []byte {
|
||||||
|
fn, ok := pwToolMap[fc.Name]
|
||||||
|
if !ok {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "tool %s not found"}`, fc.Name))
|
||||||
|
}
|
||||||
|
return fn(fc.Args)
|
||||||
|
}, fc.ID, true
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateToolCallID() string {
|
||||||
|
return strconv.Itoa(len(pwToolMap) % 10000)
|
||||||
|
}
|
||||||
131
agent/request.go
131
agent/request.go
@@ -32,10 +32,14 @@ func detectAPI(api string) (isCompletion, isChat, isDeepSeek, isOpenRouter bool)
|
|||||||
type AgentClient struct {
|
type AgentClient struct {
|
||||||
cfg *config.Config
|
cfg *config.Config
|
||||||
getToken func() string
|
getToken func() string
|
||||||
log slog.Logger
|
log *slog.Logger
|
||||||
|
chatBody *models.ChatBody
|
||||||
|
sysprompt string
|
||||||
|
// lastToolCallID string
|
||||||
|
tools []models.Tool
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewAgentClient(cfg *config.Config, log slog.Logger, gt func() string) *AgentClient {
|
func NewAgentClient(cfg *config.Config, log *slog.Logger, gt func() string) *AgentClient {
|
||||||
return &AgentClient{
|
return &AgentClient{
|
||||||
cfg: cfg,
|
cfg: cfg,
|
||||||
getToken: gt,
|
getToken: gt,
|
||||||
@@ -44,11 +48,46 @@ func NewAgentClient(cfg *config.Config, log slog.Logger, gt func() string) *Agen
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (ag *AgentClient) Log() *slog.Logger {
|
func (ag *AgentClient) Log() *slog.Logger {
|
||||||
return &ag.log
|
return ag.log
|
||||||
}
|
}
|
||||||
|
|
||||||
func (ag *AgentClient) FormMsg(sysprompt, msg string) (io.Reader, error) {
|
func (ag *AgentClient) FormFirstMsg(sysprompt, msg string) (io.Reader, error) {
|
||||||
b, err := ag.buildRequest(sysprompt, msg)
|
ag.sysprompt = sysprompt
|
||||||
|
ag.chatBody = &models.ChatBody{
|
||||||
|
Messages: []models.RoleMsg{
|
||||||
|
{Role: "system", Content: ag.sysprompt},
|
||||||
|
{Role: "user", Content: msg},
|
||||||
|
},
|
||||||
|
Stream: false,
|
||||||
|
Model: ag.cfg.CurrentModel,
|
||||||
|
}
|
||||||
|
b, err := ag.buildRequest()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return bytes.NewReader(b), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ag *AgentClient) FormMsg(msg string) (io.Reader, error) {
|
||||||
|
m := models.RoleMsg{
|
||||||
|
Role: "tool", Content: msg,
|
||||||
|
}
|
||||||
|
ag.chatBody.Messages = append(ag.chatBody.Messages, m)
|
||||||
|
b, err := ag.buildRequest()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return bytes.NewReader(b), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ag *AgentClient) FormMsgWithToolCallID(msg, toolCallID string) (io.Reader, error) {
|
||||||
|
m := models.RoleMsg{
|
||||||
|
Role: "tool",
|
||||||
|
Content: msg,
|
||||||
|
ToolCallID: toolCallID,
|
||||||
|
}
|
||||||
|
ag.chatBody.Messages = append(ag.chatBody.Messages, m)
|
||||||
|
b, err := ag.buildRequest()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -56,81 +95,52 @@ func (ag *AgentClient) FormMsg(sysprompt, msg string) (io.Reader, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// buildRequest creates the appropriate LLM request based on the current API endpoint.
|
// buildRequest creates the appropriate LLM request based on the current API endpoint.
|
||||||
func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
|
func (ag *AgentClient) buildRequest() ([]byte, error) {
|
||||||
api := ag.cfg.CurrentAPI
|
isCompletion, isChat, isDeepSeek, isOpenRouter := detectAPI(ag.cfg.CurrentAPI)
|
||||||
model := ag.cfg.CurrentModel
|
ag.log.Debug("agent building request", "api", ag.cfg.CurrentAPI, "isCompletion", isCompletion, "isChat", isChat, "isDeepSeek", isDeepSeek, "isOpenRouter", isOpenRouter)
|
||||||
messages := []models.RoleMsg{
|
|
||||||
{Role: "system", Content: sysprompt},
|
|
||||||
{Role: "user", Content: msg},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine API type
|
|
||||||
isCompletion, isChat, isDeepSeek, isOpenRouter := detectAPI(api)
|
|
||||||
ag.log.Debug("agent building request", "api", api, "isCompletion", isCompletion, "isChat", isChat, "isDeepSeek", isDeepSeek, "isOpenRouter", isOpenRouter)
|
|
||||||
|
|
||||||
// Build prompt for completion endpoints
|
// Build prompt for completion endpoints
|
||||||
if isCompletion {
|
if isCompletion {
|
||||||
var sb strings.Builder
|
var sb strings.Builder
|
||||||
for _, m := range messages {
|
for i := range ag.chatBody.Messages {
|
||||||
sb.WriteString(m.ToPrompt())
|
sb.WriteString(ag.chatBody.Messages[i].ToPrompt())
|
||||||
sb.WriteString("\n")
|
sb.WriteString("\n")
|
||||||
}
|
}
|
||||||
prompt := strings.TrimSpace(sb.String())
|
prompt := strings.TrimSpace(sb.String())
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
case isDeepSeek:
|
case isDeepSeek:
|
||||||
// DeepSeek completion
|
// DeepSeek completion
|
||||||
req := models.NewDSCompletionReq(prompt, model, defaultProps["temperature"], []string{})
|
req := models.NewDSCompletionReq(prompt, ag.chatBody.Model, defaultProps["temperature"], []string{})
|
||||||
req.Stream = false // Agents don't need streaming
|
req.Stream = false // Agents don't need streaming
|
||||||
return json.Marshal(req)
|
return json.Marshal(req)
|
||||||
case isOpenRouter:
|
case isOpenRouter:
|
||||||
// OpenRouter completion
|
// OpenRouter completion
|
||||||
req := models.NewOpenRouterCompletionReq(model, prompt, defaultProps, []string{})
|
req := models.NewOpenRouterCompletionReq(ag.chatBody.Model, prompt, defaultProps, []string{})
|
||||||
req.Stream = false // Agents don't need streaming
|
req.Stream = false // Agents don't need streaming
|
||||||
return json.Marshal(req)
|
return json.Marshal(req)
|
||||||
default:
|
default:
|
||||||
// Assume llama.cpp completion
|
// Assume llama.cpp completion
|
||||||
req := models.NewLCPReq(prompt, model, nil, defaultProps, []string{})
|
req := models.NewLCPReq(prompt, ag.chatBody.Model, nil, defaultProps, []string{})
|
||||||
req.Stream = false // Agents don't need streaming
|
req.Stream = false // Agents don't need streaming
|
||||||
return json.Marshal(req)
|
return json.Marshal(req)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Chat completions endpoints
|
|
||||||
if isChat || !isCompletion {
|
|
||||||
chatBody := &models.ChatBody{
|
|
||||||
Model: model,
|
|
||||||
Stream: false, // Agents don't need streaming
|
|
||||||
Messages: messages,
|
|
||||||
}
|
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
case isDeepSeek:
|
case isDeepSeek:
|
||||||
// DeepSeek chat
|
// DeepSeek chat
|
||||||
req := models.NewDSChatReq(*chatBody)
|
req := models.NewDSChatReq(*ag.chatBody)
|
||||||
return json.Marshal(req)
|
return json.Marshal(req)
|
||||||
case isOpenRouter:
|
case isOpenRouter:
|
||||||
// OpenRouter chat
|
// OpenRouter chat - agents don't use reasoning by default
|
||||||
req := models.NewOpenRouterChatReq(*chatBody, defaultProps)
|
req := models.NewOpenRouterChatReq(*ag.chatBody, defaultProps, ag.cfg.ReasoningEffort)
|
||||||
return json.Marshal(req)
|
return json.Marshal(req)
|
||||||
default:
|
default:
|
||||||
// Assume llama.cpp chat (OpenAI format)
|
// Assume llama.cpp chat (OpenAI format)
|
||||||
req := models.OpenAIReq{
|
req := models.OpenAIReq{
|
||||||
ChatBody: chatBody,
|
ChatBody: ag.chatBody,
|
||||||
Tools: nil,
|
Tools: ag.tools,
|
||||||
}
|
}
|
||||||
return json.Marshal(req)
|
return json.Marshal(req)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback (should not reach here)
|
|
||||||
ag.log.Warn("unknown API, using default chat completions format", "api", api)
|
|
||||||
chatBody := &models.ChatBody{
|
|
||||||
Model: model,
|
|
||||||
Stream: false, // Agents don't need streaming
|
|
||||||
Messages: messages,
|
|
||||||
}
|
|
||||||
return json.Marshal(chatBody)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) {
|
func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) {
|
||||||
@@ -140,7 +150,6 @@ func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) {
|
|||||||
ag.log.Error("failed to read request body", "error", err)
|
ag.log.Error("failed to read request body", "error", err)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
req, err := http.NewRequest("POST", ag.cfg.CurrentAPI, bytes.NewReader(bodyBytes))
|
req, err := http.NewRequest("POST", ag.cfg.CurrentAPI, bytes.NewReader(bodyBytes))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ag.log.Error("failed to create request", "error", err)
|
ag.log.Error("failed to create request", "error", err)
|
||||||
@@ -150,27 +159,22 @@ func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) {
|
|||||||
req.Header.Add("Content-Type", "application/json")
|
req.Header.Add("Content-Type", "application/json")
|
||||||
req.Header.Add("Authorization", "Bearer "+ag.getToken())
|
req.Header.Add("Authorization", "Bearer "+ag.getToken())
|
||||||
req.Header.Set("Accept-Encoding", "gzip")
|
req.Header.Set("Accept-Encoding", "gzip")
|
||||||
|
|
||||||
ag.log.Debug("agent LLM request", "url", ag.cfg.CurrentAPI, "body_preview", string(bodyBytes[:min(len(bodyBytes), 500)]))
|
ag.log.Debug("agent LLM request", "url", ag.cfg.CurrentAPI, "body_preview", string(bodyBytes[:min(len(bodyBytes), 500)]))
|
||||||
|
|
||||||
resp, err := httpClient.Do(req)
|
resp, err := httpClient.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ag.log.Error("llamacpp api request failed", "error", err, "url", ag.cfg.CurrentAPI)
|
ag.log.Error("llamacpp api request failed", "error", err, "url", ag.cfg.CurrentAPI)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
|
|
||||||
responseBytes, err := io.ReadAll(resp.Body)
|
responseBytes, err := io.ReadAll(resp.Body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ag.log.Error("failed to read response", "error", err)
|
ag.log.Error("failed to read response", "error", err)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if resp.StatusCode >= 400 {
|
if resp.StatusCode >= 400 {
|
||||||
ag.log.Error("agent LLM request failed", "status", resp.StatusCode, "response", string(responseBytes[:min(len(responseBytes), 1000)]))
|
ag.log.Error("agent LLM request failed", "status", resp.StatusCode, "response", string(responseBytes[:min(len(responseBytes), 1000)]))
|
||||||
return responseBytes, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(responseBytes[:min(len(responseBytes), 200)]))
|
return responseBytes, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(responseBytes[:min(len(responseBytes), 200)]))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse response and extract text content
|
// Parse response and extract text content
|
||||||
text, err := extractTextFromResponse(responseBytes)
|
text, err := extractTextFromResponse(responseBytes)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -178,24 +182,22 @@ func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) {
|
|||||||
// Return raw response as fallback
|
// Return raw response as fallback
|
||||||
return responseBytes, nil
|
return responseBytes, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return []byte(text), nil
|
return []byte(text), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// extractTextFromResponse parses common LLM response formats and extracts the text content.
|
// extractTextFromResponse parses common LLM response formats and extracts the text content.
|
||||||
func extractTextFromResponse(data []byte) (string, error) {
|
func extractTextFromResponse(data []byte) (string, error) {
|
||||||
// Try to parse as generic JSON first
|
// Try to parse as generic JSON first
|
||||||
var genericResp map[string]interface{}
|
var genericResp map[string]any
|
||||||
if err := json.Unmarshal(data, &genericResp); err != nil {
|
if err := json.Unmarshal(data, &genericResp); err != nil {
|
||||||
// Not JSON, return as string
|
// Not JSON, return as string
|
||||||
return string(data), nil
|
return string(data), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for OpenAI chat completion format
|
// Check for OpenAI chat completion format
|
||||||
if choices, ok := genericResp["choices"].([]interface{}); ok && len(choices) > 0 {
|
if choices, ok := genericResp["choices"].([]any); ok && len(choices) > 0 {
|
||||||
if firstChoice, ok := choices[0].(map[string]interface{}); ok {
|
if firstChoice, ok := choices[0].(map[string]any); ok {
|
||||||
// Chat completion: choices[0].message.content
|
// Chat completion: choices[0].message.content
|
||||||
if message, ok := firstChoice["message"].(map[string]interface{}); ok {
|
if message, ok := firstChoice["message"].(map[string]any); ok {
|
||||||
if content, ok := message["content"].(string); ok {
|
if content, ok := message["content"].(string); ok {
|
||||||
return content, nil
|
return content, nil
|
||||||
}
|
}
|
||||||
@@ -205,19 +207,17 @@ func extractTextFromResponse(data []byte) (string, error) {
|
|||||||
return text, nil
|
return text, nil
|
||||||
}
|
}
|
||||||
// Delta format for streaming (should not happen with stream: false)
|
// Delta format for streaming (should not happen with stream: false)
|
||||||
if delta, ok := firstChoice["delta"].(map[string]interface{}); ok {
|
if delta, ok := firstChoice["delta"].(map[string]any); ok {
|
||||||
if content, ok := delta["content"].(string); ok {
|
if content, ok := delta["content"].(string); ok {
|
||||||
return content, nil
|
return content, nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for llama.cpp completion format
|
// Check for llama.cpp completion format
|
||||||
if content, ok := genericResp["content"].(string); ok {
|
if content, ok := genericResp["content"].(string); ok {
|
||||||
return content, nil
|
return content, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Unknown format, return pretty-printed JSON
|
// Unknown format, return pretty-printed JSON
|
||||||
prettyJSON, err := json.MarshalIndent(genericResp, "", " ")
|
prettyJSON, err := json.MarshalIndent(genericResp, "", " ")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -225,10 +225,3 @@ func extractTextFromResponse(data []byte) (string, error) {
|
|||||||
}
|
}
|
||||||
return string(prettyJSON), nil
|
return string(prettyJSON), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func min(a, b int) int {
|
|
||||||
if a < b {
|
|
||||||
return a
|
|
||||||
}
|
|
||||||
return b
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -17,7 +17,8 @@ func NewWebAgentB(client *AgentClient, sysprompt string) *WebAgentB {
|
|||||||
|
|
||||||
// Process applies the formatting function to raw output
|
// Process applies the formatting function to raw output
|
||||||
func (a *WebAgentB) Process(args map[string]string, rawOutput []byte) []byte {
|
func (a *WebAgentB) Process(args map[string]string, rawOutput []byte) []byte {
|
||||||
msg, err := a.FormMsg(a.sysprompt,
|
msg, err := a.FormFirstMsg(
|
||||||
|
a.sysprompt,
|
||||||
fmt.Sprintf("request:\n%+v\ntool response:\n%v", args, string(rawOutput)))
|
fmt.Sprintf("request:\n%+v\ntool response:\n%v", args, string(rawOutput)))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
a.Log().Error("failed to process the request", "error", err)
|
a.Log().Error("failed to process the request", "error", err)
|
||||||
|
|||||||
BIN
assets/yt_thumb.jpg
Normal file
BIN
assets/yt_thumb.jpg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 42 KiB |
@@ -6,19 +6,27 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- "8081:8081"
|
- "8081:8081"
|
||||||
volumes:
|
volumes:
|
||||||
- whisper_models:/app/models
|
- ./whisper.cpp/models/ggml-large-v3-turbo-q5_0.bin:/app/models/ggml-large-v3-turbo-q5_0.bin
|
||||||
working_dir: /app
|
working_dir: /app
|
||||||
entrypoint: ""
|
entrypoint: ""
|
||||||
command: >
|
command: >
|
||||||
sh -c "
|
sh -c "
|
||||||
if [ ! -f /app/models/ggml-large-v3-turbo.bin ]; then
|
if [ ! -f /app/models/ggml-large-v3-turbo-q5_0.bin ]; then
|
||||||
echo 'Downloading ggml-large-v3-turbo model...'
|
echo 'Downloading ggml-large-v3-turboq5_0 model...'
|
||||||
./download-ggml-model.sh large-v3-turbo /app/models
|
curl -o /app/models/ggml-large-v3-turbo-q5_0.bin -L "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q5_0.bin?download=true"
|
||||||
fi &&
|
fi &&
|
||||||
./build/bin/whisper-server -m /app/models/ggml-large-v3-turbo.bin -t 4 -p 1 --port 8081 --host 0.0.0.0
|
./build/bin/whisper-server -m /app/models/ggml-large-v3-turbo-q5_0.bin -t 4 -p 1 --port 8081 --host 0.0.0.0
|
||||||
"
|
"
|
||||||
environment:
|
environment:
|
||||||
- WHISPER_LOG_LEVEL=3
|
- WHISPER_LOG_LEVEL=3
|
||||||
|
# For GPU support, uncomment the following lines:
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
reservations:
|
||||||
|
devices:
|
||||||
|
- driver: nvidia
|
||||||
|
count: 1
|
||||||
|
capabilities: [gpu]
|
||||||
# Restart policy in case the service fails
|
# Restart policy in case the service fails
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
@@ -45,7 +53,5 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
models:
|
models:
|
||||||
driver: local
|
driver: local
|
||||||
audio:
|
|
||||||
driver: local
|
|
||||||
whisper_models:
|
whisper_models:
|
||||||
driver: local
|
driver: local
|
||||||
|
|||||||
34
bot_test.go
34
bot_test.go
@@ -1,12 +1,10 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"gf-lt/config"
|
"gf-lt/config"
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
"reflect"
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestConsolidateConsecutiveAssistantMessages(t *testing.T) {
|
func TestConsolidateConsecutiveAssistantMessages(t *testing.T) {
|
||||||
// Mock config for testing
|
// Mock config for testing
|
||||||
testCfg := &config.Config{
|
testCfg := &config.Config{
|
||||||
@@ -14,7 +12,6 @@ func TestConsolidateConsecutiveAssistantMessages(t *testing.T) {
|
|||||||
WriteNextMsgAsCompletionAgent: "",
|
WriteNextMsgAsCompletionAgent: "",
|
||||||
}
|
}
|
||||||
cfg = testCfg
|
cfg = testCfg
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
input []models.RoleMsg
|
input []models.RoleMsg
|
||||||
@@ -114,38 +111,31 @@ func TestConsolidateConsecutiveAssistantMessages(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
result := consolidateAssistantMessages(tt.input)
|
result := consolidateAssistantMessages(tt.input)
|
||||||
|
|
||||||
if len(result) != len(tt.expected) {
|
if len(result) != len(tt.expected) {
|
||||||
t.Errorf("Expected %d messages, got %d", len(tt.expected), len(result))
|
t.Errorf("Expected %d messages, got %d", len(tt.expected), len(result))
|
||||||
t.Logf("Result: %+v", result)
|
t.Logf("Result: %+v", result)
|
||||||
t.Logf("Expected: %+v", tt.expected)
|
t.Logf("Expected: %+v", tt.expected)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
for i, expectedMsg := range tt.expected {
|
for i, expectedMsg := range tt.expected {
|
||||||
if i >= len(result) {
|
if i >= len(result) {
|
||||||
t.Errorf("Result has fewer messages than expected at index %d", i)
|
t.Errorf("Result has fewer messages than expected at index %d", i)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
actualMsg := result[i]
|
actualMsg := result[i]
|
||||||
if actualMsg.Role != expectedMsg.Role {
|
if actualMsg.Role != expectedMsg.Role {
|
||||||
t.Errorf("Message %d: expected role '%s', got '%s'", i, expectedMsg.Role, actualMsg.Role)
|
t.Errorf("Message %d: expected role '%s', got '%s'", i, expectedMsg.Role, actualMsg.Role)
|
||||||
}
|
}
|
||||||
|
|
||||||
if actualMsg.Content != expectedMsg.Content {
|
if actualMsg.Content != expectedMsg.Content {
|
||||||
t.Errorf("Message %d: expected content '%s', got '%s'", i, expectedMsg.Content, actualMsg.Content)
|
t.Errorf("Message %d: expected content '%s', got '%s'", i, expectedMsg.Content, actualMsg.Content)
|
||||||
}
|
}
|
||||||
|
|
||||||
if actualMsg.ToolCallID != expectedMsg.ToolCallID {
|
if actualMsg.ToolCallID != expectedMsg.ToolCallID {
|
||||||
t.Errorf("Message %d: expected ToolCallID '%s', got '%s'", i, expectedMsg.ToolCallID, actualMsg.ToolCallID)
|
t.Errorf("Message %d: expected ToolCallID '%s', got '%s'", i, expectedMsg.ToolCallID, actualMsg.ToolCallID)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Additional check: ensure no messages were lost
|
// Additional check: ensure no messages were lost
|
||||||
if !reflect.DeepEqual(result, tt.expected) {
|
if !reflect.DeepEqual(result, tt.expected) {
|
||||||
t.Errorf("Result does not match expected:\nResult: %+v\nExpected: %+v", result, tt.expected)
|
t.Errorf("Result does not match expected:\nResult: %+v\nExpected: %+v", result, tt.expected)
|
||||||
@@ -153,7 +143,6 @@ func TestConsolidateConsecutiveAssistantMessages(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestUnmarshalFuncCall(t *testing.T) {
|
func TestUnmarshalFuncCall(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
@@ -213,7 +202,6 @@ func TestUnmarshalFuncCall(t *testing.T) {
|
|||||||
wantErr: true,
|
wantErr: true,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
got, err := unmarshalFuncCall(tt.jsonStr)
|
got, err := unmarshalFuncCall(tt.jsonStr)
|
||||||
@@ -238,7 +226,6 @@ func TestUnmarshalFuncCall(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestConvertJSONToMapStringString(t *testing.T) {
|
func TestConvertJSONToMapStringString(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
@@ -265,7 +252,6 @@ func TestConvertJSONToMapStringString(t *testing.T) {
|
|||||||
wantErr: true,
|
wantErr: true,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
got, err := convertJSONToMapStringString(tt.jsonStr)
|
got, err := convertJSONToMapStringString(tt.jsonStr)
|
||||||
@@ -287,7 +273,6 @@ func TestConvertJSONToMapStringString(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseKnownToTag(t *testing.T) {
|
func TestParseKnownToTag(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
@@ -378,7 +363,6 @@ func TestParseKnownToTag(t *testing.T) {
|
|||||||
wantKnownTo: []string{"Alice", "Bob", "Carl"},
|
wantKnownTo: []string{"Alice", "Bob", "Carl"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
// Set up config
|
// Set up config
|
||||||
@@ -402,7 +386,6 @@ func TestParseKnownToTag(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessMessageTag(t *testing.T) {
|
func TestProcessMessageTag(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
@@ -498,7 +481,6 @@ func TestProcessMessageTag(t *testing.T) {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
testCfg := &config.Config{
|
testCfg := &config.Config{
|
||||||
@@ -529,7 +511,6 @@ func TestProcessMessageTag(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFilterMessagesForCharacter(t *testing.T) {
|
func TestFilterMessagesForCharacter(t *testing.T) {
|
||||||
messages := []models.RoleMsg{
|
messages := []models.RoleMsg{
|
||||||
{Role: "system", Content: "System message", KnownTo: nil}, // visible to all
|
{Role: "system", Content: "System message", KnownTo: nil}, // visible to all
|
||||||
@@ -539,7 +520,6 @@ func TestFilterMessagesForCharacter(t *testing.T) {
|
|||||||
{Role: "Alice", Content: "Private to Carl", KnownTo: []string{"Alice", "Carl"}},
|
{Role: "Alice", Content: "Private to Carl", KnownTo: []string{"Alice", "Carl"}},
|
||||||
{Role: "Carl", Content: "Hi all", KnownTo: nil}, // visible to all
|
{Role: "Carl", Content: "Hi all", KnownTo: nil}, // visible to all
|
||||||
}
|
}
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
enabled bool
|
enabled bool
|
||||||
@@ -583,7 +563,6 @@ func TestFilterMessagesForCharacter(t *testing.T) {
|
|||||||
wantIndices: []int{0, 1, 5},
|
wantIndices: []int{0, 1, 5},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
testCfg := &config.Config{
|
testCfg := &config.Config{
|
||||||
@@ -591,15 +570,12 @@ func TestFilterMessagesForCharacter(t *testing.T) {
|
|||||||
CharSpecificContextTag: "@",
|
CharSpecificContextTag: "@",
|
||||||
}
|
}
|
||||||
cfg = testCfg
|
cfg = testCfg
|
||||||
|
|
||||||
got := filterMessagesForCharacter(messages, tt.character)
|
got := filterMessagesForCharacter(messages, tt.character)
|
||||||
|
|
||||||
if len(got) != len(tt.wantIndices) {
|
if len(got) != len(tt.wantIndices) {
|
||||||
t.Errorf("filterMessagesForCharacter() returned %d messages, want %d", len(got), len(tt.wantIndices))
|
t.Errorf("filterMessagesForCharacter() returned %d messages, want %d", len(got), len(tt.wantIndices))
|
||||||
t.Logf("got: %v", got)
|
t.Logf("got: %v", got)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
for i, idx := range tt.wantIndices {
|
for i, idx := range tt.wantIndices {
|
||||||
if got[i].Content != messages[idx].Content {
|
if got[i].Content != messages[idx].Content {
|
||||||
t.Errorf("filterMessagesForCharacter() message %d content = %q, want %q", i, got[i].Content, messages[idx].Content)
|
t.Errorf("filterMessagesForCharacter() message %d content = %q, want %q", i, got[i].Content, messages[idx].Content)
|
||||||
@@ -608,7 +584,6 @@ func TestFilterMessagesForCharacter(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRoleMsgCopyPreservesKnownTo(t *testing.T) {
|
func TestRoleMsgCopyPreservesKnownTo(t *testing.T) {
|
||||||
// Test that the Copy() method preserves the KnownTo field
|
// Test that the Copy() method preserves the KnownTo field
|
||||||
originalMsg := models.RoleMsg{
|
originalMsg := models.RoleMsg{
|
||||||
@@ -616,9 +591,7 @@ func TestRoleMsgCopyPreservesKnownTo(t *testing.T) {
|
|||||||
Content: "Test message",
|
Content: "Test message",
|
||||||
KnownTo: []string{"Bob", "Charlie"},
|
KnownTo: []string{"Bob", "Charlie"},
|
||||||
}
|
}
|
||||||
|
|
||||||
copiedMsg := originalMsg.Copy()
|
copiedMsg := originalMsg.Copy()
|
||||||
|
|
||||||
if copiedMsg.Role != originalMsg.Role {
|
if copiedMsg.Role != originalMsg.Role {
|
||||||
t.Errorf("Copy() failed to preserve Role: got %q, want %q", copiedMsg.Role, originalMsg.Role)
|
t.Errorf("Copy() failed to preserve Role: got %q, want %q", copiedMsg.Role, originalMsg.Role)
|
||||||
}
|
}
|
||||||
@@ -635,7 +608,6 @@ func TestRoleMsgCopyPreservesKnownTo(t *testing.T) {
|
|||||||
t.Errorf("Copy() failed to preserve hasContentParts flag")
|
t.Errorf("Copy() failed to preserve hasContentParts flag")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestKnownToFieldPreservationScenario(t *testing.T) {
|
func TestKnownToFieldPreservationScenario(t *testing.T) {
|
||||||
// Test the specific scenario from the log where KnownTo field was getting lost
|
// Test the specific scenario from the log where KnownTo field was getting lost
|
||||||
originalMsg := models.RoleMsg{
|
originalMsg := models.RoleMsg{
|
||||||
@@ -643,28 +615,22 @@ func TestKnownToFieldPreservationScenario(t *testing.T) {
|
|||||||
Content: `Alice: "Okay, Bob. The word is... **'Ephemeral'**. (ooc: @Bob@)"`,
|
Content: `Alice: "Okay, Bob. The word is... **'Ephemeral'**. (ooc: @Bob@)"`,
|
||||||
KnownTo: []string{"Bob"}, // This was detected in the log
|
KnownTo: []string{"Bob"}, // This was detected in the log
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Logf("Original message - Role: %s, Content: %s, KnownTo: %v",
|
t.Logf("Original message - Role: %s, Content: %s, KnownTo: %v",
|
||||||
originalMsg.Role, originalMsg.Content, originalMsg.KnownTo)
|
originalMsg.Role, originalMsg.Content, originalMsg.KnownTo)
|
||||||
|
|
||||||
// Simulate what happens when the message gets copied during processing
|
// Simulate what happens when the message gets copied during processing
|
||||||
copiedMsg := originalMsg.Copy()
|
copiedMsg := originalMsg.Copy()
|
||||||
|
|
||||||
t.Logf("Copied message - Role: %s, Content: %s, KnownTo: %v",
|
t.Logf("Copied message - Role: %s, Content: %s, KnownTo: %v",
|
||||||
copiedMsg.Role, copiedMsg.Content, copiedMsg.KnownTo)
|
copiedMsg.Role, copiedMsg.Content, copiedMsg.KnownTo)
|
||||||
|
|
||||||
// Check if KnownTo field survived the copy
|
// Check if KnownTo field survived the copy
|
||||||
if len(copiedMsg.KnownTo) == 0 {
|
if len(copiedMsg.KnownTo) == 0 {
|
||||||
t.Error("ERROR: KnownTo field was lost during copy!")
|
t.Error("ERROR: KnownTo field was lost during copy!")
|
||||||
} else {
|
} else {
|
||||||
t.Log("SUCCESS: KnownTo field was preserved during copy!")
|
t.Log("SUCCESS: KnownTo field was preserved during copy!")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Verify the content is the same
|
// Verify the content is the same
|
||||||
if copiedMsg.Content != originalMsg.Content {
|
if copiedMsg.Content != originalMsg.Content {
|
||||||
t.Errorf("Content was changed during copy: got %s, want %s", copiedMsg.Content, originalMsg.Content)
|
t.Errorf("Content was changed during copy: got %s, want %s", copiedMsg.Content, originalMsg.Content)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Verify the KnownTo slice is properly copied
|
// Verify the KnownTo slice is properly copied
|
||||||
if !reflect.DeepEqual(copiedMsg.KnownTo, originalMsg.KnownTo) {
|
if !reflect.DeepEqual(copiedMsg.KnownTo, originalMsg.KnownTo) {
|
||||||
t.Errorf("KnownTo was not properly copied: got %v, want %v", copiedMsg.KnownTo, originalMsg.KnownTo)
|
t.Errorf("KnownTo was not properly copied: got %v, want %v", copiedMsg.KnownTo, originalMsg.KnownTo)
|
||||||
|
|||||||
74
cli-tests/sort-img/check.sh
Executable file
74
cli-tests/sort-img/check.sh
Executable file
@@ -0,0 +1,74 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
LOG_FILE=$(ls -t "$SCRIPT_DIR"/*_run.log 2>/dev/null | head -1)
|
||||||
|
|
||||||
|
PASS=0
|
||||||
|
FAIL=0
|
||||||
|
|
||||||
|
log_pass() {
|
||||||
|
echo "[PASS] $1"
|
||||||
|
PASS=$((PASS + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
log_fail() {
|
||||||
|
echo "[FAIL] $1"
|
||||||
|
FAIL=$((FAIL + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
echo "=== Checking results ==="
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Check has-animals directory exists
|
||||||
|
if [ -d "/tmp/sort-img/has-animals" ]; then
|
||||||
|
log_pass "has-animals directory exists"
|
||||||
|
else
|
||||||
|
log_fail "has-animals directory missing"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check no-animals directory exists
|
||||||
|
if [ -d "/tmp/sort-img/no-animals" ]; then
|
||||||
|
log_pass "no-animals directory exists"
|
||||||
|
else
|
||||||
|
log_fail "no-animals directory missing"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check has-animals contains at least one image
|
||||||
|
HAS_ANIMALS_FILES=$(ls -1 /tmp/sort-img/has-animals 2>/dev/null | wc -l)
|
||||||
|
if [ "$HAS_ANIMALS_FILES" -gt 0 ]; then
|
||||||
|
log_pass "has-animals contains images ($HAS_ANIMALS_FILES files)"
|
||||||
|
else
|
||||||
|
log_fail "has-animals is empty"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check no-animals contains at least one image
|
||||||
|
NO_ANIMALS_FILES=$(ls -1 /tmp/sort-img/no-animals 2>/dev/null | wc -l)
|
||||||
|
if [ "$NO_ANIMALS_FILES" -gt 0 ]; then
|
||||||
|
log_pass "no-animals contains images ($NO_ANIMALS_FILES files)"
|
||||||
|
else
|
||||||
|
log_fail "no-animals is empty"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check total files sorted correctly (3 original files should be in subdirs)
|
||||||
|
TOTAL_SORTED=$((HAS_ANIMALS_FILES + NO_ANIMALS_FILES))
|
||||||
|
if [ "$TOTAL_SORTED" -eq 3 ]; then
|
||||||
|
log_pass "all 3 files sorted into subdirectories"
|
||||||
|
else
|
||||||
|
log_fail "expected 3 files sorted, got $TOTAL_SORTED"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Summary ==="
|
||||||
|
echo "PASSED: $PASS"
|
||||||
|
echo "FAILED: $FAIL"
|
||||||
|
|
||||||
|
if [ $FAIL -gt 0 ]; then
|
||||||
|
echo ""
|
||||||
|
echo "Log file: $LOG_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "All tests passed!"
|
||||||
|
exit 0
|
||||||
25
cli-tests/sort-img/run.sh
Executable file
25
cli-tests/sort-img/run.sh
Executable file
@@ -0,0 +1,25 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
|
||||||
|
LOG_FILE="$SCRIPT_DIR/${TIMESTAMP}_run.log"
|
||||||
|
|
||||||
|
exec > "$LOG_FILE" 2>&1
|
||||||
|
|
||||||
|
echo "=== Running teardown ==="
|
||||||
|
"$SCRIPT_DIR/teardown.sh"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Running setup ==="
|
||||||
|
"$SCRIPT_DIR/setup.sh"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Running task ==="
|
||||||
|
TASK=$(cat "$SCRIPT_DIR/task.txt")
|
||||||
|
cd /home/grail/projects/plays/goplays/gf-lt
|
||||||
|
go run . -cli -msg "$TASK"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Done ==="
|
||||||
|
echo "Log file: $LOG_FILE"
|
||||||
9
cli-tests/sort-img/setup.sh
Executable file
9
cli-tests/sort-img/setup.sh
Executable file
@@ -0,0 +1,9 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
mkdir -p /tmp/sort-img
|
||||||
|
|
||||||
|
cp ../../../assets/ex01.png /tmp/sort-img/file1.png
|
||||||
|
cp ../../../assets/helppage.png /tmp/sort-img/file2.png
|
||||||
|
cp ../../../assets/yt_thumb.jpg /tmp/sort-img/file3.jpg
|
||||||
2
cli-tests/sort-img/task.txt
Normal file
2
cli-tests/sort-img/task.txt
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
go to /tmp/sort-img, create directories: has-animals, no-animals
|
||||||
|
sort images in /tmp/sort-img into created directories by content
|
||||||
4
cli-tests/sort-img/teardown.sh
Executable file
4
cli-tests/sort-img/teardown.sh
Executable file
@@ -0,0 +1,4 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
rm -rf /tmp/sort-img
|
||||||
91
cli-tests/sort-text/check.sh
Executable file
91
cli-tests/sort-text/check.sh
Executable file
@@ -0,0 +1,91 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
LOG_FILE=$(ls -t "$SCRIPT_DIR"/*_run.log 2>/dev/null | head -1)
|
||||||
|
|
||||||
|
PASS=0
|
||||||
|
FAIL=0
|
||||||
|
|
||||||
|
log_pass() {
|
||||||
|
echo "[PASS] $1"
|
||||||
|
PASS=$((PASS + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
log_fail() {
|
||||||
|
echo "[FAIL] $1"
|
||||||
|
FAIL=$((FAIL + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
echo "=== Checking results ==="
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Check animals directory exists
|
||||||
|
if [ -d "/tmp/sort-text/animals" ]; then
|
||||||
|
log_pass "animals directory exists"
|
||||||
|
else
|
||||||
|
log_fail "animals directory missing"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check colors directory exists
|
||||||
|
if [ -d "/tmp/sort-text/colors" ]; then
|
||||||
|
log_pass "colors directory exists"
|
||||||
|
else
|
||||||
|
log_fail "colors directory missing"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check animals contain cat/dog
|
||||||
|
ANIMALS_FILES=$(ls -1 /tmp/sort-text/animals 2>/dev/null | tr '\n' ' ')
|
||||||
|
if echo "$ANIMALS_FILES" | grep -q "file1.txt" && echo "$ANIMALS_FILES" | grep -q "file3.txt"; then
|
||||||
|
log_pass "animals contains animal files"
|
||||||
|
else
|
||||||
|
log_fail "animals missing animal files (got: $ANIMALS_FILES)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check colors contain red/blue
|
||||||
|
COLORS_FILES=$(ls -1 /tmp/sort-text/colors 2>/dev/null | tr '\n' ' ')
|
||||||
|
if echo "$COLORS_FILES" | grep -q "file2.txt" && echo "$COLORS_FILES" | grep -q "file4.txt"; then
|
||||||
|
log_pass "colors contains color files"
|
||||||
|
else
|
||||||
|
log_fail "colors missing color files (got: $COLORS_FILES)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify content
|
||||||
|
if grep -q "cat" /tmp/sort-text/animals/file1.txt 2>/dev/null; then
|
||||||
|
log_pass "file1.txt contains 'cat'"
|
||||||
|
else
|
||||||
|
log_fail "file1.txt missing 'cat'"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if grep -q "dog" /tmp/sort-text/animals/file3.txt 2>/dev/null; then
|
||||||
|
log_pass "file3.txt contains 'dog'"
|
||||||
|
else
|
||||||
|
log_fail "file3.txt missing 'dog'"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if grep -q "red" /tmp/sort-text/colors/file2.txt 2>/dev/null; then
|
||||||
|
log_pass "file2.txt contains 'red'"
|
||||||
|
else
|
||||||
|
log_fail "file2.txt missing 'red'"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if grep -q "blue" /tmp/sort-text/colors/file4.txt 2>/dev/null; then
|
||||||
|
log_pass "file4.txt contains 'blue'"
|
||||||
|
else
|
||||||
|
log_fail "file4.txt missing 'blue'"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Summary ==="
|
||||||
|
echo "PASSED: $PASS"
|
||||||
|
echo "FAILED: $FAIL"
|
||||||
|
|
||||||
|
if [ $FAIL -gt 0 ]; then
|
||||||
|
echo ""
|
||||||
|
echo "Log file: $LOG_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "All tests passed!"
|
||||||
|
exit 0
|
||||||
25
cli-tests/sort-text/run.sh
Executable file
25
cli-tests/sort-text/run.sh
Executable file
@@ -0,0 +1,25 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
|
||||||
|
LOG_FILE="$SCRIPT_DIR/${TIMESTAMP}_run.log"
|
||||||
|
|
||||||
|
exec > "$LOG_FILE" 2>&1
|
||||||
|
|
||||||
|
echo "=== Running teardown ==="
|
||||||
|
"$SCRIPT_DIR/teardown.sh"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Running setup ==="
|
||||||
|
"$SCRIPT_DIR/setup.sh"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Running task ==="
|
||||||
|
TASK=$(cat "$SCRIPT_DIR/task.txt")
|
||||||
|
cd /home/grail/projects/plays/goplays/gf-lt
|
||||||
|
go run . -cli -msg "$TASK"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Done ==="
|
||||||
|
echo "Log file: $LOG_FILE"
|
||||||
10
cli-tests/sort-text/setup.sh
Executable file
10
cli-tests/sort-text/setup.sh
Executable file
@@ -0,0 +1,10 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
mkdir -p /tmp/sort-text
|
||||||
|
|
||||||
|
printf "cat" > /tmp/sort-text/file1.txt
|
||||||
|
printf "red" > /tmp/sort-text/file2.txt
|
||||||
|
printf "dog" > /tmp/sort-text/file3.txt
|
||||||
|
printf "blue" > /tmp/sort-text/file4.txt
|
||||||
2
cli-tests/sort-text/task.txt
Normal file
2
cli-tests/sort-text/task.txt
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
go to /tmp/sort-text, create directories: animals, colors
|
||||||
|
sort /tmp/sort-text/*.txt into created directories by text content
|
||||||
4
cli-tests/sort-text/teardown.sh
Executable file
4
cli-tests/sort-text/teardown.sh
Executable file
@@ -0,0 +1,4 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
rm -rf /tmp/sort-text
|
||||||
63
colors.go
Normal file
63
colors.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/gdamore/tcell/v2"
|
||||||
|
"github.com/rivo/tview"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
colorschemes = map[string]tview.Theme{
|
||||||
|
"default": tview.Theme{
|
||||||
|
PrimitiveBackgroundColor: tcell.ColorDefault,
|
||||||
|
ContrastBackgroundColor: tcell.ColorGray,
|
||||||
|
MoreContrastBackgroundColor: tcell.ColorSteelBlue,
|
||||||
|
BorderColor: tcell.ColorGray,
|
||||||
|
TitleColor: tcell.ColorRed,
|
||||||
|
GraphicsColor: tcell.ColorBlue,
|
||||||
|
PrimaryTextColor: tcell.ColorLightGray,
|
||||||
|
SecondaryTextColor: tcell.ColorYellow,
|
||||||
|
TertiaryTextColor: tcell.ColorOrange,
|
||||||
|
InverseTextColor: tcell.ColorPurple,
|
||||||
|
ContrastSecondaryTextColor: tcell.ColorLime,
|
||||||
|
},
|
||||||
|
"gruvbox": tview.Theme{
|
||||||
|
PrimitiveBackgroundColor: tcell.NewHexColor(0x282828), // Background: #282828 (dark gray)
|
||||||
|
ContrastBackgroundColor: tcell.ColorDarkGoldenrod, // Selected option: warm yellow (#b57614)
|
||||||
|
MoreContrastBackgroundColor: tcell.ColorDarkSlateGray, // Non-selected options: dark grayish-blue (#32302f)
|
||||||
|
BorderColor: tcell.ColorLightGray, // Light gray (#a89984)
|
||||||
|
TitleColor: tcell.ColorRed, // Red (#fb4934)
|
||||||
|
GraphicsColor: tcell.ColorDarkCyan, // Cyan (#689d6a)
|
||||||
|
PrimaryTextColor: tcell.ColorLightGray, // Light gray (#d5c4a1)
|
||||||
|
SecondaryTextColor: tcell.ColorYellow, // Yellow (#fabd2f)
|
||||||
|
TertiaryTextColor: tcell.ColorOrange, // Orange (#fe8019)
|
||||||
|
InverseTextColor: tcell.ColorWhite, // White (#f9f5d7) for selected text
|
||||||
|
ContrastSecondaryTextColor: tcell.ColorLightGreen, // Light green (#b8bb26)
|
||||||
|
},
|
||||||
|
"solarized": tview.Theme{
|
||||||
|
PrimitiveBackgroundColor: tcell.NewHexColor(0x002b36), // Background: #002b36 (base03)
|
||||||
|
ContrastBackgroundColor: tcell.ColorDarkCyan, // Selected option: cyan (#2aa198)
|
||||||
|
MoreContrastBackgroundColor: tcell.ColorDarkSlateGray, // Non-selected options: dark blue (#073642)
|
||||||
|
BorderColor: tcell.ColorLightBlue, // Light blue (#839496)
|
||||||
|
TitleColor: tcell.ColorRed, // Red (#dc322f)
|
||||||
|
GraphicsColor: tcell.ColorBlue, // Blue (#268bd2)
|
||||||
|
PrimaryTextColor: tcell.ColorWhite, // White (#fdf6e3)
|
||||||
|
SecondaryTextColor: tcell.ColorYellow, // Yellow (#b58900)
|
||||||
|
TertiaryTextColor: tcell.ColorOrange, // Orange (#cb4b16)
|
||||||
|
InverseTextColor: tcell.ColorWhite, // White (#eee8d5) for selected text
|
||||||
|
ContrastSecondaryTextColor: tcell.ColorLightCyan, // Light cyan (#93a1a1)
|
||||||
|
},
|
||||||
|
"dracula": tview.Theme{
|
||||||
|
PrimitiveBackgroundColor: tcell.NewHexColor(0x282a36), // Background: #282a36
|
||||||
|
ContrastBackgroundColor: tcell.ColorDarkMagenta, // Selected option: magenta (#bd93f9)
|
||||||
|
MoreContrastBackgroundColor: tcell.ColorDarkGray, // Non-selected options: dark gray (#44475a)
|
||||||
|
BorderColor: tcell.ColorLightGray, // Light gray (#f8f8f2)
|
||||||
|
TitleColor: tcell.ColorRed, // Red (#ff5555)
|
||||||
|
GraphicsColor: tcell.ColorDarkCyan, // Cyan (#8be9fd)
|
||||||
|
PrimaryTextColor: tcell.ColorWhite, // White (#f8f8f2)
|
||||||
|
SecondaryTextColor: tcell.ColorYellow, // Yellow (#f1fa8c)
|
||||||
|
TertiaryTextColor: tcell.ColorOrange, // Orange (#ffb86c)
|
||||||
|
InverseTextColor: tcell.ColorWhite, // White (#f8f8f2) for selected text
|
||||||
|
ContrastSecondaryTextColor: tcell.ColorLightGreen, // Light green (#50fa7b)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
@@ -10,9 +10,15 @@ DeepSeekModel = "deepseek-reasoner"
|
|||||||
OpenRouterCompletionAPI = "https://openrouter.ai/api/v1/completions"
|
OpenRouterCompletionAPI = "https://openrouter.ai/api/v1/completions"
|
||||||
OpenRouterChatAPI = "https://openrouter.ai/api/v1/chat/completions"
|
OpenRouterChatAPI = "https://openrouter.ai/api/v1/chat/completions"
|
||||||
# OpenRouterToken = ""
|
# OpenRouterToken = ""
|
||||||
|
# embeddings
|
||||||
EmbedURL = "http://localhost:8082/v1/embeddings"
|
EmbedURL = "http://localhost:8082/v1/embeddings"
|
||||||
|
HFToken = ""
|
||||||
|
EmbedModelPath = "onnx/embedgemma/model_q4.onnx"
|
||||||
|
EmbedTokenizerPath = "onnx/embedgemma/tokenizer.json"
|
||||||
|
EmbedDims = 768
|
||||||
|
#
|
||||||
ShowSys = true
|
ShowSys = true
|
||||||
LogFile = "log.txt"
|
LogFile = "log.log"
|
||||||
UserRole = "user"
|
UserRole = "user"
|
||||||
ToolRole = "tool"
|
ToolRole = "tool"
|
||||||
AssistantRole = "assistant"
|
AssistantRole = "assistant"
|
||||||
@@ -22,8 +28,8 @@ AutoScrollEnabled = true
|
|||||||
AutoCleanToolCallsFromCtx = false
|
AutoCleanToolCallsFromCtx = false
|
||||||
# rag settings
|
# rag settings
|
||||||
RAGBatchSize = 1
|
RAGBatchSize = 1
|
||||||
RAGWordLimit = 80
|
RAGWordLimit = 250
|
||||||
RAGWorkers = 2
|
RAGOverlapWords = 25
|
||||||
RAGDir = "ragimport"
|
RAGDir = "ragimport"
|
||||||
# extra tts
|
# extra tts
|
||||||
TTS_ENABLED = false
|
TTS_ENABLED = false
|
||||||
@@ -36,15 +42,23 @@ STT_ENABLED = false
|
|||||||
STT_TYPE = "WHISPER_SERVER" # WHISPER_SERVER or WHISPER_BINARY
|
STT_TYPE = "WHISPER_SERVER" # WHISPER_SERVER or WHISPER_BINARY
|
||||||
STT_URL = "http://localhost:8081/inference"
|
STT_URL = "http://localhost:8081/inference"
|
||||||
WhisperBinaryPath = "./batteries/whisper.cpp/build/bin/whisper-cli" # Path to whisper binary (for WHISPER_BINARY mode)
|
WhisperBinaryPath = "./batteries/whisper.cpp/build/bin/whisper-cli" # Path to whisper binary (for WHISPER_BINARY mode)
|
||||||
WhisperModelPath = "./batteries/whisper.cpp/ggml-large-v3-turbo-q5_0.bin" # Path to whisper model file (for WHISPER_BINARY mode)
|
WhisperModelPath = "./batteries/whisper.cpp/models/ggml-large-v3-turbo-q5_0.bin" # Path to whisper model file (for WHISPER_BINARY mode)
|
||||||
STT_LANG = "en" # Language for speech recognition (for WHISPER_BINARY mode)
|
STT_LANG = "en" # Language for speech recognition (for WHISPER_BINARY mode)
|
||||||
STT_SR = 16000 # Sample rate for audio recording
|
STT_SR = 16000 # Sample rate for audio recording
|
||||||
#
|
#
|
||||||
DBPATH = "gflt.db"
|
DBPATH = "gflt.db"
|
||||||
FilePickerDir = "." # Directory where file picker should start
|
FilePickerDir = "." # Directory for file picker start and coding assistant file operations (relative paths resolved against this)
|
||||||
FilePickerExts = "png,jpg,jpeg,gif,webp" # Comma-separated list of allowed file extensions for file picker
|
FilePickerExts = "png,jpg,jpeg,gif,webp" # Comma-separated list of allowed file extensions for file picker
|
||||||
EnableMouse = false # Enable mouse support in the UI
|
EnableMouse = false # Enable mouse support in the UI
|
||||||
# character specific context
|
# character specific context
|
||||||
CharSpecificContextEnabled = true
|
CharSpecificContextEnabled = true
|
||||||
CharSpecificContextTag = "@"
|
CharSpecificContextTag = "@"
|
||||||
AutoTurn = true
|
AutoTurn = true
|
||||||
|
StripThinkingFromAPI = true # Strip <think> blocks from messages before sending to LLM (keeps them in chat history)
|
||||||
|
# OpenRouter reasoning configuration (only applies to OpenRouter chat API)
|
||||||
|
# Valid values: xhigh, high, medium, low, minimal, none (empty or none = disabled)
|
||||||
|
# Models that support reasoning will include thinking content wrapped in <think> tags
|
||||||
|
ReasoningEffort = "medium"
|
||||||
|
# playwright tools
|
||||||
|
PlaywrightEnabled = false
|
||||||
|
PlaywrightDebug = false # when true opens in gui mode (headless=false)
|
||||||
|
|||||||
@@ -18,7 +18,8 @@ type Config struct {
|
|||||||
UserRole string `toml:"UserRole"`
|
UserRole string `toml:"UserRole"`
|
||||||
ToolRole string `toml:"ToolRole"`
|
ToolRole string `toml:"ToolRole"`
|
||||||
ToolUse bool `toml:"ToolUse"`
|
ToolUse bool `toml:"ToolUse"`
|
||||||
ThinkUse bool `toml:"ThinkUse"`
|
StripThinkingFromAPI bool `toml:"StripThinkingFromAPI"`
|
||||||
|
ReasoningEffort string `toml:"ReasoningEffort"`
|
||||||
AssistantRole string `toml:"AssistantRole"`
|
AssistantRole string `toml:"AssistantRole"`
|
||||||
SysDir string `toml:"SysDir"`
|
SysDir string `toml:"SysDir"`
|
||||||
ChunkLimit uint32 `toml:"ChunkLimit"`
|
ChunkLimit uint32 `toml:"ChunkLimit"`
|
||||||
@@ -26,20 +27,22 @@ type Config struct {
|
|||||||
WriteNextMsgAs string
|
WriteNextMsgAs string
|
||||||
WriteNextMsgAsCompletionAgent string
|
WriteNextMsgAsCompletionAgent string
|
||||||
SkipLLMResp bool
|
SkipLLMResp bool
|
||||||
AutoCleanToolCallsFromCtx bool `toml:"AutoCleanToolCallsFromCtx"`
|
|
||||||
DBPATH string `toml:"DBPATH"`
|
DBPATH string `toml:"DBPATH"`
|
||||||
FilePickerDir string `toml:"FilePickerDir"`
|
FilePickerDir string `toml:"FilePickerDir"`
|
||||||
FilePickerExts string `toml:"FilePickerExts"`
|
FilePickerExts string `toml:"FilePickerExts"`
|
||||||
|
ImagePreview bool `toml:"ImagePreview"`
|
||||||
EnableMouse bool `toml:"EnableMouse"`
|
EnableMouse bool `toml:"EnableMouse"`
|
||||||
// embeddings
|
// embeddings
|
||||||
RAGEnabled bool `toml:"RAGEnabled"`
|
|
||||||
EmbedURL string `toml:"EmbedURL"`
|
EmbedURL string `toml:"EmbedURL"`
|
||||||
HFToken string `toml:"HFToken"`
|
HFToken string `toml:"HFToken"`
|
||||||
RAGDir string `toml:"RAGDir"`
|
EmbedModelPath string `toml:"EmbedModelPath"`
|
||||||
|
EmbedTokenizerPath string `toml:"EmbedTokenizerPath"`
|
||||||
|
EmbedDims int `toml:"EmbedDims"`
|
||||||
// rag settings
|
// rag settings
|
||||||
RAGWorkers uint32 `toml:"RAGWorkers"`
|
RAGDir string `toml:"RAGDir"`
|
||||||
RAGBatchSize int `toml:"RAGBatchSize"`
|
RAGBatchSize int `toml:"RAGBatchSize"`
|
||||||
RAGWordLimit uint32 `toml:"RAGWordLimit"`
|
RAGWordLimit uint32 `toml:"RAGWordLimit"`
|
||||||
|
RAGOverlapWords uint32 `toml:"RAGOverlapWords"`
|
||||||
// deepseek
|
// deepseek
|
||||||
DeepSeekChatAPI string `toml:"DeepSeekChatAPI"`
|
DeepSeekChatAPI string `toml:"DeepSeekChatAPI"`
|
||||||
DeepSeekCompletionAPI string `toml:"DeepSeekCompletionAPI"`
|
DeepSeekCompletionAPI string `toml:"DeepSeekCompletionAPI"`
|
||||||
@@ -69,6 +72,11 @@ type Config struct {
|
|||||||
CharSpecificContextEnabled bool `toml:"CharSpecificContextEnabled"`
|
CharSpecificContextEnabled bool `toml:"CharSpecificContextEnabled"`
|
||||||
CharSpecificContextTag string `toml:"CharSpecificContextTag"`
|
CharSpecificContextTag string `toml:"CharSpecificContextTag"`
|
||||||
AutoTurn bool `toml:"AutoTurn"`
|
AutoTurn bool `toml:"AutoTurn"`
|
||||||
|
// playwright browser
|
||||||
|
PlaywrightEnabled bool `toml:"PlaywrightEnabled"`
|
||||||
|
PlaywrightDebug bool `toml:"PlaywrightDebug"` // !headless
|
||||||
|
// CLI mode
|
||||||
|
CLIMode bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func LoadConfig(fn string) (*Config, error) {
|
func LoadConfig(fn string) (*Config, error) {
|
||||||
@@ -122,6 +130,9 @@ func LoadConfig(fn string) (*Config, error) {
|
|||||||
if config.CompletionAPI != "" {
|
if config.CompletionAPI != "" {
|
||||||
config.ApiLinks = append(config.ApiLinks, config.CompletionAPI)
|
config.ApiLinks = append(config.ApiLinks, config.CompletionAPI)
|
||||||
}
|
}
|
||||||
|
if config.RAGDir == "" {
|
||||||
|
config.RAGDir = "ragimport"
|
||||||
|
}
|
||||||
// if any value is empty fill with default
|
// if any value is empty fill with default
|
||||||
return config, nil
|
return config, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -113,16 +113,7 @@ When `AutoTurn` is enabled, the system can automatically trigger responses from
|
|||||||
## Cardmaking with multiple characters
|
## Cardmaking with multiple characters
|
||||||
|
|
||||||
So far only json format supports multiple characters.
|
So far only json format supports multiple characters.
|
||||||
Card example:
|
[card example](sysprompts/alice_bob_carl.json)
|
||||||
```
|
|
||||||
{
|
|
||||||
"sys_prompt": "This is a chat between Alice, Bob and Carl. Normally what is said by any character is seen by all others. But characters also might write messages intended to specific targets if their message contain string tag '@{CharName1,CharName2,CharName3}@'.\nFor example:\nAlice:\n\"Hey, Bob. I have a secret for you... (ooc: @Bob@)\"\nThis message would be seen only by Bob and Alice (sender always sees their own message).",
|
|
||||||
"role": "Alice",
|
|
||||||
"filepath": "sysprompts/alice_bob_carl.json",
|
|
||||||
"chars": ["Alice", "Bob", "Carl"],
|
|
||||||
"first_msg": "Hey guys! Want to play Alias like game? I'll tell Bob a word and he needs to describe that word so Carl can guess what it was?"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Limitations & Caveats
|
## Limitations & Caveats
|
||||||
|
|
||||||
@@ -131,7 +122,7 @@ Card example:
|
|||||||
Character‑specific context relies on the `/completion` endpoint (or other completion‑style endpoints) where the LLM is presented with a raw text prompt containing the entire filtered history. It does **not** work with OpenAI‑style `/v1/chat/completions` endpoints, because those endpoints enforce a fixed role set (`user`/`assistant`/`system`) and strip custom role names and metadata.
|
Character‑specific context relies on the `/completion` endpoint (or other completion‑style endpoints) where the LLM is presented with a raw text prompt containing the entire filtered history. It does **not** work with OpenAI‑style `/v1/chat/completions` endpoints, because those endpoints enforce a fixed role set (`user`/`assistant`/`system`) and strip custom role names and metadata.
|
||||||
|
|
||||||
### TTS
|
### TTS
|
||||||
Although text message might be hidden from user character. If TTS is enabled it will be read.
|
Although text message might be hidden from user character. If TTS is enabled it will be read until tags are parsed. If message should not be viewed by user, tts will stop.
|
||||||
|
|
||||||
### Tag Parsing
|
### Tag Parsing
|
||||||
|
|
||||||
@@ -63,26 +63,17 @@ This document explains how to set up and configure the application using the `co
|
|||||||
#### AutoScrollEnabled (`true`)
|
#### AutoScrollEnabled (`true`)
|
||||||
- Whether to automatically scroll chat window while llm streams its repsonse.
|
- Whether to automatically scroll chat window while llm streams its repsonse.
|
||||||
|
|
||||||
#### AutoCleanToolCallsFromCtx (`false`)
|
|
||||||
- Whether to automatically clean tool calls from the conversation context to manage token usage.
|
|
||||||
|
|
||||||
### RAG (Retrieval Augmented Generation) Settings
|
### RAG (Retrieval Augmented Generation) Settings
|
||||||
|
|
||||||
#### EmbedURL (`"http://localhost:8082/v1/embeddings"`)
|
#### EmbedURL (`"http://localhost:8082/v1/embeddings"`)
|
||||||
- The endpoint for embedding API, used for RAG (Retrieval Augmented Generation) functionality.
|
- The endpoint for embedding API, used for RAG (Retrieval Augmented Generation) functionality.
|
||||||
|
|
||||||
#### RAGEnabled (`false`)
|
|
||||||
- Enable or disable RAG functionality for enhanced context retrieval.
|
|
||||||
|
|
||||||
#### RAGBatchSize (`1`)
|
#### RAGBatchSize (`1`)
|
||||||
- Number of documents to process in each RAG batch.
|
- Number of documents to process in each RAG batch.
|
||||||
|
|
||||||
#### RAGWordLimit (`80`)
|
#### RAGWordLimit (`80`)
|
||||||
- Maximum number of words in a batch to tokenize and store.
|
- Maximum number of words in a batch to tokenize and store.
|
||||||
|
|
||||||
#### RAGWorkers (`2`)
|
|
||||||
- Number of concurrent workers for RAG processing.
|
|
||||||
|
|
||||||
#### RAGDir (`"ragimport"`)
|
#### RAGDir (`"ragimport"`)
|
||||||
- Directory containing documents for RAG processing.
|
- Directory containing documents for RAG processing.
|
||||||
|
|
||||||
@@ -140,14 +131,24 @@ This document explains how to set up and configure the application using the `co
|
|||||||
- Path to the SQLite database file used for storing conversation history and other data.
|
- Path to the SQLite database file used for storing conversation history and other data.
|
||||||
|
|
||||||
#### FilePickerDir (`"."`)
|
#### FilePickerDir (`"."`)
|
||||||
- Directory where the file (image) picker should start when selecting files.
|
- Directory where the file picker starts and where relative paths in coding assistant file tools (file_read, file_write, etc.) are resolved against. Use absolute paths (starting with `/`) to bypass this.
|
||||||
|
|
||||||
#### FilePickerExts (`"png,jpg,jpeg,gif,webp"`)
|
|
||||||
- Comma-separated list of allowed file extensions for the file picker.
|
|
||||||
|
|
||||||
#### EnableMouse (`false`)
|
#### EnableMouse (`false`)
|
||||||
- Enable or disable mouse support in the UI. When set to `true`, allows clicking buttons and interacting with UI elements using the mouse, but prevents the terminal from handling mouse events normally (such as selecting and copying text). When set to `false`, enables default terminal behavior allowing you to select and copy text, but disables mouse interaction with UI elements.
|
- Enable or disable mouse support in the UI. When set to `true`, allows clicking buttons and interacting with UI elements using the mouse, but prevents the terminal from handling mouse events normally (such as selecting and copying text). When set to `false`, enables default terminal behavior allowing you to select and copy text, but disables mouse interaction with UI elements.
|
||||||
|
|
||||||
|
### Character-Specific Context Settings (/completion only)
|
||||||
|
|
||||||
|
[character specific context page for more info](./char-specific-context.md)
|
||||||
|
|
||||||
|
#### CharSpecificContextEnabled (`true`)
|
||||||
|
- Enable or disable character-specific context functionality.
|
||||||
|
|
||||||
|
#### CharSpecificContextTag (`"@"`)
|
||||||
|
- The tag prefix used to reference character-specific context in messages.
|
||||||
|
|
||||||
|
#### AutoTurn (`true`)
|
||||||
|
- Enable or disable automatic turn detection/switching.
|
||||||
|
|
||||||
### Additional Features
|
### Additional Features
|
||||||
|
|
||||||
Those could be switched in program, but also bould be setup in config.
|
Those could be switched in program, but also bould be setup in config.
|
||||||
@@ -155,8 +156,20 @@ Those could be switched in program, but also bould be setup in config.
|
|||||||
#### ToolUse
|
#### ToolUse
|
||||||
- Enable or disable explanation of tools to llm, so it could use them.
|
- Enable or disable explanation of tools to llm, so it could use them.
|
||||||
|
|
||||||
#### ThinkUse
|
#### Playwright Browser Automation
|
||||||
- Enable or disable insertion of <think> token at the beggining of llm resp.
|
These settings enable browser automation tools available to the LLM.
|
||||||
|
|
||||||
|
- **PlaywrightEnabled** (`false`)
|
||||||
|
- Enable or disable Playwright browser automation tools for the LLM. When enabled, the LLM can use tools like `pw_browser`, `pw_close`, and `pw_status` to automate browser interactions.
|
||||||
|
|
||||||
|
- **PlaywrightDebug** (`false`)
|
||||||
|
- Enable debug mode for Playwright browser. When set to `true`, the browser runs in visible (non-headless) mode, displaying the GUI for debugging purposes. When `false`, the browser runs in headless mode by default.
|
||||||
|
|
||||||
|
### StripThinkingFromAPI (`true`)
|
||||||
|
- Strip thinking blocks from messages before sending to LLM. Keeps them in chat history for local viewing but reduces token usage in API calls.
|
||||||
|
|
||||||
|
#### ReasoningEffort (`"medium"`)
|
||||||
|
- OpenRouter reasoning configuration (only applies to OpenRouter chat API). Valid values: `xhigh`, `high`, `medium`, `low`, `minimal`, `none`. Empty or `none` disables reasoning.
|
||||||
|
|
||||||
## Environment Variables
|
## Environment Variables
|
||||||
|
|
||||||
|
|||||||
218
extra/google_tts.go
Normal file
218
extra/google_tts.go
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
//go:build extra
|
||||||
|
// +build extra
|
||||||
|
|
||||||
|
package extra
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"gf-lt/models"
|
||||||
|
"io"
|
||||||
|
"log/slog"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
google_translate_tts "github.com/GrailFinder/google-translate-tts"
|
||||||
|
"github.com/neurosnap/sentences/english"
|
||||||
|
)
|
||||||
|
|
||||||
|
type GoogleTranslateOrator struct {
|
||||||
|
logger *slog.Logger
|
||||||
|
mu sync.Mutex
|
||||||
|
speech *google_translate_tts.Speech
|
||||||
|
// fields for playback control
|
||||||
|
cmd *exec.Cmd
|
||||||
|
cmdMu sync.Mutex
|
||||||
|
stopCh chan struct{}
|
||||||
|
// text buffer and interrupt flag
|
||||||
|
textBuffer strings.Builder
|
||||||
|
interrupt bool
|
||||||
|
Speed float32
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *GoogleTranslateOrator) stoproutine() {
|
||||||
|
for {
|
||||||
|
<-TTSDoneChan
|
||||||
|
o.logger.Debug("orator got done signal")
|
||||||
|
o.Stop()
|
||||||
|
for len(TTSTextChan) > 0 {
|
||||||
|
<-TTSTextChan
|
||||||
|
}
|
||||||
|
o.mu.Lock()
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.interrupt = true
|
||||||
|
o.mu.Unlock()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *GoogleTranslateOrator) readroutine() {
|
||||||
|
tokenizer, _ := english.NewSentenceTokenizer(nil)
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case chunk := <-TTSTextChan:
|
||||||
|
o.mu.Lock()
|
||||||
|
o.interrupt = false
|
||||||
|
_, err := o.textBuffer.WriteString(chunk)
|
||||||
|
if err != nil {
|
||||||
|
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
||||||
|
o.mu.Unlock()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
text := o.textBuffer.String()
|
||||||
|
sentences := tokenizer.Tokenize(text)
|
||||||
|
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
|
||||||
|
if len(sentences) <= 1 {
|
||||||
|
o.mu.Unlock()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
completeSentences := sentences[:len(sentences)-1]
|
||||||
|
remaining := sentences[len(sentences)-1].Text
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.textBuffer.WriteString(remaining)
|
||||||
|
o.mu.Unlock()
|
||||||
|
for _, sentence := range completeSentences {
|
||||||
|
o.mu.Lock()
|
||||||
|
interrupted := o.interrupt
|
||||||
|
o.mu.Unlock()
|
||||||
|
if interrupted {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
cleanedText := models.CleanText(sentence.Text)
|
||||||
|
if cleanedText == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
|
||||||
|
if err := o.Speak(cleanedText); err != nil {
|
||||||
|
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case <-TTSFlushChan:
|
||||||
|
o.logger.Debug("got flushchan signal start")
|
||||||
|
// lln is done get the whole message out
|
||||||
|
if len(TTSTextChan) > 0 { // otherwise might get stuck
|
||||||
|
for chunk := range TTSTextChan {
|
||||||
|
o.mu.Lock()
|
||||||
|
_, err := o.textBuffer.WriteString(chunk)
|
||||||
|
o.mu.Unlock()
|
||||||
|
if err != nil {
|
||||||
|
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if len(TTSTextChan) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
o.mu.Lock()
|
||||||
|
remaining := o.textBuffer.String()
|
||||||
|
remaining = models.CleanText(remaining)
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.mu.Unlock()
|
||||||
|
if remaining == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
o.logger.Debug("calling Speak with remainder", "rem", remaining)
|
||||||
|
sentencesRem := tokenizer.Tokenize(remaining)
|
||||||
|
for _, rs := range sentencesRem { // to avoid dumping large volume of text
|
||||||
|
o.mu.Lock()
|
||||||
|
interrupt := o.interrupt
|
||||||
|
o.mu.Unlock()
|
||||||
|
if interrupt {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err := o.Speak(rs.Text); err != nil {
|
||||||
|
o.logger.Error("tts failed", "sentence", rs.Text, "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *GoogleTranslateOrator) GetLogger() *slog.Logger {
|
||||||
|
return o.logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *GoogleTranslateOrator) Speak(text string) error {
|
||||||
|
o.logger.Debug("fn: Speak is called", "text-len", len(text))
|
||||||
|
// Generate MP3 data directly as an io.Reader
|
||||||
|
reader, err := o.speech.GenerateSpeech(text)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("generate speech failed: %w", err)
|
||||||
|
}
|
||||||
|
// Wrap in io.NopCloser since GenerateSpeech returns io.Reader (no close needed)
|
||||||
|
body := io.NopCloser(reader)
|
||||||
|
defer body.Close()
|
||||||
|
// Build ffplay command with optional speed filter
|
||||||
|
args := []string{"-nodisp", "-autoexit"}
|
||||||
|
if o.Speed > 0.1 && o.Speed != 1.0 {
|
||||||
|
// atempo range is 0.5 to 2.0; you might clamp it here
|
||||||
|
args = append(args, "-af", fmt.Sprintf("atempo=%.2f", o.Speed))
|
||||||
|
}
|
||||||
|
args = append(args, "-i", "pipe:0")
|
||||||
|
cmd := exec.Command("ffplay", args...)
|
||||||
|
stdin, err := cmd.StdinPipe()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get stdin pipe: %w", err)
|
||||||
|
}
|
||||||
|
o.cmdMu.Lock()
|
||||||
|
o.cmd = cmd
|
||||||
|
o.stopCh = make(chan struct{})
|
||||||
|
o.cmdMu.Unlock()
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
return fmt.Errorf("failed to start ffplay: %w", err)
|
||||||
|
}
|
||||||
|
copyErr := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
_, err := io.Copy(stdin, body)
|
||||||
|
stdin.Close()
|
||||||
|
copyErr <- err
|
||||||
|
}()
|
||||||
|
done := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
done <- cmd.Wait()
|
||||||
|
}()
|
||||||
|
select {
|
||||||
|
case <-o.stopCh:
|
||||||
|
if o.cmd != nil && o.cmd.Process != nil {
|
||||||
|
o.cmd.Process.Kill()
|
||||||
|
}
|
||||||
|
<-done
|
||||||
|
return nil
|
||||||
|
case copyErrVal := <-copyErr:
|
||||||
|
if copyErrVal != nil {
|
||||||
|
if o.cmd != nil && o.cmd.Process != nil {
|
||||||
|
o.cmd.Process.Kill()
|
||||||
|
}
|
||||||
|
<-done
|
||||||
|
return copyErrVal
|
||||||
|
}
|
||||||
|
return <-done
|
||||||
|
case err := <-done:
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *GoogleTranslateOrator) Stop() {
|
||||||
|
o.cmdMu.Lock()
|
||||||
|
defer o.cmdMu.Unlock()
|
||||||
|
// Signal any running Speak to stop
|
||||||
|
if o.stopCh != nil {
|
||||||
|
select {
|
||||||
|
case <-o.stopCh: // already closed
|
||||||
|
default:
|
||||||
|
close(o.stopCh)
|
||||||
|
}
|
||||||
|
o.stopCh = nil
|
||||||
|
}
|
||||||
|
// Kill the external player process if it's still running
|
||||||
|
if o.cmd != nil && o.cmd.Process != nil {
|
||||||
|
o.cmd.Process.Kill()
|
||||||
|
o.cmd.Wait() // clean up zombie process
|
||||||
|
o.cmd = nil
|
||||||
|
}
|
||||||
|
// Also reset text buffer and interrupt flag (with o.mu)
|
||||||
|
o.mu.Lock()
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.interrupt = true
|
||||||
|
o.mu.Unlock()
|
||||||
|
}
|
||||||
259
extra/kokoro.go
Normal file
259
extra/kokoro.go
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
//go:build extra
|
||||||
|
// +build extra
|
||||||
|
|
||||||
|
package extra
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"gf-lt/models"
|
||||||
|
"io"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/neurosnap/sentences/english"
|
||||||
|
)
|
||||||
|
|
||||||
|
type KokoroOrator struct {
|
||||||
|
logger *slog.Logger
|
||||||
|
mu sync.Mutex
|
||||||
|
URL string
|
||||||
|
Format models.AudioFormat
|
||||||
|
Stream bool
|
||||||
|
Speed float32
|
||||||
|
Language string
|
||||||
|
Voice string
|
||||||
|
// fields for playback control
|
||||||
|
cmd *exec.Cmd
|
||||||
|
cmdMu sync.Mutex
|
||||||
|
stopCh chan struct{}
|
||||||
|
// textBuffer, interrupt etc. remain the same
|
||||||
|
textBuffer strings.Builder
|
||||||
|
interrupt bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *KokoroOrator) GetLogger() *slog.Logger {
|
||||||
|
return o.logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *KokoroOrator) Speak(text string) error {
|
||||||
|
o.logger.Debug("fn: Speak is called", "text-len", len(text))
|
||||||
|
body, err := o.requestSound(text)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("request failed: %w", err)
|
||||||
|
}
|
||||||
|
defer body.Close()
|
||||||
|
cmd := exec.Command("ffplay", "-nodisp", "-autoexit", "-i", "pipe:0")
|
||||||
|
stdin, err := cmd.StdinPipe()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get stdin pipe: %w", err)
|
||||||
|
}
|
||||||
|
o.cmdMu.Lock()
|
||||||
|
o.cmd = cmd
|
||||||
|
o.stopCh = make(chan struct{})
|
||||||
|
o.cmdMu.Unlock()
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
return fmt.Errorf("failed to start ffplay: %w", err)
|
||||||
|
}
|
||||||
|
// Copy audio in background
|
||||||
|
copyErr := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
_, err := io.Copy(stdin, body)
|
||||||
|
stdin.Close()
|
||||||
|
copyErr <- err
|
||||||
|
}()
|
||||||
|
// Wait for player in background
|
||||||
|
done := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
done <- cmd.Wait()
|
||||||
|
}()
|
||||||
|
// Wait for BOTH copy and player, but ensure we block until done
|
||||||
|
select {
|
||||||
|
case <-o.stopCh:
|
||||||
|
// Stop requested: kill player and wait for it to exit
|
||||||
|
if o.cmd != nil && o.cmd.Process != nil {
|
||||||
|
o.cmd.Process.Kill()
|
||||||
|
}
|
||||||
|
<-done // Wait for process to actually exit
|
||||||
|
return nil
|
||||||
|
case copyErrVal := <-copyErr:
|
||||||
|
if copyErrVal != nil {
|
||||||
|
// Copy failed: kill player and wait
|
||||||
|
if o.cmd != nil && o.cmd.Process != nil {
|
||||||
|
o.cmd.Process.Kill()
|
||||||
|
}
|
||||||
|
<-done
|
||||||
|
return copyErrVal
|
||||||
|
}
|
||||||
|
// Copy succeeded, now wait for playback to complete
|
||||||
|
return <-done
|
||||||
|
case err := <-done:
|
||||||
|
// Playback finished normally (copy must have succeeded or player would have exited early)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
func (o *KokoroOrator) requestSound(text string) (io.ReadCloser, error) {
|
||||||
|
if o.URL == "" {
|
||||||
|
return nil, fmt.Errorf("TTS URL is empty")
|
||||||
|
}
|
||||||
|
payload := map[string]interface{}{
|
||||||
|
"input": text,
|
||||||
|
"voice": o.Voice,
|
||||||
|
"response_format": o.Format,
|
||||||
|
"download_format": o.Format,
|
||||||
|
"stream": o.Stream,
|
||||||
|
"speed": o.Speed,
|
||||||
|
// "return_download_link": true,
|
||||||
|
"lang_code": o.Language,
|
||||||
|
}
|
||||||
|
payloadBytes, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to marshal payload: %w", err)
|
||||||
|
}
|
||||||
|
req, err := http.NewRequest("POST", o.URL, bytes.NewBuffer(payloadBytes)) //nolint:noctx
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create request: %w", err)
|
||||||
|
}
|
||||||
|
req.Header.Set("accept", "application/json")
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
resp, err := http.DefaultClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("request failed: %w", err)
|
||||||
|
}
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
defer resp.Body.Close()
|
||||||
|
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
return resp.Body, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *KokoroOrator) stoproutine() {
|
||||||
|
for {
|
||||||
|
<-TTSDoneChan
|
||||||
|
o.logger.Debug("orator got done signal")
|
||||||
|
// 1. Stop any ongoing playback (kills external player, closes stopCh)
|
||||||
|
o.Stop()
|
||||||
|
// 2. Drain any pending text chunks
|
||||||
|
for len(TTSTextChan) > 0 {
|
||||||
|
<-TTSTextChan
|
||||||
|
}
|
||||||
|
// 3. Reset internal state
|
||||||
|
o.mu.Lock()
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.interrupt = true
|
||||||
|
o.mu.Unlock()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *KokoroOrator) Stop() {
|
||||||
|
o.cmdMu.Lock()
|
||||||
|
defer o.cmdMu.Unlock()
|
||||||
|
// Signal any running Speak to stop
|
||||||
|
if o.stopCh != nil {
|
||||||
|
select {
|
||||||
|
case <-o.stopCh: // already closed
|
||||||
|
default:
|
||||||
|
close(o.stopCh)
|
||||||
|
}
|
||||||
|
o.stopCh = nil
|
||||||
|
}
|
||||||
|
// Kill the external player process if it's still running
|
||||||
|
if o.cmd != nil && o.cmd.Process != nil {
|
||||||
|
o.cmd.Process.Kill()
|
||||||
|
o.cmd.Wait() // clean up zombie process
|
||||||
|
o.cmd = nil
|
||||||
|
}
|
||||||
|
// Also reset text buffer and interrupt flag (with o.mu)
|
||||||
|
o.mu.Lock()
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.interrupt = true
|
||||||
|
o.mu.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *KokoroOrator) readroutine() {
|
||||||
|
tokenizer, _ := english.NewSentenceTokenizer(nil)
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case chunk := <-TTSTextChan:
|
||||||
|
o.mu.Lock()
|
||||||
|
o.interrupt = false
|
||||||
|
_, err := o.textBuffer.WriteString(chunk)
|
||||||
|
if err != nil {
|
||||||
|
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
||||||
|
o.mu.Unlock()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
text := o.textBuffer.String()
|
||||||
|
sentences := tokenizer.Tokenize(text)
|
||||||
|
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
|
||||||
|
if len(sentences) <= 1 {
|
||||||
|
o.mu.Unlock()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
completeSentences := sentences[:len(sentences)-1]
|
||||||
|
remaining := sentences[len(sentences)-1].Text
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.textBuffer.WriteString(remaining)
|
||||||
|
o.mu.Unlock()
|
||||||
|
for _, sentence := range completeSentences {
|
||||||
|
o.mu.Lock()
|
||||||
|
interrupted := o.interrupt
|
||||||
|
o.mu.Unlock()
|
||||||
|
if interrupted {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
cleanedText := models.CleanText(sentence.Text)
|
||||||
|
if cleanedText == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
|
||||||
|
if err := o.Speak(cleanedText); err != nil {
|
||||||
|
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case <-TTSFlushChan:
|
||||||
|
o.logger.Debug("got flushchan signal start")
|
||||||
|
// lln is done get the whole message out
|
||||||
|
if len(TTSTextChan) > 0 { // otherwise might get stuck
|
||||||
|
for chunk := range TTSTextChan {
|
||||||
|
o.mu.Lock()
|
||||||
|
_, err := o.textBuffer.WriteString(chunk)
|
||||||
|
o.mu.Unlock()
|
||||||
|
if err != nil {
|
||||||
|
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if len(TTSTextChan) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// flush remaining text
|
||||||
|
o.mu.Lock()
|
||||||
|
remaining := o.textBuffer.String()
|
||||||
|
remaining = models.CleanText(remaining)
|
||||||
|
o.textBuffer.Reset()
|
||||||
|
o.mu.Unlock()
|
||||||
|
if remaining == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
o.logger.Debug("calling Speak with remainder", "rem", remaining)
|
||||||
|
sentencesRem := tokenizer.Tokenize(remaining)
|
||||||
|
for _, rs := range sentencesRem { // to avoid dumping large volume of text
|
||||||
|
o.mu.Lock()
|
||||||
|
interrupt := o.interrupt
|
||||||
|
o.mu.Unlock()
|
||||||
|
if interrupt {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err := o.Speak(rs.Text); err != nil {
|
||||||
|
o.logger.Error("tts failed", "sentence", rs, "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
132
extra/stt.go
132
extra/stt.go
@@ -6,18 +6,10 @@ package extra
|
|||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding/binary"
|
"encoding/binary"
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"gf-lt/config"
|
"gf-lt/config"
|
||||||
"io"
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"mime/multipart"
|
|
||||||
"net/http"
|
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
|
||||||
"syscall"
|
|
||||||
|
|
||||||
"github.com/gordonklaus/portaudio"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var specialRE = regexp.MustCompile(`\[.*?\]`)
|
var specialRE = regexp.MustCompile(`\[.*?\]`)
|
||||||
@@ -44,14 +36,6 @@ func NewSTT(logger *slog.Logger, cfg *config.Config) STT {
|
|||||||
return NewWhisperServer(logger, cfg)
|
return NewWhisperServer(logger, cfg)
|
||||||
}
|
}
|
||||||
|
|
||||||
type WhisperServer struct {
|
|
||||||
logger *slog.Logger
|
|
||||||
ServerURL string
|
|
||||||
SampleRate int
|
|
||||||
AudioBuffer *bytes.Buffer
|
|
||||||
recording bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWhisperServer(logger *slog.Logger, cfg *config.Config) *WhisperServer {
|
func NewWhisperServer(logger *slog.Logger, cfg *config.Config) *WhisperServer {
|
||||||
return &WhisperServer{
|
return &WhisperServer{
|
||||||
logger: logger,
|
logger: logger,
|
||||||
@@ -61,69 +45,6 @@ func NewWhisperServer(logger *slog.Logger, cfg *config.Config) *WhisperServer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (stt *WhisperServer) StartRecording() error {
|
|
||||||
if err := stt.microphoneStream(stt.SampleRate); err != nil {
|
|
||||||
return fmt.Errorf("failed to init microphone: %w", err)
|
|
||||||
}
|
|
||||||
stt.recording = true
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (stt *WhisperServer) StopRecording() (string, error) {
|
|
||||||
stt.recording = false
|
|
||||||
// wait loop to finish?
|
|
||||||
if stt.AudioBuffer == nil {
|
|
||||||
err := errors.New("unexpected nil AudioBuffer")
|
|
||||||
stt.logger.Error(err.Error())
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
// Create WAV header first
|
|
||||||
body := &bytes.Buffer{}
|
|
||||||
writer := multipart.NewWriter(body)
|
|
||||||
// Add audio file part
|
|
||||||
part, err := writer.CreateFormFile("file", "recording.wav")
|
|
||||||
if err != nil {
|
|
||||||
stt.logger.Error("fn: StopRecording", "error", err)
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
// Stream directly to multipart writer: header + raw data
|
|
||||||
dataSize := stt.AudioBuffer.Len()
|
|
||||||
stt.writeWavHeader(part, dataSize)
|
|
||||||
if _, err := io.Copy(part, stt.AudioBuffer); err != nil {
|
|
||||||
stt.logger.Error("fn: StopRecording", "error", err)
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
// Reset buffer for next recording
|
|
||||||
stt.AudioBuffer.Reset()
|
|
||||||
// Add response format field
|
|
||||||
err = writer.WriteField("response_format", "text")
|
|
||||||
if err != nil {
|
|
||||||
stt.logger.Error("fn: StopRecording", "error", err)
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
if writer.Close() != nil {
|
|
||||||
stt.logger.Error("fn: StopRecording", "error", err)
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
// Send request
|
|
||||||
resp, err := http.Post(stt.ServerURL, writer.FormDataContentType(), body) //nolint:noctx
|
|
||||||
if err != nil {
|
|
||||||
stt.logger.Error("fn: StopRecording", "error", err)
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
// Read and print response
|
|
||||||
responseTextBytes, err := io.ReadAll(resp.Body)
|
|
||||||
if err != nil {
|
|
||||||
stt.logger.Error("fn: StopRecording", "error", err)
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
resptext := strings.TrimRight(string(responseTextBytes), "\n")
|
|
||||||
// in case there are special tokens like [_BEG_]
|
|
||||||
resptext = specialRE.ReplaceAllString(resptext, "")
|
|
||||||
return strings.TrimSpace(strings.ReplaceAll(resptext, "\n ", "\n")), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (stt *WhisperServer) writeWavHeader(w io.Writer, dataSize int) {
|
func (stt *WhisperServer) writeWavHeader(w io.Writer, dataSize int) {
|
||||||
header := make([]byte, 44)
|
header := make([]byte, 44)
|
||||||
copy(header[0:4], "RIFF")
|
copy(header[0:4], "RIFF")
|
||||||
@@ -147,56 +68,3 @@ func (stt *WhisperServer) writeWavHeader(w io.Writer, dataSize int) {
|
|||||||
func (stt *WhisperServer) IsRecording() bool {
|
func (stt *WhisperServer) IsRecording() bool {
|
||||||
return stt.recording
|
return stt.recording
|
||||||
}
|
}
|
||||||
|
|
||||||
func (stt *WhisperServer) microphoneStream(sampleRate int) error {
|
|
||||||
// Temporarily redirect stderr to suppress ALSA warnings during PortAudio init
|
|
||||||
origStderr, errDup := syscall.Dup(syscall.Stderr)
|
|
||||||
if errDup != nil {
|
|
||||||
return fmt.Errorf("failed to dup stderr: %w", errDup)
|
|
||||||
}
|
|
||||||
nullFD, err := syscall.Open("/dev/null", syscall.O_WRONLY, 0)
|
|
||||||
if err != nil {
|
|
||||||
_ = syscall.Close(origStderr) // Close the dup'd fd if open fails
|
|
||||||
return fmt.Errorf("failed to open /dev/null: %w", err)
|
|
||||||
}
|
|
||||||
// redirect stderr
|
|
||||||
_ = syscall.Dup2(nullFD, syscall.Stderr)
|
|
||||||
// Initialize PortAudio (this is where ALSA warnings occur)
|
|
||||||
defer func() {
|
|
||||||
// Restore stderr
|
|
||||||
_ = syscall.Dup2(origStderr, syscall.Stderr)
|
|
||||||
_ = syscall.Close(origStderr)
|
|
||||||
_ = syscall.Close(nullFD)
|
|
||||||
}()
|
|
||||||
if err := portaudio.Initialize(); err != nil {
|
|
||||||
return fmt.Errorf("portaudio init failed: %w", err)
|
|
||||||
}
|
|
||||||
in := make([]int16, 64)
|
|
||||||
stream, err := portaudio.OpenDefaultStream(1, 0, float64(sampleRate), len(in), in)
|
|
||||||
if err != nil {
|
|
||||||
if paErr := portaudio.Terminate(); paErr != nil {
|
|
||||||
return fmt.Errorf("failed to open microphone: %w; terminate error: %w", err, paErr)
|
|
||||||
}
|
|
||||||
return fmt.Errorf("failed to open microphone: %w", err)
|
|
||||||
}
|
|
||||||
go func(stream *portaudio.Stream) {
|
|
||||||
if err := stream.Start(); err != nil {
|
|
||||||
stt.logger.Error("microphoneStream", "error", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
for {
|
|
||||||
if !stt.IsRecording() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if err := stream.Read(); err != nil {
|
|
||||||
stt.logger.Error("reading stream", "error", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if err := binary.Write(stt.AudioBuffer, binary.LittleEndian, in); err != nil {
|
|
||||||
stt.logger.Error("writing to buffer", "error", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}(stream)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|||||||
436
extra/tts.go
436
extra/tts.go
@@ -4,26 +4,13 @@
|
|||||||
package extra
|
package extra
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"gf-lt/config"
|
"gf-lt/config"
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
"io"
|
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
|
||||||
"os"
|
"os"
|
||||||
"regexp"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
google_translate_tts "github.com/GrailFinder/google-translate-tts"
|
google_translate_tts "github.com/GrailFinder/google-translate-tts"
|
||||||
"github.com/GrailFinder/google-translate-tts/handlers"
|
|
||||||
"github.com/gopxl/beep/v2"
|
|
||||||
"github.com/gopxl/beep/v2/mp3"
|
|
||||||
"github.com/gopxl/beep/v2/speaker"
|
|
||||||
"github.com/neurosnap/sentences/english"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@@ -31,43 +18,8 @@ var (
|
|||||||
TTSFlushChan = make(chan bool, 1)
|
TTSFlushChan = make(chan bool, 1)
|
||||||
TTSDoneChan = make(chan bool, 1)
|
TTSDoneChan = make(chan bool, 1)
|
||||||
// endsWithPunctuation = regexp.MustCompile(`[;.!?]$`)
|
// endsWithPunctuation = regexp.MustCompile(`[;.!?]$`)
|
||||||
threeOrMoreDashesRE = regexp.MustCompile(`-{3,}`)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// cleanText removes markdown and special characters that are not suitable for TTS
|
|
||||||
func cleanText(text string) string {
|
|
||||||
// Remove markdown-like characters that might interfere with TTS
|
|
||||||
text = strings.ReplaceAll(text, "*", "") // Bold/italic markers
|
|
||||||
text = strings.ReplaceAll(text, "#", "") // Headers
|
|
||||||
text = strings.ReplaceAll(text, "_", "") // Underline/italic markers
|
|
||||||
text = strings.ReplaceAll(text, "~", "") // Strikethrough markers
|
|
||||||
text = strings.ReplaceAll(text, "`", "") // Code markers
|
|
||||||
text = strings.ReplaceAll(text, "[", "") // Link brackets
|
|
||||||
text = strings.ReplaceAll(text, "]", "") // Link brackets
|
|
||||||
text = strings.ReplaceAll(text, "!", "") // Exclamation marks (if not punctuation)
|
|
||||||
// Remove HTML tags using regex
|
|
||||||
htmlTagRegex := regexp.MustCompile(`<[^>]*>`)
|
|
||||||
text = htmlTagRegex.ReplaceAllString(text, "")
|
|
||||||
// Split text into lines to handle table separators
|
|
||||||
lines := strings.Split(text, "\n")
|
|
||||||
var filteredLines []string
|
|
||||||
for _, line := range lines {
|
|
||||||
// Check if the line looks like a table separator (e.g., |----|, |===|, | - - - |)
|
|
||||||
// A table separator typically contains only |, -, =, and spaces
|
|
||||||
isTableSeparator := regexp.MustCompile(`^\s*\|\s*[-=\s]+\|\s*$`).MatchString(strings.TrimSpace(line))
|
|
||||||
if !isTableSeparator {
|
|
||||||
// If it's not a table separator, remove vertical bars but keep the content
|
|
||||||
processedLine := strings.ReplaceAll(line, "|", "")
|
|
||||||
filteredLines = append(filteredLines, processedLine)
|
|
||||||
}
|
|
||||||
// If it is a table separator, skip it (don't add to filteredLines)
|
|
||||||
}
|
|
||||||
text = strings.Join(filteredLines, "\n")
|
|
||||||
text = threeOrMoreDashesRE.ReplaceAllString(text, "")
|
|
||||||
text = strings.TrimSpace(text) // Remove leading/trailing whitespace
|
|
||||||
return text
|
|
||||||
}
|
|
||||||
|
|
||||||
type Orator interface {
|
type Orator interface {
|
||||||
Speak(text string) error
|
Speak(text string) error
|
||||||
Stop()
|
Stop()
|
||||||
@@ -75,140 +27,6 @@ type Orator interface {
|
|||||||
GetLogger() *slog.Logger
|
GetLogger() *slog.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
// impl https://github.com/remsky/Kokoro-FastAPI
|
|
||||||
type KokoroOrator struct {
|
|
||||||
logger *slog.Logger
|
|
||||||
mu sync.Mutex
|
|
||||||
URL string
|
|
||||||
Format models.AudioFormat
|
|
||||||
Stream bool
|
|
||||||
Speed float32
|
|
||||||
Language string
|
|
||||||
Voice string
|
|
||||||
currentStream *beep.Ctrl // Added for playback control
|
|
||||||
currentDone chan bool
|
|
||||||
textBuffer strings.Builder
|
|
||||||
interrupt bool
|
|
||||||
// textBuffer bytes.Buffer
|
|
||||||
}
|
|
||||||
|
|
||||||
// Google Translate TTS implementation
|
|
||||||
type GoogleTranslateOrator struct {
|
|
||||||
logger *slog.Logger
|
|
||||||
mu sync.Mutex
|
|
||||||
speech *google_translate_tts.Speech
|
|
||||||
currentStream *beep.Ctrl
|
|
||||||
currentDone chan bool
|
|
||||||
textBuffer strings.Builder
|
|
||||||
interrupt bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *KokoroOrator) stoproutine() {
|
|
||||||
for {
|
|
||||||
<-TTSDoneChan
|
|
||||||
o.logger.Debug("orator got done signal")
|
|
||||||
o.Stop()
|
|
||||||
// drain the channel
|
|
||||||
for len(TTSTextChan) > 0 {
|
|
||||||
<-TTSTextChan
|
|
||||||
}
|
|
||||||
o.mu.Lock()
|
|
||||||
o.textBuffer.Reset()
|
|
||||||
if o.currentDone != nil {
|
|
||||||
select {
|
|
||||||
case o.currentDone <- true:
|
|
||||||
default:
|
|
||||||
// Channel might be closed, ignore
|
|
||||||
}
|
|
||||||
}
|
|
||||||
o.interrupt = true
|
|
||||||
o.mu.Unlock()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *KokoroOrator) readroutine() {
|
|
||||||
tokenizer, _ := english.NewSentenceTokenizer(nil)
|
|
||||||
// var sentenceBuf bytes.Buffer
|
|
||||||
// var remainder strings.Builder
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case chunk := <-TTSTextChan:
|
|
||||||
o.mu.Lock()
|
|
||||||
o.interrupt = false
|
|
||||||
_, err := o.textBuffer.WriteString(chunk)
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
|
||||||
o.mu.Unlock()
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
text := o.textBuffer.String()
|
|
||||||
o.mu.Unlock()
|
|
||||||
sentences := tokenizer.Tokenize(text)
|
|
||||||
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
|
|
||||||
for i, sentence := range sentences {
|
|
||||||
if i == len(sentences)-1 { // last sentence
|
|
||||||
o.mu.Lock()
|
|
||||||
o.textBuffer.Reset()
|
|
||||||
_, err := o.textBuffer.WriteString(sentence.Text)
|
|
||||||
o.mu.Unlock()
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
continue // if only one (often incomplete) sentence; wait for next chunk
|
|
||||||
}
|
|
||||||
cleanedText := cleanText(sentence.Text)
|
|
||||||
if cleanedText == "" {
|
|
||||||
continue // Skip empty text after cleaning
|
|
||||||
}
|
|
||||||
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
|
|
||||||
if err := o.Speak(cleanedText); err != nil {
|
|
||||||
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
case <-TTSFlushChan:
|
|
||||||
o.logger.Debug("got flushchan signal start")
|
|
||||||
// lln is done get the whole message out
|
|
||||||
if len(TTSTextChan) > 0 { // otherwise might get stuck
|
|
||||||
for chunk := range TTSTextChan {
|
|
||||||
o.mu.Lock()
|
|
||||||
_, err := o.textBuffer.WriteString(chunk)
|
|
||||||
o.mu.Unlock()
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if len(TTSTextChan) == 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// flush remaining text
|
|
||||||
o.mu.Lock()
|
|
||||||
remaining := o.textBuffer.String()
|
|
||||||
remaining = cleanText(remaining)
|
|
||||||
o.textBuffer.Reset()
|
|
||||||
o.mu.Unlock()
|
|
||||||
if remaining == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
o.logger.Debug("calling Speak with remainder", "rem", remaining)
|
|
||||||
sentencesRem := tokenizer.Tokenize(remaining)
|
|
||||||
for _, rs := range sentencesRem { // to avoid dumping large volume of text
|
|
||||||
o.mu.Lock()
|
|
||||||
interrupt := o.interrupt
|
|
||||||
o.mu.Unlock()
|
|
||||||
if interrupt {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if err := o.Speak(rs.Text); err != nil {
|
|
||||||
o.logger.Error("tts failed", "sentence", rs, "error", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewOrator(log *slog.Logger, cfg *config.Config) Orator {
|
func NewOrator(log *slog.Logger, cfg *config.Config) Orator {
|
||||||
provider := cfg.TTS_PROVIDER
|
provider := cfg.TTS_PROVIDER
|
||||||
if provider == "" {
|
if provider == "" {
|
||||||
@@ -238,266 +56,14 @@ func NewOrator(log *slog.Logger, cfg *config.Config) Orator {
|
|||||||
Language: language,
|
Language: language,
|
||||||
Proxy: "", // Proxy not supported
|
Proxy: "", // Proxy not supported
|
||||||
Speed: cfg.TTS_SPEED,
|
Speed: cfg.TTS_SPEED,
|
||||||
Handler: &handlers.Beep{},
|
|
||||||
}
|
}
|
||||||
orator := &GoogleTranslateOrator{
|
orator := &GoogleTranslateOrator{
|
||||||
logger: log,
|
logger: log,
|
||||||
speech: speech,
|
speech: speech,
|
||||||
|
Speed: cfg.TTS_SPEED,
|
||||||
}
|
}
|
||||||
go orator.readroutine()
|
go orator.readroutine()
|
||||||
go orator.stoproutine()
|
go orator.stoproutine()
|
||||||
return orator
|
return orator
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (o *KokoroOrator) GetLogger() *slog.Logger {
|
|
||||||
return o.logger
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *KokoroOrator) requestSound(text string) (io.ReadCloser, error) {
|
|
||||||
if o.URL == "" {
|
|
||||||
return nil, fmt.Errorf("TTS URL is empty")
|
|
||||||
}
|
|
||||||
payload := map[string]interface{}{
|
|
||||||
"input": text,
|
|
||||||
"voice": o.Voice,
|
|
||||||
"response_format": o.Format,
|
|
||||||
"download_format": o.Format,
|
|
||||||
"stream": o.Stream,
|
|
||||||
"speed": o.Speed,
|
|
||||||
// "return_download_link": true,
|
|
||||||
"lang_code": o.Language,
|
|
||||||
}
|
|
||||||
payloadBytes, err := json.Marshal(payload)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to marshal payload: %w", err)
|
|
||||||
}
|
|
||||||
req, err := http.NewRequest("POST", o.URL, bytes.NewBuffer(payloadBytes)) //nolint:noctx
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to create request: %w", err)
|
|
||||||
}
|
|
||||||
req.Header.Set("accept", "application/json")
|
|
||||||
req.Header.Set("Content-Type", "application/json")
|
|
||||||
resp, err := http.DefaultClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("request failed: %w", err)
|
|
||||||
}
|
|
||||||
if resp.StatusCode != http.StatusOK {
|
|
||||||
defer resp.Body.Close()
|
|
||||||
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
|
||||||
}
|
|
||||||
return resp.Body, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *KokoroOrator) Speak(text string) error {
|
|
||||||
o.logger.Debug("fn: Speak is called", "text-len", len(text))
|
|
||||||
body, err := o.requestSound(text)
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Error("request failed", "error", err)
|
|
||||||
return fmt.Errorf("request failed: %w", err)
|
|
||||||
}
|
|
||||||
defer body.Close()
|
|
||||||
// Decode the mp3 audio from response body
|
|
||||||
streamer, format, err := mp3.Decode(body)
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Error("mp3 decode failed", "error", err)
|
|
||||||
return fmt.Errorf("mp3 decode failed: %w", err)
|
|
||||||
}
|
|
||||||
defer streamer.Close()
|
|
||||||
// here it spams with errors that speaker cannot be initialized more than once, but how would we deal with many audio records then?
|
|
||||||
if err := speaker.Init(format.SampleRate, format.SampleRate.N(time.Second/10)); err != nil {
|
|
||||||
o.logger.Debug("failed to init speaker", "error", err)
|
|
||||||
}
|
|
||||||
done := make(chan bool)
|
|
||||||
o.mu.Lock()
|
|
||||||
o.currentDone = done
|
|
||||||
o.currentStream = &beep.Ctrl{Streamer: beep.Seq(streamer, beep.Callback(func() {
|
|
||||||
o.mu.Lock()
|
|
||||||
close(done)
|
|
||||||
o.currentStream = nil
|
|
||||||
o.currentDone = nil
|
|
||||||
o.mu.Unlock()
|
|
||||||
})), Paused: false}
|
|
||||||
o.mu.Unlock()
|
|
||||||
speaker.Play(o.currentStream)
|
|
||||||
<-done
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *KokoroOrator) Stop() {
|
|
||||||
// speaker.Clear()
|
|
||||||
o.logger.Debug("attempted to stop orator", "orator", o)
|
|
||||||
speaker.Lock()
|
|
||||||
defer speaker.Unlock()
|
|
||||||
o.mu.Lock()
|
|
||||||
defer o.mu.Unlock()
|
|
||||||
if o.currentStream != nil {
|
|
||||||
// o.currentStream.Paused = true
|
|
||||||
o.currentStream.Streamer = nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *GoogleTranslateOrator) stoproutine() {
|
|
||||||
for {
|
|
||||||
<-TTSDoneChan
|
|
||||||
o.logger.Debug("orator got done signal")
|
|
||||||
o.Stop()
|
|
||||||
// drain the channel
|
|
||||||
for len(TTSTextChan) > 0 {
|
|
||||||
<-TTSTextChan
|
|
||||||
}
|
|
||||||
o.mu.Lock()
|
|
||||||
o.textBuffer.Reset()
|
|
||||||
if o.currentDone != nil {
|
|
||||||
select {
|
|
||||||
case o.currentDone <- true:
|
|
||||||
default:
|
|
||||||
// Channel might be closed, ignore
|
|
||||||
}
|
|
||||||
}
|
|
||||||
o.interrupt = true
|
|
||||||
o.mu.Unlock()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *GoogleTranslateOrator) readroutine() {
|
|
||||||
tokenizer, _ := english.NewSentenceTokenizer(nil)
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case chunk := <-TTSTextChan:
|
|
||||||
o.mu.Lock()
|
|
||||||
o.interrupt = false
|
|
||||||
_, err := o.textBuffer.WriteString(chunk)
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
|
||||||
o.mu.Unlock()
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
text := o.textBuffer.String()
|
|
||||||
o.mu.Unlock()
|
|
||||||
sentences := tokenizer.Tokenize(text)
|
|
||||||
o.logger.Debug("adding chunk", "chunk", chunk, "text", text, "sen-len", len(sentences))
|
|
||||||
for i, sentence := range sentences {
|
|
||||||
if i == len(sentences)-1 { // last sentence
|
|
||||||
o.mu.Lock()
|
|
||||||
o.textBuffer.Reset()
|
|
||||||
_, err := o.textBuffer.WriteString(sentence.Text)
|
|
||||||
o.mu.Unlock()
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
continue // if only one (often incomplete) sentence; wait for next chunk
|
|
||||||
}
|
|
||||||
cleanedText := cleanText(sentence.Text)
|
|
||||||
if cleanedText == "" {
|
|
||||||
continue // Skip empty text after cleaning
|
|
||||||
}
|
|
||||||
o.logger.Debug("calling Speak with sentence", "sent", cleanedText)
|
|
||||||
if err := o.Speak(cleanedText); err != nil {
|
|
||||||
o.logger.Error("tts failed", "sentence", cleanedText, "error", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
case <-TTSFlushChan:
|
|
||||||
o.logger.Debug("got flushchan signal start")
|
|
||||||
// lln is done get the whole message out
|
|
||||||
if len(TTSTextChan) > 0 { // otherwise might get stuck
|
|
||||||
for chunk := range TTSTextChan {
|
|
||||||
o.mu.Lock()
|
|
||||||
_, err := o.textBuffer.WriteString(chunk)
|
|
||||||
o.mu.Unlock()
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Warn("failed to write to stringbuilder", "error", err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if len(TTSTextChan) == 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
o.mu.Lock()
|
|
||||||
remaining := o.textBuffer.String()
|
|
||||||
remaining = cleanText(remaining)
|
|
||||||
o.textBuffer.Reset()
|
|
||||||
o.mu.Unlock()
|
|
||||||
if remaining == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
o.logger.Debug("calling Speak with remainder", "rem", remaining)
|
|
||||||
sentencesRem := tokenizer.Tokenize(remaining)
|
|
||||||
for _, rs := range sentencesRem { // to avoid dumping large volume of text
|
|
||||||
o.mu.Lock()
|
|
||||||
interrupt := o.interrupt
|
|
||||||
o.mu.Unlock()
|
|
||||||
if interrupt {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if err := o.Speak(rs.Text); err != nil {
|
|
||||||
o.logger.Error("tts failed", "sentence", rs.Text, "error", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *GoogleTranslateOrator) GetLogger() *slog.Logger {
|
|
||||||
return o.logger
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *GoogleTranslateOrator) Speak(text string) error {
|
|
||||||
o.logger.Debug("fn: Speak is called", "text-len", len(text))
|
|
||||||
// Generate MP3 data using google-translate-tts
|
|
||||||
reader, err := o.speech.GenerateSpeech(text)
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Error("generate speech failed", "error", err)
|
|
||||||
return fmt.Errorf("generate speech failed: %w", err)
|
|
||||||
}
|
|
||||||
// Decode the mp3 audio from reader (wrap with NopCloser for io.ReadCloser)
|
|
||||||
streamer, format, err := mp3.Decode(io.NopCloser(reader))
|
|
||||||
if err != nil {
|
|
||||||
o.logger.Error("mp3 decode failed", "error", err)
|
|
||||||
return fmt.Errorf("mp3 decode failed: %w", err)
|
|
||||||
}
|
|
||||||
defer streamer.Close()
|
|
||||||
playbackStreamer := beep.Streamer(streamer)
|
|
||||||
speed := o.speech.Speed
|
|
||||||
if speed <= 0 {
|
|
||||||
speed = 1.0
|
|
||||||
}
|
|
||||||
if speed != 1.0 {
|
|
||||||
playbackStreamer = beep.ResampleRatio(3, float64(speed), streamer)
|
|
||||||
}
|
|
||||||
// Initialize speaker with the format's sample rate
|
|
||||||
if err := speaker.Init(format.SampleRate, format.SampleRate.N(time.Second/10)); err != nil {
|
|
||||||
o.logger.Debug("failed to init speaker", "error", err)
|
|
||||||
}
|
|
||||||
done := make(chan bool)
|
|
||||||
o.mu.Lock()
|
|
||||||
o.currentDone = done
|
|
||||||
o.currentStream = &beep.Ctrl{Streamer: beep.Seq(playbackStreamer, beep.Callback(func() {
|
|
||||||
o.mu.Lock()
|
|
||||||
close(done)
|
|
||||||
o.currentStream = nil
|
|
||||||
o.currentDone = nil
|
|
||||||
o.mu.Unlock()
|
|
||||||
})), Paused: false}
|
|
||||||
o.mu.Unlock()
|
|
||||||
speaker.Play(o.currentStream)
|
|
||||||
<-done // wait for playback to complete
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (o *GoogleTranslateOrator) Stop() {
|
|
||||||
o.logger.Debug("attempted to stop google translate orator")
|
|
||||||
speaker.Lock()
|
|
||||||
defer speaker.Unlock()
|
|
||||||
o.mu.Lock()
|
|
||||||
defer o.mu.Unlock()
|
|
||||||
if o.currentStream != nil {
|
|
||||||
o.currentStream.Streamer = nil
|
|
||||||
}
|
|
||||||
// Also stop the speech handler if possible
|
|
||||||
if o.speech != nil {
|
|
||||||
_ = o.speech.Stop()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -9,15 +9,13 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"gf-lt/config"
|
"gf-lt/config"
|
||||||
"io"
|
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
"syscall"
|
"syscall"
|
||||||
|
"time"
|
||||||
"github.com/gordonklaus/portaudio"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type WhisperBinary struct {
|
type WhisperBinary struct {
|
||||||
@@ -25,11 +23,143 @@ type WhisperBinary struct {
|
|||||||
whisperPath string
|
whisperPath string
|
||||||
modelPath string
|
modelPath string
|
||||||
lang string
|
lang string
|
||||||
ctx context.Context
|
// Per-recording fields (protected by mu)
|
||||||
cancel context.CancelFunc
|
|
||||||
mu sync.Mutex
|
mu sync.Mutex
|
||||||
recording bool
|
recording bool
|
||||||
audioBuffer []int16
|
tempFile string
|
||||||
|
ctx context.Context
|
||||||
|
cancel context.CancelFunc
|
||||||
|
cmd *exec.Cmd
|
||||||
|
cmdMu sync.Mutex
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *WhisperBinary) StartRecording() error {
|
||||||
|
w.mu.Lock()
|
||||||
|
defer w.mu.Unlock()
|
||||||
|
if w.recording {
|
||||||
|
return errors.New("recording is already in progress")
|
||||||
|
}
|
||||||
|
// Fresh context for this recording
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
w.ctx = ctx
|
||||||
|
w.cancel = cancel
|
||||||
|
// Create temporary file
|
||||||
|
tempFile, err := os.CreateTemp("", "recording_*.wav")
|
||||||
|
if err != nil {
|
||||||
|
cancel()
|
||||||
|
return fmt.Errorf("failed to create temp file: %w", err)
|
||||||
|
}
|
||||||
|
tempFile.Close()
|
||||||
|
w.tempFile = tempFile.Name()
|
||||||
|
// ffmpeg command: capture from default microphone, write WAV
|
||||||
|
args := []string{
|
||||||
|
"-f", "alsa", // or "pulse" if preferred
|
||||||
|
"-i", "default",
|
||||||
|
"-acodec", "pcm_s16le",
|
||||||
|
"-ar", "16000",
|
||||||
|
"-ac", "1",
|
||||||
|
"-y", // overwrite output file
|
||||||
|
w.tempFile,
|
||||||
|
}
|
||||||
|
cmd := exec.CommandContext(w.ctx, "ffmpeg", args...)
|
||||||
|
// Capture stderr for debugging (optional, but useful for diagnosing)
|
||||||
|
stderr, err := cmd.StderrPipe()
|
||||||
|
if err != nil {
|
||||||
|
cancel()
|
||||||
|
os.Remove(w.tempFile)
|
||||||
|
return fmt.Errorf("failed to create stderr pipe: %w", err)
|
||||||
|
}
|
||||||
|
go func() {
|
||||||
|
buf := make([]byte, 1024)
|
||||||
|
for {
|
||||||
|
n, err := stderr.Read(buf)
|
||||||
|
if n > 0 {
|
||||||
|
w.logger.Debug("ffmpeg stderr", "output", string(buf[:n]))
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
w.cmdMu.Lock()
|
||||||
|
w.cmd = cmd
|
||||||
|
w.cmdMu.Unlock()
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
cancel()
|
||||||
|
os.Remove(w.tempFile)
|
||||||
|
return fmt.Errorf("failed to start ffmpeg: %w", err)
|
||||||
|
}
|
||||||
|
w.recording = true
|
||||||
|
w.logger.Debug("Recording started", "file", w.tempFile)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *WhisperBinary) StopRecording() (string, error) {
|
||||||
|
w.mu.Lock()
|
||||||
|
defer w.mu.Unlock()
|
||||||
|
if !w.recording {
|
||||||
|
return "", errors.New("not currently recording")
|
||||||
|
}
|
||||||
|
w.recording = false
|
||||||
|
// Gracefully stop ffmpeg
|
||||||
|
w.cmdMu.Lock()
|
||||||
|
if w.cmd != nil && w.cmd.Process != nil {
|
||||||
|
w.logger.Debug("Sending SIGTERM to ffmpeg")
|
||||||
|
w.cmd.Process.Signal(syscall.SIGTERM)
|
||||||
|
// Wait for process to exit (up to 2 seconds)
|
||||||
|
done := make(chan error, 1)
|
||||||
|
go func() {
|
||||||
|
done <- w.cmd.Wait()
|
||||||
|
}()
|
||||||
|
select {
|
||||||
|
case <-done:
|
||||||
|
w.logger.Debug("ffmpeg exited after SIGTERM")
|
||||||
|
case <-time.After(2 * time.Second):
|
||||||
|
w.logger.Warn("ffmpeg did not exit, sending SIGKILL")
|
||||||
|
w.cmd.Process.Kill()
|
||||||
|
<-done
|
||||||
|
}
|
||||||
|
}
|
||||||
|
w.cmdMu.Unlock()
|
||||||
|
// Cancel context (already done, but for cleanliness)
|
||||||
|
if w.cancel != nil {
|
||||||
|
w.cancel()
|
||||||
|
}
|
||||||
|
// Validate temp file
|
||||||
|
if w.tempFile == "" {
|
||||||
|
return "", errors.New("no recording file")
|
||||||
|
}
|
||||||
|
defer os.Remove(w.tempFile)
|
||||||
|
info, err := os.Stat(w.tempFile)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to stat temp file: %w", err)
|
||||||
|
}
|
||||||
|
if info.Size() < 44 { // WAV header is 44 bytes
|
||||||
|
// Log ffmpeg stderr? Already captured in debug logs.
|
||||||
|
return "", fmt.Errorf("recording file too small (%d bytes), possibly no audio captured", info.Size())
|
||||||
|
}
|
||||||
|
// Run whisper.cpp binary
|
||||||
|
cmd := exec.Command(w.whisperPath, "-m", w.modelPath, "-l", w.lang, w.tempFile)
|
||||||
|
var outBuf, errBuf bytes.Buffer
|
||||||
|
cmd.Stdout = &outBuf
|
||||||
|
cmd.Stderr = &errBuf
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
w.logger.Error("whisper binary failed",
|
||||||
|
"error", err,
|
||||||
|
"stderr", errBuf.String(),
|
||||||
|
"file_size", info.Size())
|
||||||
|
return "", fmt.Errorf("whisper binary failed: %w (stderr: %s)", err, errBuf.String())
|
||||||
|
}
|
||||||
|
result := strings.TrimRight(outBuf.String(), "\n")
|
||||||
|
result = specialRE.ReplaceAllString(result, "")
|
||||||
|
return strings.TrimSpace(strings.ReplaceAll(result, "\n ", "\n")), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsRecording returns true if a recording is in progress.
|
||||||
|
func (w *WhisperBinary) IsRecording() bool {
|
||||||
|
w.mu.Lock()
|
||||||
|
defer w.mu.Unlock()
|
||||||
|
return w.recording
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewWhisperBinary(logger *slog.Logger, cfg *config.Config) *WhisperBinary {
|
func NewWhisperBinary(logger *slog.Logger, cfg *config.Config) *WhisperBinary {
|
||||||
@@ -44,283 +174,3 @@ func NewWhisperBinary(logger *slog.Logger, cfg *config.Config) *WhisperBinary {
|
|||||||
cancel: cancel,
|
cancel: cancel,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (w *WhisperBinary) StartRecording() error {
|
|
||||||
w.mu.Lock()
|
|
||||||
defer w.mu.Unlock()
|
|
||||||
if w.recording {
|
|
||||||
return errors.New("recording is already in progress")
|
|
||||||
}
|
|
||||||
// If context is cancelled, create a new one for the next recording session
|
|
||||||
if w.ctx.Err() != nil {
|
|
||||||
w.logger.Debug("Context cancelled, creating new context")
|
|
||||||
w.ctx, w.cancel = context.WithCancel(context.Background())
|
|
||||||
}
|
|
||||||
// Temporarily redirect stderr to suppress ALSA warnings during PortAudio init
|
|
||||||
origStderr, errDup := syscall.Dup(syscall.Stderr)
|
|
||||||
if errDup != nil {
|
|
||||||
return fmt.Errorf("failed to dup stderr: %w", errDup)
|
|
||||||
}
|
|
||||||
nullFD, err := syscall.Open("/dev/null", syscall.O_WRONLY, 0)
|
|
||||||
if err != nil {
|
|
||||||
_ = syscall.Close(origStderr) // Close the dup'd fd if open fails
|
|
||||||
return fmt.Errorf("failed to open /dev/null: %w", err)
|
|
||||||
}
|
|
||||||
// redirect stderr
|
|
||||||
_ = syscall.Dup2(nullFD, syscall.Stderr)
|
|
||||||
// Initialize PortAudio (this is where ALSA warnings occur)
|
|
||||||
portaudioErr := portaudio.Initialize()
|
|
||||||
defer func() {
|
|
||||||
// Restore stderr
|
|
||||||
_ = syscall.Dup2(origStderr, syscall.Stderr)
|
|
||||||
_ = syscall.Close(origStderr)
|
|
||||||
_ = syscall.Close(nullFD)
|
|
||||||
}()
|
|
||||||
if portaudioErr != nil {
|
|
||||||
return fmt.Errorf("portaudio init failed: %w", portaudioErr)
|
|
||||||
}
|
|
||||||
// Initialize audio buffer
|
|
||||||
w.audioBuffer = make([]int16, 0)
|
|
||||||
in := make([]int16, 1024) // buffer size
|
|
||||||
stream, err := portaudio.OpenDefaultStream(1, 0, 16000.0, len(in), in)
|
|
||||||
if err != nil {
|
|
||||||
if paErr := portaudio.Terminate(); paErr != nil {
|
|
||||||
return fmt.Errorf("failed to open microphone: %w; terminate error: %w", err, paErr)
|
|
||||||
}
|
|
||||||
return fmt.Errorf("failed to open microphone: %w", err)
|
|
||||||
}
|
|
||||||
go w.recordAudio(stream, in)
|
|
||||||
w.recording = true
|
|
||||||
w.logger.Debug("Recording started")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (w *WhisperBinary) recordAudio(stream *portaudio.Stream, in []int16) {
|
|
||||||
defer func() {
|
|
||||||
w.logger.Debug("recordAudio defer function called")
|
|
||||||
_ = stream.Stop() // Stop the stream
|
|
||||||
_ = portaudio.Terminate() // ignoring error as we're shutting down
|
|
||||||
w.logger.Debug("recordAudio terminated")
|
|
||||||
}()
|
|
||||||
w.logger.Debug("Starting audio stream")
|
|
||||||
if err := stream.Start(); err != nil {
|
|
||||||
w.logger.Error("Failed to start audio stream", "error", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
w.logger.Debug("Audio stream started, entering recording loop")
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case <-w.ctx.Done():
|
|
||||||
w.logger.Debug("Context done, exiting recording loop")
|
|
||||||
return
|
|
||||||
default:
|
|
||||||
// Check recording status with minimal lock time
|
|
||||||
w.mu.Lock()
|
|
||||||
recording := w.recording
|
|
||||||
w.mu.Unlock()
|
|
||||||
|
|
||||||
if !recording {
|
|
||||||
w.logger.Debug("Recording flag is false, exiting recording loop")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if err := stream.Read(); err != nil {
|
|
||||||
w.logger.Error("Error reading from stream", "error", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// Append samples to buffer - only acquire lock when necessary
|
|
||||||
w.mu.Lock()
|
|
||||||
if w.audioBuffer == nil {
|
|
||||||
w.audioBuffer = make([]int16, 0)
|
|
||||||
}
|
|
||||||
// Make a copy of the input buffer to avoid overwriting
|
|
||||||
tempBuffer := make([]int16, len(in))
|
|
||||||
copy(tempBuffer, in)
|
|
||||||
w.audioBuffer = append(w.audioBuffer, tempBuffer...)
|
|
||||||
w.mu.Unlock()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (w *WhisperBinary) StopRecording() (string, error) {
|
|
||||||
w.logger.Debug("StopRecording called")
|
|
||||||
w.mu.Lock()
|
|
||||||
if !w.recording {
|
|
||||||
w.mu.Unlock()
|
|
||||||
return "", errors.New("not currently recording")
|
|
||||||
}
|
|
||||||
w.logger.Debug("Setting recording to false and cancelling context")
|
|
||||||
w.recording = false
|
|
||||||
w.cancel() // This will stop the recording goroutine
|
|
||||||
w.mu.Unlock()
|
|
||||||
// // Small delay to allow the recording goroutine to react to context cancellation
|
|
||||||
// time.Sleep(20 * time.Millisecond)
|
|
||||||
// Save the recorded audio to a temporary file
|
|
||||||
tempFile, err := w.saveAudioToTempFile()
|
|
||||||
if err != nil {
|
|
||||||
w.logger.Error("Error saving audio to temp file", "error", err)
|
|
||||||
return "", fmt.Errorf("failed to save audio to temp file: %w", err)
|
|
||||||
}
|
|
||||||
w.logger.Debug("Saved audio to temp file", "file", tempFile)
|
|
||||||
// Run the whisper binary with a separate context to avoid cancellation during transcription
|
|
||||||
cmd := exec.Command(w.whisperPath, "-m", w.modelPath, "-l", w.lang, tempFile, "2>/dev/null")
|
|
||||||
var outBuf bytes.Buffer
|
|
||||||
cmd.Stdout = &outBuf
|
|
||||||
// Redirect stderr to suppress ALSA warnings and other stderr output
|
|
||||||
cmd.Stderr = io.Discard // Suppress stderr output from whisper binary
|
|
||||||
w.logger.Debug("Running whisper binary command")
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
// Clean up audio buffer
|
|
||||||
w.mu.Lock()
|
|
||||||
w.audioBuffer = nil
|
|
||||||
w.mu.Unlock()
|
|
||||||
// Since we're suppressing stderr, we'll just log that the command failed
|
|
||||||
w.logger.Error("Error running whisper binary", "error", err)
|
|
||||||
return "", fmt.Errorf("whisper binary failed: %w", err)
|
|
||||||
}
|
|
||||||
result := outBuf.String()
|
|
||||||
w.logger.Debug("Whisper binary completed", "result", result)
|
|
||||||
// Clean up audio buffer
|
|
||||||
w.mu.Lock()
|
|
||||||
w.audioBuffer = nil
|
|
||||||
w.mu.Unlock()
|
|
||||||
// Clean up the temporary file after transcription
|
|
||||||
w.logger.Debug("StopRecording completed")
|
|
||||||
os.Remove(tempFile)
|
|
||||||
result = strings.TrimRight(result, "\n")
|
|
||||||
// in case there are special tokens like [_BEG_]
|
|
||||||
result = specialRE.ReplaceAllString(result, "")
|
|
||||||
return strings.TrimSpace(strings.ReplaceAll(result, "\n ", "\n")), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// saveAudioToTempFile saves the recorded audio data to a temporary WAV file
|
|
||||||
func (w *WhisperBinary) saveAudioToTempFile() (string, error) {
|
|
||||||
w.logger.Debug("saveAudioToTempFile called")
|
|
||||||
// Create temporary WAV file
|
|
||||||
tempFile, err := os.CreateTemp("", "recording_*.wav")
|
|
||||||
if err != nil {
|
|
||||||
w.logger.Error("Failed to create temp file", "error", err)
|
|
||||||
return "", fmt.Errorf("failed to create temp file: %w", err)
|
|
||||||
}
|
|
||||||
w.logger.Debug("Created temp file", "file", tempFile.Name())
|
|
||||||
defer tempFile.Close()
|
|
||||||
|
|
||||||
// Write WAV header and data
|
|
||||||
w.logger.Debug("About to write WAV file", "file", tempFile.Name())
|
|
||||||
err = w.writeWAVFile(tempFile.Name())
|
|
||||||
if err != nil {
|
|
||||||
w.logger.Error("Error writing WAV file", "error", err)
|
|
||||||
return "", fmt.Errorf("failed to write WAV file: %w", err)
|
|
||||||
}
|
|
||||||
w.logger.Debug("WAV file written successfully", "file", tempFile.Name())
|
|
||||||
|
|
||||||
return tempFile.Name(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// writeWAVFile creates a WAV file from the recorded audio data
|
|
||||||
func (w *WhisperBinary) writeWAVFile(filename string) error {
|
|
||||||
w.logger.Debug("writeWAVFile called", "filename", filename)
|
|
||||||
// Open file for writing
|
|
||||||
file, err := os.Create(filename)
|
|
||||||
if err != nil {
|
|
||||||
w.logger.Error("Error creating file", "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer file.Close()
|
|
||||||
|
|
||||||
w.logger.Debug("About to acquire mutex in writeWAVFile")
|
|
||||||
w.mu.Lock()
|
|
||||||
w.logger.Debug("Locked mutex, copying audio buffer")
|
|
||||||
audioData := make([]int16, len(w.audioBuffer))
|
|
||||||
copy(audioData, w.audioBuffer)
|
|
||||||
w.mu.Unlock()
|
|
||||||
w.logger.Debug("Unlocked mutex", "audio_data_length", len(audioData))
|
|
||||||
|
|
||||||
if len(audioData) == 0 {
|
|
||||||
w.logger.Warn("No audio data to write")
|
|
||||||
return errors.New("no audio data to write")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Calculate data size (number of samples * size of int16)
|
|
||||||
dataSize := len(audioData) * 2 // 2 bytes per int16 sample
|
|
||||||
w.logger.Debug("Calculated data size", "size", dataSize)
|
|
||||||
|
|
||||||
// Write WAV header with the correct data size
|
|
||||||
header := w.createWAVHeader(16000, 1, 16, dataSize)
|
|
||||||
_, err = file.Write(header)
|
|
||||||
if err != nil {
|
|
||||||
w.logger.Error("Error writing WAV header", "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
w.logger.Debug("WAV header written successfully")
|
|
||||||
|
|
||||||
// Write audio data
|
|
||||||
w.logger.Debug("About to write audio data samples")
|
|
||||||
for i, sample := range audioData {
|
|
||||||
// Write little-endian 16-bit sample
|
|
||||||
_, err := file.Write([]byte{byte(sample), byte(sample >> 8)})
|
|
||||||
if err != nil {
|
|
||||||
w.logger.Error("Error writing sample", "index", i, "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
// Log progress every 10000 samples to avoid too much output
|
|
||||||
if i%10000 == 0 {
|
|
||||||
w.logger.Debug("Written samples", "count", i)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
w.logger.Debug("All audio data written successfully")
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// createWAVHeader creates a WAV file header
|
|
||||||
func (w *WhisperBinary) createWAVHeader(sampleRate, channels, bitsPerSample int, dataSize int) []byte {
|
|
||||||
header := make([]byte, 44)
|
|
||||||
copy(header[0:4], "RIFF")
|
|
||||||
// Total file size will be updated later
|
|
||||||
copy(header[8:12], "WAVE")
|
|
||||||
copy(header[12:16], "fmt ")
|
|
||||||
// fmt chunk size (16 for PCM)
|
|
||||||
header[16] = 16
|
|
||||||
header[17] = 0
|
|
||||||
header[18] = 0
|
|
||||||
header[19] = 0
|
|
||||||
// Audio format (1 = PCM)
|
|
||||||
header[20] = 1
|
|
||||||
header[21] = 0
|
|
||||||
// Number of channels
|
|
||||||
header[22] = byte(channels)
|
|
||||||
header[23] = 0
|
|
||||||
// Sample rate
|
|
||||||
header[24] = byte(sampleRate)
|
|
||||||
header[25] = byte(sampleRate >> 8)
|
|
||||||
header[26] = byte(sampleRate >> 16)
|
|
||||||
header[27] = byte(sampleRate >> 24)
|
|
||||||
// Byte rate
|
|
||||||
byteRate := sampleRate * channels * bitsPerSample / 8
|
|
||||||
header[28] = byte(byteRate)
|
|
||||||
header[29] = byte(byteRate >> 8)
|
|
||||||
header[30] = byte(byteRate >> 16)
|
|
||||||
header[31] = byte(byteRate >> 24)
|
|
||||||
// Block align
|
|
||||||
blockAlign := channels * bitsPerSample / 8
|
|
||||||
header[32] = byte(blockAlign)
|
|
||||||
header[33] = 0
|
|
||||||
// Bits per sample
|
|
||||||
header[34] = byte(bitsPerSample)
|
|
||||||
header[35] = 0
|
|
||||||
// "data" subchunk
|
|
||||||
copy(header[36:40], "data")
|
|
||||||
// Data size
|
|
||||||
header[40] = byte(dataSize)
|
|
||||||
header[41] = byte(dataSize >> 8)
|
|
||||||
header[42] = byte(dataSize >> 16)
|
|
||||||
header[43] = byte(dataSize >> 24)
|
|
||||||
|
|
||||||
return header
|
|
||||||
}
|
|
||||||
|
|
||||||
func (w *WhisperBinary) IsRecording() bool {
|
|
||||||
w.mu.Lock()
|
|
||||||
defer w.mu.Unlock()
|
|
||||||
return w.recording
|
|
||||||
}
|
|
||||||
|
|||||||
156
extra/whisper_server.go
Normal file
156
extra/whisper_server.go
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
//go:build extra
|
||||||
|
// +build extra
|
||||||
|
|
||||||
|
package extra
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log/slog"
|
||||||
|
"mime/multipart"
|
||||||
|
"net/http"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
type WhisperServer struct {
|
||||||
|
logger *slog.Logger
|
||||||
|
ServerURL string
|
||||||
|
SampleRate int
|
||||||
|
AudioBuffer *bytes.Buffer
|
||||||
|
recording bool // protected by mu
|
||||||
|
mu sync.Mutex // protects recording & AudioBuffer
|
||||||
|
cmd *exec.Cmd // protected by cmdMu
|
||||||
|
stopCh chan struct{} // protected by cmdMu
|
||||||
|
cmdMu sync.Mutex // protects cmd and stopCh
|
||||||
|
}
|
||||||
|
|
||||||
|
func (stt *WhisperServer) StartRecording() error {
|
||||||
|
stt.mu.Lock()
|
||||||
|
defer stt.mu.Unlock()
|
||||||
|
if stt.recording {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// Build ffmpeg command for microphone capture
|
||||||
|
args := []string{
|
||||||
|
"-f", "alsa",
|
||||||
|
"-i", "default",
|
||||||
|
"-acodec", "pcm_s16le",
|
||||||
|
"-ar", fmt.Sprint(stt.SampleRate),
|
||||||
|
"-ac", "1",
|
||||||
|
"-f", "s16le",
|
||||||
|
"-",
|
||||||
|
}
|
||||||
|
cmd := exec.Command("ffmpeg", args...)
|
||||||
|
stdout, err := cmd.StdoutPipe()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get stdout pipe: %w", err)
|
||||||
|
}
|
||||||
|
stt.cmdMu.Lock()
|
||||||
|
stt.cmd = cmd
|
||||||
|
stt.stopCh = make(chan struct{})
|
||||||
|
stt.cmdMu.Unlock()
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
return fmt.Errorf("failed to start ffmpeg: %w", err)
|
||||||
|
}
|
||||||
|
stt.recording = true
|
||||||
|
stt.AudioBuffer.Reset()
|
||||||
|
// Read PCM data in goroutine
|
||||||
|
go func() {
|
||||||
|
buf := make([]byte, 4096)
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-stt.stopCh:
|
||||||
|
return
|
||||||
|
default:
|
||||||
|
n, err := stdout.Read(buf)
|
||||||
|
if n > 0 {
|
||||||
|
stt.mu.Lock()
|
||||||
|
stt.AudioBuffer.Write(buf[:n])
|
||||||
|
stt.mu.Unlock()
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
if err != io.EOF {
|
||||||
|
stt.logger.Error("recording read error", "error", err)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (stt *WhisperServer) StopRecording() (string, error) {
|
||||||
|
stt.mu.Lock()
|
||||||
|
defer stt.mu.Unlock()
|
||||||
|
if !stt.recording {
|
||||||
|
return "", errors.New("not recording")
|
||||||
|
}
|
||||||
|
stt.recording = false
|
||||||
|
// Stop ffmpeg
|
||||||
|
stt.cmdMu.Lock()
|
||||||
|
if stt.cmd != nil && stt.cmd.Process != nil {
|
||||||
|
stt.cmd.Process.Kill()
|
||||||
|
stt.cmd.Wait()
|
||||||
|
}
|
||||||
|
close(stt.stopCh)
|
||||||
|
stt.cmdMu.Unlock()
|
||||||
|
// Rest of StopRecording unchanged (WAV header + HTTP upload)
|
||||||
|
// ...
|
||||||
|
stt.recording = false
|
||||||
|
// wait loop to finish?
|
||||||
|
if stt.AudioBuffer == nil {
|
||||||
|
err := errors.New("unexpected nil AudioBuffer")
|
||||||
|
stt.logger.Error(err.Error())
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Create WAV header first
|
||||||
|
body := &bytes.Buffer{}
|
||||||
|
writer := multipart.NewWriter(body)
|
||||||
|
// Add audio file part
|
||||||
|
part, err := writer.CreateFormFile("file", "recording.wav")
|
||||||
|
if err != nil {
|
||||||
|
stt.logger.Error("fn: StopRecording", "error", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Stream directly to multipart writer: header + raw data
|
||||||
|
dataSize := stt.AudioBuffer.Len()
|
||||||
|
stt.writeWavHeader(part, dataSize)
|
||||||
|
if _, err := io.Copy(part, stt.AudioBuffer); err != nil {
|
||||||
|
stt.logger.Error("fn: StopRecording", "error", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Reset buffer for next recording
|
||||||
|
stt.AudioBuffer.Reset()
|
||||||
|
// Add response format field
|
||||||
|
err = writer.WriteField("response_format", "text")
|
||||||
|
if err != nil {
|
||||||
|
stt.logger.Error("fn: StopRecording", "error", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if writer.Close() != nil {
|
||||||
|
stt.logger.Error("fn: StopRecording", "error", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Send request
|
||||||
|
resp, err := http.Post(stt.ServerURL, writer.FormDataContentType(), body) //nolint:noctx
|
||||||
|
if err != nil {
|
||||||
|
stt.logger.Error("fn: StopRecording", "error", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
// Read and print response
|
||||||
|
responseTextBytes, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
stt.logger.Error("fn: StopRecording", "error", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resptext := strings.TrimRight(string(responseTextBytes), "\n")
|
||||||
|
// in case there are special tokens like [_BEG_]
|
||||||
|
resptext = specialRE.ReplaceAllString(resptext, "")
|
||||||
|
return strings.TrimSpace(strings.ReplaceAll(resptext, "\n ", "\n")), nil
|
||||||
|
}
|
||||||
23
go.mod
23
go.mod
@@ -4,33 +4,40 @@ go 1.25.1
|
|||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/BurntSushi/toml v1.5.0
|
github.com/BurntSushi/toml v1.5.0
|
||||||
github.com/GrailFinder/google-translate-tts v0.1.3
|
github.com/GrailFinder/google-translate-tts v0.1.4
|
||||||
github.com/GrailFinder/searchagent v0.2.0
|
github.com/GrailFinder/searchagent v0.2.0
|
||||||
|
github.com/PuerkitoBio/goquery v1.11.0
|
||||||
github.com/gdamore/tcell/v2 v2.13.2
|
github.com/gdamore/tcell/v2 v2.13.2
|
||||||
github.com/glebarez/go-sqlite v1.22.0
|
github.com/glebarez/go-sqlite v1.22.0
|
||||||
github.com/gopxl/beep/v2 v2.1.1
|
|
||||||
github.com/gordonklaus/portaudio v0.0.0-20250206071425-98a94950218b
|
|
||||||
github.com/jmoiron/sqlx v1.4.0
|
github.com/jmoiron/sqlx v1.4.0
|
||||||
|
github.com/ledongthuc/pdf v0.0.0-20250511090121-5959a4027728
|
||||||
github.com/neurosnap/sentences v1.1.2
|
github.com/neurosnap/sentences v1.1.2
|
||||||
|
github.com/playwright-community/playwright-go v0.5700.1
|
||||||
github.com/rivo/tview v0.42.0
|
github.com/rivo/tview v0.42.0
|
||||||
|
github.com/sugarme/tokenizer v0.3.0
|
||||||
|
github.com/yalue/onnxruntime_go v1.27.0
|
||||||
|
github.com/yuin/goldmark v1.4.13
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/PuerkitoBio/goquery v1.11.0 // indirect
|
|
||||||
github.com/andybalholm/cascadia v1.3.3 // indirect
|
github.com/andybalholm/cascadia v1.3.3 // indirect
|
||||||
|
github.com/deckarep/golang-set/v2 v2.8.0 // indirect
|
||||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||||
github.com/ebitengine/oto/v3 v3.4.0 // indirect
|
github.com/emirpasic/gods v1.18.1 // indirect
|
||||||
github.com/ebitengine/purego v0.9.1 // indirect
|
|
||||||
github.com/gdamore/encoding v1.0.1 // indirect
|
github.com/gdamore/encoding v1.0.1 // indirect
|
||||||
|
github.com/go-jose/go-jose/v3 v3.0.4 // indirect
|
||||||
|
github.com/go-stack/stack v1.8.1 // indirect
|
||||||
github.com/google/uuid v1.6.0 // indirect
|
github.com/google/uuid v1.6.0 // indirect
|
||||||
github.com/hajimehoshi/go-mp3 v0.3.4 // indirect
|
github.com/hajimehoshi/go-mp3 v0.3.4 // indirect
|
||||||
github.com/hajimehoshi/oto/v2 v2.3.1 // indirect
|
|
||||||
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
|
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
|
||||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||||
github.com/pkg/errors v0.9.1 // indirect
|
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||||
github.com/rivo/uniseg v0.4.7 // indirect
|
github.com/rivo/uniseg v0.4.7 // indirect
|
||||||
|
github.com/schollz/progressbar/v2 v2.15.0 // indirect
|
||||||
|
github.com/sugarme/regexpset v0.0.0-20200920021344-4d4ec8eaf93c // indirect
|
||||||
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
|
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
|
||||||
golang.org/x/net v0.48.0 // indirect
|
golang.org/x/net v0.48.0 // indirect
|
||||||
golang.org/x/sys v0.39.0 // indirect
|
golang.org/x/sys v0.39.0 // indirect
|
||||||
|
|||||||
47
go.sum
47
go.sum
@@ -2,47 +2,50 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
|||||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||||
github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg=
|
github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg=
|
||||||
github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||||
github.com/GrailFinder/google-translate-tts v0.1.3 h1:Mww9tNzTWjjSh+OCbTPl/+21oMPKcUecXZfU7nTB/lA=
|
github.com/GrailFinder/google-translate-tts v0.1.4 h1:NJoPZUGfBrmouQMN19MUcNPNUx4tmf4a8OZRME4E4Mg=
|
||||||
github.com/GrailFinder/google-translate-tts v0.1.3/go.mod h1:YIOLKR7sObazdUCrSex3u9OVBovU55eYgWa25vsQJ18=
|
github.com/GrailFinder/google-translate-tts v0.1.4/go.mod h1:YIOLKR7sObazdUCrSex3u9OVBovU55eYgWa25vsQJ18=
|
||||||
github.com/GrailFinder/searchagent v0.2.0 h1:U2GVjLh/9xZt0xX9OcYk9Q2fMkyzyTiADPUmUisRdtQ=
|
github.com/GrailFinder/searchagent v0.2.0 h1:U2GVjLh/9xZt0xX9OcYk9Q2fMkyzyTiADPUmUisRdtQ=
|
||||||
github.com/GrailFinder/searchagent v0.2.0/go.mod h1:d66tn5+22LI8IGJREUsRBT60P0sFdgQgvQRqyvgItrs=
|
github.com/GrailFinder/searchagent v0.2.0/go.mod h1:d66tn5+22LI8IGJREUsRBT60P0sFdgQgvQRqyvgItrs=
|
||||||
github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw=
|
github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw=
|
||||||
github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ=
|
github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ=
|
||||||
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
|
||||||
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/deckarep/golang-set/v2 v2.8.0 h1:swm0rlPCmdWn9mESxKOjWk8hXSqoxOp+ZlfuyaAdFlQ=
|
||||||
|
github.com/deckarep/golang-set/v2 v2.8.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4=
|
||||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||||
github.com/ebitengine/oto/v3 v3.4.0 h1:br0PgASsEWaoWn38b2Goe7m1GKFYfNgnsjSd5Gg+/bQ=
|
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||||
github.com/ebitengine/oto/v3 v3.4.0/go.mod h1:IOleLVD0m+CMak3mRVwsYY8vTctQgOM0iiL6S7Ar7eI=
|
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||||
github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A=
|
|
||||||
github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
|
||||||
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
||||||
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
||||||
github.com/gdamore/tcell/v2 v2.13.2 h1:5j4srfF8ow3HICOv/61/sOhQtA25qxEB2XR3Q/Bhx2g=
|
github.com/gdamore/tcell/v2 v2.13.2 h1:5j4srfF8ow3HICOv/61/sOhQtA25qxEB2XR3Q/Bhx2g=
|
||||||
github.com/gdamore/tcell/v2 v2.13.2/go.mod h1:+Wfe208WDdB7INEtCsNrAN6O2m+wsTPk1RAovjaILlo=
|
github.com/gdamore/tcell/v2 v2.13.2/go.mod h1:+Wfe208WDdB7INEtCsNrAN6O2m+wsTPk1RAovjaILlo=
|
||||||
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
|
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
|
||||||
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
|
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
|
||||||
|
github.com/go-jose/go-jose/v3 v3.0.4 h1:Wp5HA7bLQcKnf6YYao/4kpRpVMp/yf6+pJKV8WFSaNY=
|
||||||
|
github.com/go-jose/go-jose/v3 v3.0.4/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
|
||||||
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
||||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||||
|
github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw=
|
||||||
|
github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4=
|
||||||
|
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/gopxl/beep/v2 v2.1.1 h1:6FYIYMm2qPAdWkjX+7xwKrViS1x0Po5kDMdRkq8NVbU=
|
|
||||||
github.com/gopxl/beep/v2 v2.1.1/go.mod h1:ZAm9TGQ9lvpoiFLd4zf5B1IuyxZhgRACMId1XJbaW0E=
|
|
||||||
github.com/gordonklaus/portaudio v0.0.0-20250206071425-98a94950218b h1:WEuQWBxelOGHA6z9lABqaMLMrfwVyMdN3UgRLT+YUPo=
|
|
||||||
github.com/gordonklaus/portaudio v0.0.0-20250206071425-98a94950218b/go.mod h1:esZFQEUwqC+l76f2R8bIWSwXMaPbp79PppwZ1eJhFco=
|
|
||||||
github.com/hajimehoshi/go-mp3 v0.3.4 h1:NUP7pBYH8OguP4diaTZ9wJbUbk3tC0KlfzsEpWmYj68=
|
github.com/hajimehoshi/go-mp3 v0.3.4 h1:NUP7pBYH8OguP4diaTZ9wJbUbk3tC0KlfzsEpWmYj68=
|
||||||
github.com/hajimehoshi/go-mp3 v0.3.4/go.mod h1:fRtZraRFcWb0pu7ok0LqyFhCUrPeMsGRSVop0eemFmo=
|
github.com/hajimehoshi/go-mp3 v0.3.4/go.mod h1:fRtZraRFcWb0pu7ok0LqyFhCUrPeMsGRSVop0eemFmo=
|
||||||
github.com/hajimehoshi/oto/v2 v2.3.1 h1:qrLKpNus2UfD674oxckKjNJmesp9hMh7u7QCrStB3Rc=
|
|
||||||
github.com/hajimehoshi/oto/v2 v2.3.1/go.mod h1:seWLbgHH7AyUMYKfKYT9pg7PhUu9/SisyJvNTT+ASQo=
|
github.com/hajimehoshi/oto/v2 v2.3.1/go.mod h1:seWLbgHH7AyUMYKfKYT9pg7PhUu9/SisyJvNTT+ASQo=
|
||||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||||
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
|
github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o=
|
||||||
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
|
github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY=
|
||||||
|
github.com/ledongthuc/pdf v0.0.0-20250511090121-5959a4027728 h1:QwWKgMY28TAXaDl+ExRDqGQltzXqN/xypdKP86niVn8=
|
||||||
|
github.com/ledongthuc/pdf v0.0.0-20250511090121-5959a4027728/go.mod h1:1fEHWurg7pvf5SG6XNE5Q8UZmOwex51Mkx3SLhrW5B4=
|
||||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||||
github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
|
github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
|
||||||
@@ -51,12 +54,16 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
|
|||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||||
|
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
|
||||||
|
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
|
||||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||||
github.com/neurosnap/sentences v1.1.2 h1:iphYOzx/XckXeBiLIUBkPu2EKMJ+6jDbz/sLJZ7ZoUw=
|
github.com/neurosnap/sentences v1.1.2 h1:iphYOzx/XckXeBiLIUBkPu2EKMJ+6jDbz/sLJZ7ZoUw=
|
||||||
github.com/neurosnap/sentences v1.1.2/go.mod h1:/pwU4E9XNL21ygMIkOIllv/SMy2ujHwpf8GQPu1YPbQ=
|
github.com/neurosnap/sentences v1.1.2/go.mod h1:/pwU4E9XNL21ygMIkOIllv/SMy2ujHwpf8GQPu1YPbQ=
|
||||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
|
||||||
|
github.com/playwright-community/playwright-go v0.5700.1 h1:PNFb1byWqrTT720rEO0JL88C6Ju0EmUnR5deFLvtP/U=
|
||||||
|
github.com/playwright-community/playwright-go v0.5700.1/go.mod h1:MlSn1dZrx8rszbCxY6x3qK89ZesJUYVx21B2JnkoNF0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
@@ -65,8 +72,20 @@ github.com/rivo/tview v0.42.0 h1:b/ftp+RxtDsHSaynXTbJb+/n/BxDEi+W3UfF5jILK6c=
|
|||||||
github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
|
github.com/rivo/tview v0.42.0/go.mod h1:cSfIYfhpSGCjp3r/ECJb+GKS7cGJnqV8vfjQPwoXyfY=
|
||||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
|
github.com/schollz/progressbar/v2 v2.15.0 h1:dVzHQ8fHRmtPjD3K10jT3Qgn/+H+92jhPrhmxIJfDz8=
|
||||||
|
github.com/schollz/progressbar/v2 v2.15.0/go.mod h1:UdPq3prGkfQ7MOzZKlDRpYKcFqEMczbD7YmbPgpzKMI=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
|
github.com/sugarme/regexpset v0.0.0-20200920021344-4d4ec8eaf93c h1:pwb4kNSHb4K89ymCaN+5lPH/MwnfSVg4rzGDh4d+iy4=
|
||||||
|
github.com/sugarme/regexpset v0.0.0-20200920021344-4d4ec8eaf93c/go.mod h1:2gwkXLWbDGUQWeL3RtpCmcY4mzCtU13kb9UsAg9xMaw=
|
||||||
|
github.com/sugarme/tokenizer v0.3.0 h1:FE8DYbNSz/kSbgEo9l/RjgYHkIJYEdskumitFQBE9FE=
|
||||||
|
github.com/sugarme/tokenizer v0.3.0/go.mod h1:VJ+DLK5ZEZwzvODOWwY0cw+B1dabTd3nCB5HuFCItCc=
|
||||||
|
github.com/yalue/onnxruntime_go v1.27.0 h1:c1YSgDNtpf0WGtxj3YeRIb8VC5LmM1J+Ve3uHdteC1U=
|
||||||
|
github.com/yalue/onnxruntime_go v1.27.0/go.mod h1:b4X26A8pekNb1ACJ58wAXgNKeUCGEAQ9dmACut9Sm/4=
|
||||||
|
github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
@@ -149,6 +168,8 @@ golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxb
|
|||||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
||||||
|
|||||||
792
helpfuncs.go
792
helpfuncs.go
@@ -6,14 +6,57 @@ import (
|
|||||||
"gf-lt/pngmeta"
|
"gf-lt/pngmeta"
|
||||||
"image"
|
"image"
|
||||||
"os"
|
"os"
|
||||||
|
"os/exec"
|
||||||
"path"
|
"path"
|
||||||
|
"path/filepath"
|
||||||
"slices"
|
"slices"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"sync/atomic"
|
||||||
|
"time"
|
||||||
"unicode"
|
"unicode"
|
||||||
|
|
||||||
"math/rand/v2"
|
"github.com/rivo/tview"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// Cached model color - updated by background goroutine
|
||||||
|
// var cachedModelColor string = "orange"
|
||||||
|
var cachedModelColor atomic.Value
|
||||||
|
|
||||||
|
// startModelColorUpdater starts a background goroutine that periodically updates
|
||||||
|
// the cached model color. Only runs HTTP requests for local llama.cpp APIs.
|
||||||
|
func startModelColorUpdater() {
|
||||||
|
go func() {
|
||||||
|
ticker := time.NewTicker(5 * time.Second)
|
||||||
|
defer ticker.Stop()
|
||||||
|
// Initial check
|
||||||
|
updateCachedModelColor()
|
||||||
|
for range ticker.C {
|
||||||
|
updateCachedModelColor()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
// updateCachedModelColor updates the global cachedModelColor variable
|
||||||
|
func updateCachedModelColor() {
|
||||||
|
if !isLocalLlamacpp() {
|
||||||
|
cachedModelColor.Store("orange")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Check if model is loaded
|
||||||
|
loaded, err := isModelLoaded(chatBody.Model)
|
||||||
|
if err != nil {
|
||||||
|
// On error, assume not loaded (red)
|
||||||
|
cachedModelColor.Store("red")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if loaded {
|
||||||
|
cachedModelColor.Store("green")
|
||||||
|
} else {
|
||||||
|
cachedModelColor.Store("red")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func isASCII(s string) bool {
|
func isASCII(s string) bool {
|
||||||
for i := 0; i < len(s); i++ {
|
for i := 0; i < len(s); i++ {
|
||||||
if s[i] > unicode.MaxASCII {
|
if s[i] > unicode.MaxASCII {
|
||||||
@@ -23,6 +66,34 @@ func isASCII(s string) bool {
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func mapToString[V any](m map[string]V) string {
|
||||||
|
rs := strings.Builder{}
|
||||||
|
for k, v := range m {
|
||||||
|
fmt.Fprintf(&rs, "%v: %v\n", k, v)
|
||||||
|
}
|
||||||
|
return rs.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// stripThinkingFromMsg removes thinking blocks from assistant messages.
|
||||||
|
// Skips user, tool, and system messages as they may contain thinking examples.
|
||||||
|
func stripThinkingFromMsg(msg *models.RoleMsg) *models.RoleMsg {
|
||||||
|
if !cfg.StripThinkingFromAPI {
|
||||||
|
return msg
|
||||||
|
}
|
||||||
|
// Skip user, tool, they might contain thinking and system messages - examples
|
||||||
|
if msg.Role == cfg.UserRole || msg.Role == cfg.ToolRole || msg.Role == "system" {
|
||||||
|
return msg
|
||||||
|
}
|
||||||
|
// Strip thinking from assistant messages
|
||||||
|
msgText := msg.GetText()
|
||||||
|
if models.ThinkRE.MatchString(msgText) {
|
||||||
|
cleanedText := models.ThinkRE.ReplaceAllString(msgText, "")
|
||||||
|
cleanedText = strings.TrimSpace(cleanedText)
|
||||||
|
msg.SetText(cleanedText)
|
||||||
|
}
|
||||||
|
return msg
|
||||||
|
}
|
||||||
|
|
||||||
// refreshChatDisplay updates the chat display based on current character view
|
// refreshChatDisplay updates the chat display based on current character view
|
||||||
// It filters messages for the character the user is currently "writing as"
|
// It filters messages for the character the user is currently "writing as"
|
||||||
// and updates the textView with the filtered conversation
|
// and updates the textView with the filtered conversation
|
||||||
@@ -35,14 +106,27 @@ func refreshChatDisplay() {
|
|||||||
// Filter messages for this character
|
// Filter messages for this character
|
||||||
filteredMessages := filterMessagesForCharacter(chatBody.Messages, viewingAs)
|
filteredMessages := filterMessagesForCharacter(chatBody.Messages, viewingAs)
|
||||||
displayText := chatToText(filteredMessages, cfg.ShowSys)
|
displayText := chatToText(filteredMessages, cfg.ShowSys)
|
||||||
// Use QueueUpdate for thread-safe UI updates
|
|
||||||
app.QueueUpdate(func() {
|
|
||||||
textView.SetText(displayText)
|
textView.SetText(displayText)
|
||||||
colorText()
|
colorText()
|
||||||
if scrollToEndEnabled {
|
updateStatusLine()
|
||||||
|
if cfg.AutoScrollEnabled {
|
||||||
textView.ScrollToEnd()
|
textView.ScrollToEnd()
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
|
|
||||||
|
// stopTTSIfNotForUser: character specific context, not meant fot the human to hear
|
||||||
|
func stopTTSIfNotForUser(msg *models.RoleMsg) {
|
||||||
|
if strings.Contains(cfg.CurrentAPI, "/chat") || !cfg.CharSpecificContextEnabled {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
viewingAs := cfg.UserRole
|
||||||
|
if cfg.WriteNextMsgAs != "" {
|
||||||
|
viewingAs = cfg.WriteNextMsgAs
|
||||||
|
}
|
||||||
|
// stop tts if msg is not for user
|
||||||
|
if !slices.Contains(msg.KnownTo, viewingAs) && cfg.TTS_ENABLED {
|
||||||
|
TTSDoneChan <- true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func colorText() {
|
func colorText() {
|
||||||
@@ -64,7 +148,7 @@ func colorText() {
|
|||||||
placeholderThink := "__THINK_BLOCK_%d__"
|
placeholderThink := "__THINK_BLOCK_%d__"
|
||||||
counterThink := 0
|
counterThink := 0
|
||||||
// Replace code blocks with placeholders and store their styled versions
|
// Replace code blocks with placeholders and store their styled versions
|
||||||
text = codeBlockRE.ReplaceAllStringFunc(text, func(match string) string {
|
text = models.CodeBlockRE.ReplaceAllStringFunc(text, func(match string) string {
|
||||||
// Style the code block and store it
|
// Style the code block and store it
|
||||||
styled := fmt.Sprintf("[red::i]%s[-:-:-]", match)
|
styled := fmt.Sprintf("[red::i]%s[-:-:-]", match)
|
||||||
codeBlocks = append(codeBlocks, styled)
|
codeBlocks = append(codeBlocks, styled)
|
||||||
@@ -73,7 +157,7 @@ func colorText() {
|
|||||||
counter++
|
counter++
|
||||||
return id
|
return id
|
||||||
})
|
})
|
||||||
text = thinkRE.ReplaceAllStringFunc(text, func(match string) string {
|
text = models.ThinkRE.ReplaceAllStringFunc(text, func(match string) string {
|
||||||
// Style the code block and store it
|
// Style the code block and store it
|
||||||
styled := fmt.Sprintf("[red::i]%s[-:-:-]", match)
|
styled := fmt.Sprintf("[red::i]%s[-:-:-]", match)
|
||||||
thinkBlocks = append(thinkBlocks, styled)
|
thinkBlocks = append(thinkBlocks, styled)
|
||||||
@@ -83,10 +167,10 @@ func colorText() {
|
|||||||
return id
|
return id
|
||||||
})
|
})
|
||||||
// Step 2: Apply other regex styles to the non-code parts
|
// Step 2: Apply other regex styles to the non-code parts
|
||||||
text = quotesRE.ReplaceAllString(text, `[orange::-]$1[-:-:-]`)
|
text = models.QuotesRE.ReplaceAllString(text, `[orange::-]$1[-:-:-]`)
|
||||||
text = starRE.ReplaceAllString(text, `[turquoise::i]$1[-:-:-]`)
|
text = models.StarRE.ReplaceAllString(text, `[turquoise::i]$1[-:-:-]`)
|
||||||
text = singleBacktickRE.ReplaceAllString(text, "`[pink::i]$1[-:-:-]`")
|
text = models.SingleBacktickRE.ReplaceAllString(text, "`[pink::i]$1[-:-:-]`")
|
||||||
// text = thinkRE.ReplaceAllString(text, `[yellow::i]$1[-:-:-]`)
|
// text = tools.ThinkRE.ReplaceAllString(text, `[yellow::i]$1[-:-:-]`)
|
||||||
// Step 3: Restore the styled code blocks from placeholders
|
// Step 3: Restore the styled code blocks from placeholders
|
||||||
for i, cb := range codeBlocks {
|
for i, cb := range codeBlocks {
|
||||||
text = strings.Replace(text, fmt.Sprintf(placeholder, i), cb, 1)
|
text = strings.Replace(text, fmt.Sprintf(placeholder, i), cb, 1)
|
||||||
@@ -98,13 +182,13 @@ func colorText() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func updateStatusLine() {
|
func updateStatusLine() {
|
||||||
statusLineWidget.SetText(makeStatusLine())
|
status := makeStatusLine()
|
||||||
helpView.SetText(fmt.Sprintf(helpText, makeStatusLine()))
|
statusLineWidget.SetText(status)
|
||||||
}
|
}
|
||||||
|
|
||||||
func initSysCards() ([]string, error) {
|
func initSysCards() ([]string, error) {
|
||||||
labels := []string{}
|
labels := []string{}
|
||||||
labels = append(labels, sysLabels...)
|
labels = append(labels, models.SysLabels...)
|
||||||
cards, err := pngmeta.ReadDirCards(cfg.SysDir, cfg.UserRole, logger)
|
cards, err := pngmeta.ReadDirCards(cfg.SysDir, cfg.UserRole, logger)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to read sys dir", "error", err)
|
logger.Error("failed to read sys dir", "error", err)
|
||||||
@@ -115,7 +199,11 @@ func initSysCards() ([]string, error) {
|
|||||||
logger.Warn("empty role", "file", cc.FilePath)
|
logger.Warn("empty role", "file", cc.FilePath)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
sysMap[cc.Role] = cc
|
if cc.ID == "" {
|
||||||
|
cc.ID = models.ComputeCardID(cc.Role, cc.FilePath)
|
||||||
|
}
|
||||||
|
sysMap[cc.ID] = cc
|
||||||
|
roleToID[cc.Role] = cc.ID
|
||||||
labels = append(labels, cc.Role)
|
labels = append(labels, cc.Role)
|
||||||
}
|
}
|
||||||
return labels, nil
|
return labels, nil
|
||||||
@@ -135,6 +223,8 @@ func startNewChat(keepSysP bool) {
|
|||||||
newChat := &models.Chat{
|
newChat := &models.Chat{
|
||||||
ID: id + 1,
|
ID: id + 1,
|
||||||
Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole),
|
Name: fmt.Sprintf("%d_%s", id+1, cfg.AssistantRole),
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
// chat is written to db when we get first llm response (or any)
|
// chat is written to db when we get first llm response (or any)
|
||||||
// actual chat history (messages) would be parsed then
|
// actual chat history (messages) would be parsed then
|
||||||
Msgs: "",
|
Msgs: "",
|
||||||
@@ -202,24 +292,25 @@ func listRolesWithUser() []string {
|
|||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadImage() {
|
func loadImage() error {
|
||||||
filepath := defaultImage
|
filepath := defaultImage
|
||||||
cc, ok := sysMap[cfg.AssistantRole]
|
cc := GetCardByRole(cfg.AssistantRole)
|
||||||
if ok {
|
if cc != nil {
|
||||||
if strings.HasSuffix(cc.FilePath, ".png") {
|
if strings.HasSuffix(cc.FilePath, ".png") {
|
||||||
filepath = cc.FilePath
|
filepath = cc.FilePath
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
file, err := os.Open(filepath)
|
file, err := os.Open(filepath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
return fmt.Errorf("failed to open image: %w", err)
|
||||||
}
|
}
|
||||||
defer file.Close()
|
defer file.Close()
|
||||||
img, _, err := image.Decode(file)
|
img, _, err := image.Decode(file)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
return fmt.Errorf("failed to decode image: %w", err)
|
||||||
}
|
}
|
||||||
imgView.SetImage(img)
|
imgView.SetImage(img)
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func strInSlice(s string, sl []string) bool {
|
func strInSlice(s string, sl []string) bool {
|
||||||
@@ -231,6 +322,21 @@ func strInSlice(s string, sl []string) bool {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// isLocalLlamacpp checks if the current API is a local llama.cpp instance.
|
||||||
|
func isLocalLlamacpp() bool {
|
||||||
|
if strings.Contains(cfg.CurrentAPI, "openrouter") || strings.Contains(cfg.CurrentAPI, "deepseek") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// getModelColor returns the cached color tag for the model name.
|
||||||
|
// The cached value is updated by a background goroutine every 5 seconds.
|
||||||
|
// For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
|
||||||
|
func getModelColor() string {
|
||||||
|
return cachedModelColor.Load().(string)
|
||||||
|
}
|
||||||
|
|
||||||
func makeStatusLine() string {
|
func makeStatusLine() string {
|
||||||
isRecording := false
|
isRecording := false
|
||||||
if asr != nil {
|
if asr != nil {
|
||||||
@@ -260,21 +366,103 @@ func makeStatusLine() string {
|
|||||||
} else {
|
} else {
|
||||||
shellModeInfo = ""
|
shellModeInfo = ""
|
||||||
}
|
}
|
||||||
statusLine := fmt.Sprintf(indexLineCompletion, boolColors[botRespMode], botRespMode, activeChatName,
|
// Get model color based on load status for local llama.cpp models
|
||||||
boolColors[cfg.ToolUse], cfg.ToolUse, chatBody.Model, boolColors[cfg.SkipLLMResp],
|
modelColor := getModelColor()
|
||||||
cfg.SkipLLMResp, cfg.CurrentAPI, boolColors[isRecording], isRecording, persona,
|
statusLine := fmt.Sprintf(statusLineTempl, activeChatName,
|
||||||
botPersona, boolColors[injectRole], injectRole)
|
boolColors[cfg.ToolUse], modelColor, chatBody.Model, boolColors[cfg.SkipLLMResp],
|
||||||
|
cfg.CurrentAPI, persona, botPersona)
|
||||||
|
if cfg.STT_ENABLED {
|
||||||
|
recordingS := fmt.Sprintf(" | [%s:-:b]voice recording[-:-:-] (ctrl+r)",
|
||||||
|
boolColors[isRecording])
|
||||||
|
statusLine += recordingS
|
||||||
|
}
|
||||||
|
// completion endpoint
|
||||||
|
if !strings.Contains(cfg.CurrentAPI, "chat") {
|
||||||
|
roleInject := fmt.Sprintf(" | [%s:-:b]role injection[-:-:-] (alt+7)", boolColors[injectRole])
|
||||||
|
statusLine += roleInject
|
||||||
|
}
|
||||||
|
// context tokens
|
||||||
|
contextTokens := getContextTokens()
|
||||||
|
maxCtx := getMaxContextTokens()
|
||||||
|
if maxCtx == 0 {
|
||||||
|
maxCtx = 16384
|
||||||
|
}
|
||||||
|
if contextTokens > 0 {
|
||||||
|
contextInfo := fmt.Sprintf(" | context-estim: [orange:-:b]%d/%d[-:-:-]", contextTokens, maxCtx)
|
||||||
|
statusLine += contextInfo
|
||||||
|
}
|
||||||
return statusLine + imageInfo + shellModeInfo
|
return statusLine + imageInfo + shellModeInfo
|
||||||
}
|
}
|
||||||
|
|
||||||
var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
|
func getContextTokens() int {
|
||||||
|
if chatBody == nil || chatBody.Messages == nil {
|
||||||
func randString(n int) string {
|
return 0
|
||||||
b := make([]rune, n)
|
|
||||||
for i := range b {
|
|
||||||
b[i] = letters[rand.IntN(len(letters))]
|
|
||||||
}
|
}
|
||||||
return string(b)
|
total := 0
|
||||||
|
messages := chatBody.Messages
|
||||||
|
for i := range messages {
|
||||||
|
msg := &messages[i]
|
||||||
|
if msg.Stats != nil && msg.Stats.Tokens > 0 {
|
||||||
|
total += msg.Stats.Tokens
|
||||||
|
} else if msg.GetText() != "" {
|
||||||
|
total += len(msg.GetText()) / 4
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return total
|
||||||
|
}
|
||||||
|
|
||||||
|
const deepseekContext = 128000
|
||||||
|
|
||||||
|
func getMaxContextTokens() int {
|
||||||
|
if chatBody == nil || chatBody.Model == "" {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
modelName := chatBody.Model
|
||||||
|
switch {
|
||||||
|
case strings.Contains(cfg.CurrentAPI, "openrouter"):
|
||||||
|
if orModelsData != nil {
|
||||||
|
for i := range orModelsData.Data {
|
||||||
|
m := &orModelsData.Data[i]
|
||||||
|
if m.ID == modelName {
|
||||||
|
return m.ContextLength
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case strings.Contains(cfg.CurrentAPI, "deepseek"):
|
||||||
|
return deepseekContext
|
||||||
|
default:
|
||||||
|
if localModelsData != nil {
|
||||||
|
for i := range localModelsData.Data {
|
||||||
|
m := &localModelsData.Data[i]
|
||||||
|
if m.ID == modelName {
|
||||||
|
for _, arg := range m.Status.Args {
|
||||||
|
if strings.HasPrefix(arg, "--ctx-size") {
|
||||||
|
if strings.Contains(arg, "=") {
|
||||||
|
val := strings.Split(arg, "=")[1]
|
||||||
|
if n, err := strconv.Atoi(val); err == nil {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
idx := -1
|
||||||
|
for j, a := range m.Status.Args {
|
||||||
|
if a == "--ctx-size" && j+1 < len(m.Status.Args) {
|
||||||
|
idx = j + 1
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if idx != -1 {
|
||||||
|
if n, err := strconv.Atoi(m.Status.Args[idx]); err == nil {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
// set of roles within card definition and mention in chat history
|
// set of roles within card definition and mention in chat history
|
||||||
@@ -284,13 +472,9 @@ func listChatRoles() []string {
|
|||||||
if !ok {
|
if !ok {
|
||||||
return cbc
|
return cbc
|
||||||
}
|
}
|
||||||
currentCard, ok := sysMap[currentChat.Agent]
|
currentCard := GetCardByRole(currentChat.Agent)
|
||||||
if !ok {
|
if currentCard == nil {
|
||||||
// case which won't let to switch roles:
|
logger.Warn("failed to find current card", "agent", currentChat.Agent)
|
||||||
// started new chat (basic_sys or any other), at the start it yet be saved or have chatbody
|
|
||||||
// if it does not have a card or chars, it'll return an empty slice
|
|
||||||
// log error
|
|
||||||
logger.Warn("failed to find current card in sysMap", "agent", currentChat.Agent, "sysMap", sysMap)
|
|
||||||
return cbc
|
return cbc
|
||||||
}
|
}
|
||||||
charset := []string{}
|
charset := []string{}
|
||||||
@@ -306,12 +490,536 @@ func listChatRoles() []string {
|
|||||||
func deepseekModelValidator() error {
|
func deepseekModelValidator() error {
|
||||||
if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI {
|
if cfg.CurrentAPI == cfg.DeepSeekChatAPI || cfg.CurrentAPI == cfg.DeepSeekCompletionAPI {
|
||||||
if chatBody.Model != "deepseek-chat" && chatBody.Model != "deepseek-reasoner" {
|
if chatBody.Model != "deepseek-chat" && chatBody.Model != "deepseek-reasoner" {
|
||||||
if err := notifyUser("bad request", "wrong deepseek model name"); err != nil {
|
showToast("bad request", "wrong deepseek model name")
|
||||||
logger.Warn("failed ot notify user", "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// == shellmode ==
|
||||||
|
|
||||||
|
func toggleShellMode() {
|
||||||
|
shellMode = !shellMode
|
||||||
|
setShellMode(shellMode)
|
||||||
|
if shellMode {
|
||||||
|
shellInput.SetLabel(fmt.Sprintf("[%s]$ ", cfg.FilePickerDir))
|
||||||
|
} else {
|
||||||
|
textArea.SetPlaceholder("input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message.")
|
||||||
|
}
|
||||||
|
updateStatusLine()
|
||||||
|
}
|
||||||
|
|
||||||
|
func updateFlexLayout() {
|
||||||
|
if fullscreenMode {
|
||||||
|
// flex already contains only focused widget; do nothing
|
||||||
|
return
|
||||||
|
}
|
||||||
|
flex.Clear()
|
||||||
|
flex.AddItem(textView, 0, 40, false)
|
||||||
|
if shellMode {
|
||||||
|
flex.AddItem(shellInput, 0, 10, false)
|
||||||
|
} else {
|
||||||
|
flex.AddItem(bottomFlex, 0, 10, true)
|
||||||
|
}
|
||||||
|
if positionVisible {
|
||||||
|
flex.AddItem(statusLineWidget, 0, 2, false)
|
||||||
|
}
|
||||||
|
// Keep focus on currently focused widget
|
||||||
|
focused := app.GetFocus()
|
||||||
|
switch {
|
||||||
|
case focused == textView:
|
||||||
|
app.SetFocus(textView)
|
||||||
|
case shellMode:
|
||||||
|
app.SetFocus(shellInput)
|
||||||
|
default:
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func executeCommandAndDisplay(cmdText string) {
|
||||||
|
cmdText = strings.TrimSpace(cmdText)
|
||||||
|
if cmdText == "" {
|
||||||
|
fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n")
|
||||||
|
if cfg.AutoScrollEnabled {
|
||||||
|
textView.ScrollToEnd()
|
||||||
|
}
|
||||||
|
colorText()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
workingDir := cfg.FilePickerDir
|
||||||
|
// Handle cd command specially to update working directory
|
||||||
|
if strings.HasPrefix(cmdText, "cd ") {
|
||||||
|
newDir := strings.TrimPrefix(cmdText, "cd ")
|
||||||
|
newDir = strings.TrimSpace(newDir)
|
||||||
|
// Handle cd ~ or cdHOME
|
||||||
|
if strings.HasPrefix(newDir, "~") {
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
newDir = strings.Replace(newDir, "~", home, 1)
|
||||||
|
}
|
||||||
|
// Check if directory exists
|
||||||
|
if _, err := os.Stat(newDir); err == nil {
|
||||||
|
workingDir = newDir
|
||||||
|
cfg.FilePickerDir = workingDir
|
||||||
|
// Update shell input label with new directory
|
||||||
|
shellInput.SetLabel(fmt.Sprintf("[%s]$ ", cfg.FilePickerDir))
|
||||||
|
outputContent := workingDir
|
||||||
|
// Add the command being executed to the chat
|
||||||
|
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||||
|
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
||||||
|
fmt.Fprintf(textView, "%s\n", outputContent)
|
||||||
|
combinedMsg := models.RoleMsg{
|
||||||
|
Role: cfg.ToolRole,
|
||||||
|
Content: "$ " + cmdText + "\n\n" + outputContent,
|
||||||
|
}
|
||||||
|
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
||||||
|
if cfg.AutoScrollEnabled {
|
||||||
|
textView.ScrollToEnd()
|
||||||
|
}
|
||||||
|
colorText()
|
||||||
|
return
|
||||||
|
} else {
|
||||||
|
outputContent := "cd: " + newDir + ": No such file or directory"
|
||||||
|
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||||
|
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
||||||
|
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputContent)
|
||||||
|
combinedMsg := models.RoleMsg{
|
||||||
|
Role: cfg.ToolRole,
|
||||||
|
Content: "$ " + cmdText + "\n\n" + outputContent,
|
||||||
|
}
|
||||||
|
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
||||||
|
if cfg.AutoScrollEnabled {
|
||||||
|
textView.ScrollToEnd()
|
||||||
|
}
|
||||||
|
colorText()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use /bin/sh to support pipes, redirects, etc.
|
||||||
|
cmd := exec.Command("/bin/sh", "-c", cmdText)
|
||||||
|
cmd.Dir = workingDir
|
||||||
|
// Execute the command and get output
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
// Add the command being executed to the chat
|
||||||
|
fmt.Fprintf(textView, "\n[-:-:b](%d) <%s>: [-:-:-]\n$ %s\n",
|
||||||
|
len(chatBody.Messages), cfg.ToolRole, cmdText)
|
||||||
|
var outputContent string
|
||||||
|
if err != nil {
|
||||||
|
// Include both output and error
|
||||||
|
errorMsg := "Error: " + err.Error()
|
||||||
|
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", errorMsg)
|
||||||
|
if len(output) > 0 {
|
||||||
|
outputStr := string(output)
|
||||||
|
fmt.Fprintf(textView, "[red]%s[-:-:-]\n", outputStr)
|
||||||
|
outputContent = errorMsg + "\n" + outputStr
|
||||||
|
} else {
|
||||||
|
outputContent = errorMsg
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Only output if successful
|
||||||
|
if len(output) > 0 {
|
||||||
|
outputStr := string(output)
|
||||||
|
fmt.Fprintf(textView, "[green]%s[-:-:-]\n", outputStr)
|
||||||
|
outputContent = outputStr
|
||||||
|
} else {
|
||||||
|
successMsg := "Command executed successfully (no output)"
|
||||||
|
fmt.Fprintf(textView, "[green]%s[-:-:-]\n", successMsg)
|
||||||
|
outputContent = successMsg
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Combine command and output in a single message for chat history
|
||||||
|
combinedContent := "$ " + cmdText + "\n\n" + outputContent
|
||||||
|
combinedMsg := models.RoleMsg{
|
||||||
|
Role: cfg.ToolRole,
|
||||||
|
Content: combinedContent,
|
||||||
|
}
|
||||||
|
chatBody.Messages = append(chatBody.Messages, combinedMsg)
|
||||||
|
// Scroll to end and update colors
|
||||||
|
if cfg.AutoScrollEnabled {
|
||||||
|
textView.ScrollToEnd()
|
||||||
|
}
|
||||||
|
colorText()
|
||||||
|
// Add command to history (avoid duplicates at the end)
|
||||||
|
if len(shellHistory) == 0 || shellHistory[len(shellHistory)-1] != cmdText {
|
||||||
|
shellHistory = append(shellHistory, cmdText)
|
||||||
|
}
|
||||||
|
shellHistoryPos = -1
|
||||||
|
}
|
||||||
|
|
||||||
|
// == search ==
|
||||||
|
|
||||||
|
// Global variables for search state
|
||||||
|
var searchResults []int
|
||||||
|
var searchResultLengths []int // To store the length of each match in the formatted string
|
||||||
|
var searchIndex int
|
||||||
|
var searchText string
|
||||||
|
var originalTextForSearch string
|
||||||
|
|
||||||
|
// performSearch searches for the given term in the textView content and highlights matches
|
||||||
|
func performSearch(term string) {
|
||||||
|
searchText = term
|
||||||
|
if searchText == "" {
|
||||||
|
searchResults = nil
|
||||||
|
searchResultLengths = nil
|
||||||
|
originalTextForSearch = ""
|
||||||
|
// Re-render text without highlights
|
||||||
|
textView.SetText(chatToText(chatBody.Messages, cfg.ShowSys))
|
||||||
|
colorText()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Get formatted text and search directly in it to avoid mapping issues
|
||||||
|
formattedText := textView.GetText(true)
|
||||||
|
originalTextForSearch = formattedText
|
||||||
|
searchTermLower := strings.ToLower(searchText)
|
||||||
|
formattedTextLower := strings.ToLower(formattedText)
|
||||||
|
// Find all occurrences of the search term in the formatted text directly
|
||||||
|
formattedSearchResults := []int{}
|
||||||
|
searchStart := 0
|
||||||
|
for {
|
||||||
|
pos := strings.Index(formattedTextLower[searchStart:], searchTermLower)
|
||||||
|
if pos == -1 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
absolutePos := searchStart + pos
|
||||||
|
formattedSearchResults = append(formattedSearchResults, absolutePos)
|
||||||
|
searchStart = absolutePos + len(searchText)
|
||||||
|
}
|
||||||
|
if len(formattedSearchResults) == 0 {
|
||||||
|
// No matches found
|
||||||
|
searchResults = nil
|
||||||
|
searchResultLengths = nil
|
||||||
|
notification := "Pattern not found: " + term
|
||||||
|
showToast("search", notification)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Store the formatted text positions and lengths for accurate highlighting
|
||||||
|
searchResults = formattedSearchResults
|
||||||
|
// Create lengths array - all matches have the same length as the search term
|
||||||
|
searchResultLengths = make([]int, len(formattedSearchResults))
|
||||||
|
for i := range searchResultLengths {
|
||||||
|
searchResultLengths[i] = len(searchText)
|
||||||
|
}
|
||||||
|
searchIndex = 0
|
||||||
|
highlightCurrentMatch()
|
||||||
|
}
|
||||||
|
|
||||||
|
// highlightCurrentMatch highlights the current search match and scrolls to it
|
||||||
|
func highlightCurrentMatch() {
|
||||||
|
if len(searchResults) == 0 || searchIndex >= len(searchResults) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Get the stored formatted text
|
||||||
|
formattedText := originalTextForSearch
|
||||||
|
// For tview to properly support highlighting and scrolling, we need to work with its region system
|
||||||
|
// Instead of just applying highlights, we need to add region tags to the text
|
||||||
|
highlightedText := addRegionTags(formattedText, searchResults, searchResultLengths, searchIndex, searchText)
|
||||||
|
// Update the text view with the text that includes region tags
|
||||||
|
textView.SetText(highlightedText)
|
||||||
|
// Highlight the current region and scroll to it
|
||||||
|
// Need to identify which position in the results array corresponds to the current match
|
||||||
|
// The region ID will be search_<position>_<index>
|
||||||
|
currentRegion := fmt.Sprintf("search_%d_%d", searchResults[searchIndex], searchIndex)
|
||||||
|
textView.Highlight(currentRegion).ScrollToHighlight()
|
||||||
|
// Send notification about which match we're at
|
||||||
|
notification := fmt.Sprintf("Match %d of %d", searchIndex+1, len(searchResults))
|
||||||
|
showToast("search", notification)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showSearchBar shows the search input field as an overlay
|
||||||
|
func showSearchBar() {
|
||||||
|
// Create a temporary flex to combine search and main content
|
||||||
|
updatedFlex := tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
|
AddItem(searchField, 3, 0, true). // Search field at top
|
||||||
|
AddItem(flex, 0, 1, false) // Main flex layout below
|
||||||
|
// Add the search overlay as a page
|
||||||
|
pages.AddPage(searchPageName, updatedFlex, true, true)
|
||||||
|
app.SetFocus(searchField)
|
||||||
|
}
|
||||||
|
|
||||||
|
// hideSearchBar hides the search input field
|
||||||
|
func hideSearchBar() {
|
||||||
|
pages.RemovePage(searchPageName)
|
||||||
|
// Return focus to the text view
|
||||||
|
app.SetFocus(textView)
|
||||||
|
// Clear the search field
|
||||||
|
searchField.SetText("")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Global variables for index overlay functionality
|
||||||
|
var indexPageName = "indexOverlay"
|
||||||
|
|
||||||
|
// showIndexBar shows the index input field as an overlay at the top
|
||||||
|
func showIndexBar() {
|
||||||
|
// Create a temporary flex to combine index input and main content
|
||||||
|
updatedFlex := tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
|
AddItem(indexPickWindow, 3, 0, true). // Index field at top
|
||||||
|
AddItem(flex, 0, 1, false) // Main flex layout below
|
||||||
|
|
||||||
|
// Add the index overlay as a page
|
||||||
|
pages.AddPage(indexPageName, updatedFlex, true, true)
|
||||||
|
app.SetFocus(indexPickWindow)
|
||||||
|
}
|
||||||
|
|
||||||
|
// hideIndexBar hides the index input field
|
||||||
|
func hideIndexBar() {
|
||||||
|
pages.RemovePage(indexPageName)
|
||||||
|
// Return focus to the text view
|
||||||
|
app.SetFocus(textView)
|
||||||
|
// Clear the index field
|
||||||
|
indexPickWindow.SetText("")
|
||||||
|
}
|
||||||
|
|
||||||
|
// addRegionTags adds region tags to search matches in the text for tview highlighting
|
||||||
|
func addRegionTags(text string, positions []int, lengths []int, currentIdx int, searchTerm string) string {
|
||||||
|
if len(positions) == 0 {
|
||||||
|
return text
|
||||||
|
}
|
||||||
|
var result strings.Builder
|
||||||
|
lastEnd := 0
|
||||||
|
for i, pos := range positions {
|
||||||
|
endPos := pos + lengths[i]
|
||||||
|
// Add text before this match
|
||||||
|
if pos > lastEnd {
|
||||||
|
result.WriteString(text[lastEnd:pos])
|
||||||
|
}
|
||||||
|
// The matched text, which may contain its own formatting tags
|
||||||
|
actualText := text[pos:endPos]
|
||||||
|
// Add region tag and highlighting for this match
|
||||||
|
// Use a unique region id that includes the match index to avoid conflicts
|
||||||
|
regionId := fmt.Sprintf("search_%d_%d", pos, i) // position + index to ensure uniqueness
|
||||||
|
var highlightStart, highlightEnd string
|
||||||
|
if i == currentIdx {
|
||||||
|
// Current match - use different highlighting
|
||||||
|
highlightStart = fmt.Sprintf(`["%s"][yellow:blue:b]`, regionId) // Current match with region and special highlight
|
||||||
|
highlightEnd = `[-:-:-][""]` // Reset formatting and close region
|
||||||
|
} else {
|
||||||
|
// Other matches - use regular highlighting
|
||||||
|
highlightStart = fmt.Sprintf(`["%s"][gold:red:u]`, regionId) // Other matches with region and highlight
|
||||||
|
highlightEnd = `[-:-:-][""]` // Reset formatting and close region
|
||||||
|
}
|
||||||
|
result.WriteString(highlightStart)
|
||||||
|
result.WriteString(actualText)
|
||||||
|
result.WriteString(highlightEnd)
|
||||||
|
lastEnd = endPos
|
||||||
|
}
|
||||||
|
// Add the rest of the text after the last processed match
|
||||||
|
if lastEnd < len(text) {
|
||||||
|
result.WriteString(text[lastEnd:])
|
||||||
|
}
|
||||||
|
return result.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// searchNext finds the next occurrence of the search term
|
||||||
|
func searchNext() {
|
||||||
|
if len(searchResults) == 0 {
|
||||||
|
showToast("search", "No search results to navigate")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
searchIndex = (searchIndex + 1) % len(searchResults)
|
||||||
|
highlightCurrentMatch()
|
||||||
|
}
|
||||||
|
|
||||||
|
// searchPrev finds the previous occurrence of the search term
|
||||||
|
func searchPrev() {
|
||||||
|
if len(searchResults) == 0 {
|
||||||
|
showToast("search", "No search results to navigate")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if searchIndex == 0 {
|
||||||
|
searchIndex = len(searchResults) - 1
|
||||||
|
} else {
|
||||||
|
searchIndex--
|
||||||
|
}
|
||||||
|
highlightCurrentMatch()
|
||||||
|
}
|
||||||
|
|
||||||
|
// == tab completion ==
|
||||||
|
|
||||||
|
func scanFiles(dir, filter string) []string {
|
||||||
|
const maxDepth = 3
|
||||||
|
const maxFiles = 50
|
||||||
|
var files []string
|
||||||
|
var scanRecursive func(currentDir string, currentDepth int, relPath string)
|
||||||
|
scanRecursive = func(currentDir string, currentDepth int, relPath string) {
|
||||||
|
if len(files) >= maxFiles {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if currentDepth > maxDepth {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
entries, err := os.ReadDir(currentDir)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for _, entry := range entries {
|
||||||
|
if len(files) >= maxFiles {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
name := entry.Name()
|
||||||
|
if strings.HasPrefix(name, ".") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fullPath := name
|
||||||
|
if relPath != "" {
|
||||||
|
fullPath = relPath + "/" + name
|
||||||
|
}
|
||||||
|
if entry.IsDir() {
|
||||||
|
// Recursively scan subdirectories
|
||||||
|
scanRecursive(filepath.Join(currentDir, name), currentDepth+1, fullPath)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Check if file matches filter
|
||||||
|
if filter == "" || strings.HasPrefix(strings.ToLower(fullPath), strings.ToLower(filter)) {
|
||||||
|
files = append(files, fullPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
scanRecursive(dir, 0, "")
|
||||||
|
return files
|
||||||
|
}
|
||||||
|
|
||||||
|
// models logic that is too complex for models package
|
||||||
|
func MsgToText(i int, m *models.RoleMsg) string {
|
||||||
|
var contentStr string
|
||||||
|
var imageIndicators []string
|
||||||
|
if !m.HasContentParts {
|
||||||
|
contentStr = m.Content
|
||||||
|
} else {
|
||||||
|
var textParts []string
|
||||||
|
for _, part := range m.ContentParts {
|
||||||
|
switch p := part.(type) {
|
||||||
|
case models.TextContentPart:
|
||||||
|
if p.Type == "text" {
|
||||||
|
textParts = append(textParts, p.Text)
|
||||||
|
}
|
||||||
|
case models.ImageContentPart:
|
||||||
|
displayPath := p.Path
|
||||||
|
if displayPath == "" {
|
||||||
|
displayPath = "image"
|
||||||
|
} else {
|
||||||
|
displayPath = extractDisplayPath(displayPath, cfg.FilePickerDir)
|
||||||
|
}
|
||||||
|
imageIndicators = append(imageIndicators, fmt.Sprintf("[orange::i][image: %s][-:-:-]", displayPath))
|
||||||
|
case map[string]any:
|
||||||
|
if partType, exists := p["type"]; exists {
|
||||||
|
switch partType {
|
||||||
|
case "text":
|
||||||
|
if textVal, textExists := p["text"]; textExists {
|
||||||
|
if textStr, isStr := textVal.(string); isStr {
|
||||||
|
textParts = append(textParts, textStr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case "image_url":
|
||||||
|
var displayPath string
|
||||||
|
if pathVal, pathExists := p["path"]; pathExists {
|
||||||
|
if pathStr, isStr := pathVal.(string); isStr && pathStr != "" {
|
||||||
|
displayPath = extractDisplayPath(pathStr, cfg.FilePickerDir)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if displayPath == "" {
|
||||||
|
displayPath = "image"
|
||||||
|
}
|
||||||
|
imageIndicators = append(imageIndicators, fmt.Sprintf("[orange::i][image: %s][-:-:-]", displayPath))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
contentStr = strings.Join(textParts, " ") + " "
|
||||||
|
}
|
||||||
|
contentStr, _ = strings.CutPrefix(contentStr, m.Role+":")
|
||||||
|
icon := fmt.Sprintf("(%d) <%s>: ", i, m.Role)
|
||||||
|
var finalContent strings.Builder
|
||||||
|
if len(imageIndicators) > 0 {
|
||||||
|
for _, indicator := range imageIndicators {
|
||||||
|
finalContent.WriteString(indicator)
|
||||||
|
finalContent.WriteString("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
finalContent.WriteString(contentStr)
|
||||||
|
if m.Stats != nil {
|
||||||
|
fmt.Fprintf(&finalContent, "\n[gray::i][%d tok, %.1fs, %.1f t/s][-:-:-]", m.Stats.Tokens, m.Stats.Duration, m.Stats.TokensPerSec)
|
||||||
|
}
|
||||||
|
textMsg := fmt.Sprintf("[-:-:b]%s[-:-:-]\n%s\n", icon, finalContent.String())
|
||||||
|
return strings.ReplaceAll(textMsg, "\n\n", "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractDisplayPath returns a path suitable for display, potentially relative to imageBaseDir
|
||||||
|
func extractDisplayPath(p, bp string) string {
|
||||||
|
if p == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
// If base directory is set, try to make path relative to it
|
||||||
|
if bp != "" {
|
||||||
|
if rel, err := filepath.Rel(bp, p); err == nil {
|
||||||
|
// Check if relative path doesn't start with ".." (meaning it's within base dir)
|
||||||
|
// If it starts with "..", we might still want to show it as relative
|
||||||
|
// but for now we show full path if it goes outside base dir
|
||||||
|
if !strings.HasPrefix(rel, "..") {
|
||||||
|
p = rel
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Truncate long paths to last 60 characters if needed
|
||||||
|
if len(p) > 60 {
|
||||||
|
return "..." + p[len(p)-60:]
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func getValidKnowToRecipient(msg *models.RoleMsg) (string, bool) {
|
||||||
|
if cfg == nil || !cfg.CharSpecificContextEnabled {
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
// case where all roles are in the tag => public message
|
||||||
|
cr := listChatRoles()
|
||||||
|
slices.Sort(cr)
|
||||||
|
slices.Sort(msg.KnownTo)
|
||||||
|
if slices.Equal(cr, msg.KnownTo) {
|
||||||
|
logger.Info("got msg with tag mentioning every role")
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
// Check each character in the KnownTo list
|
||||||
|
for _, recipient := range msg.KnownTo {
|
||||||
|
if recipient == msg.Role || recipient == cfg.ToolRole {
|
||||||
|
// weird cases, skip
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Skip if this is the user character (user handles their own turn)
|
||||||
|
// If user is in KnownTo, stop processing - it's the user's turn
|
||||||
|
if recipient == cfg.UserRole || recipient == cfg.WriteNextMsgAs {
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
return recipient, true
|
||||||
|
}
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
|
||||||
|
// triggerPrivateMessageResponses checks if a message was sent privately to specific characters
|
||||||
|
// and triggers those non-user characters to respond
|
||||||
|
func triggerPrivateMessageResponses(msg *models.RoleMsg) {
|
||||||
|
recipient, ok := getValidKnowToRecipient(msg)
|
||||||
|
if !ok || recipient == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Trigger the recipient character to respond
|
||||||
|
triggerMsg := recipient + ":\n"
|
||||||
|
// Send empty message so LLM continues naturally from the conversation
|
||||||
|
crr := &models.ChatRoundReq{
|
||||||
|
UserMsg: triggerMsg,
|
||||||
|
Role: recipient,
|
||||||
|
Resume: true,
|
||||||
|
}
|
||||||
|
fmt.Fprintf(textView, "\n[-:-:b](%d) ", len(chatBody.Messages))
|
||||||
|
fmt.Fprint(textView, roleToIcon(recipient))
|
||||||
|
fmt.Fprint(textView, "[-:-:-]\n")
|
||||||
|
chatRoundChan <- crr
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetCardByRole(role string) *models.CharCard {
|
||||||
|
cardID, ok := roleToID[role]
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return sysMap[cardID]
|
||||||
|
}
|
||||||
|
|||||||
350
llm.go
350
llm.go
@@ -3,42 +3,19 @@ package main
|
|||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
|
"gf-lt/tools"
|
||||||
"io"
|
"io"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
var imageAttachmentPath string // Global variable to track image attachment for next message
|
var imageAttachmentPath string // Global variable to track image attachment for next message
|
||||||
var lastImg string // for ctrl+j
|
var lastImg string // for ctrl+j
|
||||||
var RAGMsg = "Retrieved context for user's query:\n"
|
|
||||||
|
|
||||||
// addPersonaSuffixToLastUserMessage adds the persona suffix to the last user message
|
// containsToolSysMsg checks if the tools.ToolSysMsg already exists in the chat body
|
||||||
// to indicate to the assistant who it should reply as
|
|
||||||
func addPersonaSuffixToLastUserMessage(messages []models.RoleMsg, persona string) []models.RoleMsg {
|
|
||||||
if len(messages) == 0 {
|
|
||||||
return messages
|
|
||||||
}
|
|
||||||
// // Find the last user message to modify
|
|
||||||
// for i := len(messages) - 1; i >= 0; i-- {
|
|
||||||
// if messages[i].Role == cfg.UserRole || messages[i].Role == "user" {
|
|
||||||
// // Create a copy of the message to avoid modifying the original
|
|
||||||
// modifiedMsg := messages[i]
|
|
||||||
// modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":"
|
|
||||||
// messages[i] = modifiedMsg
|
|
||||||
// break
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
modifiedMsg := messages[len(messages)-1]
|
|
||||||
modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":\n"
|
|
||||||
messages[len(messages)-1] = modifiedMsg
|
|
||||||
return messages
|
|
||||||
}
|
|
||||||
|
|
||||||
// containsToolSysMsg checks if the toolSysMsg already exists in the chat body
|
|
||||||
func containsToolSysMsg() bool {
|
func containsToolSysMsg() bool {
|
||||||
for _, msg := range chatBody.Messages {
|
for i := range chatBody.Messages {
|
||||||
if msg.Role == cfg.ToolRole && msg.Content == toolSysMsg {
|
if chatBody.Messages[i].Role == cfg.ToolRole && chatBody.Messages[i].Content == tools.ToolSysMsg {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -86,11 +63,11 @@ type ChunkParser interface {
|
|||||||
func choseChunkParser() {
|
func choseChunkParser() {
|
||||||
chunkParser = LCPCompletion{}
|
chunkParser = LCPCompletion{}
|
||||||
switch cfg.CurrentAPI {
|
switch cfg.CurrentAPI {
|
||||||
case "http://localhost:8080/completion":
|
case "http://localhost:8080/completion", "http://127.0.0.1:8080/completion":
|
||||||
chunkParser = LCPCompletion{}
|
chunkParser = LCPCompletion{}
|
||||||
logger.Debug("chosen lcpcompletion", "link", cfg.CurrentAPI)
|
logger.Debug("chosen lcpcompletion", "link", cfg.CurrentAPI)
|
||||||
return
|
return
|
||||||
case "http://localhost:8080/v1/chat/completions":
|
case "http://localhost:8080/v1/chat/completions", "http://127.0.0.1:8080/v1/chat/completions":
|
||||||
chunkParser = LCPChat{}
|
chunkParser = LCPChat{}
|
||||||
logger.Debug("chosen lcpchat", "link", cfg.CurrentAPI)
|
logger.Debug("chosen lcpchat", "link", cfg.CurrentAPI)
|
||||||
return
|
return
|
||||||
@@ -111,6 +88,11 @@ func choseChunkParser() {
|
|||||||
logger.Debug("chosen openrouterchat", "link", cfg.CurrentAPI)
|
logger.Debug("chosen openrouterchat", "link", cfg.CurrentAPI)
|
||||||
return
|
return
|
||||||
default:
|
default:
|
||||||
|
logger.Warn("unexpected case, assuming llama.cpp on non default address", "link", cfg.CurrentAPI)
|
||||||
|
if strings.Contains(cfg.CurrentAPI, "chat") {
|
||||||
|
chunkParser = LCPChat{}
|
||||||
|
return
|
||||||
|
}
|
||||||
chunkParser = LCPCompletion{}
|
chunkParser = LCPCompletion{}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -142,73 +124,69 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
logger.Debug("formmsg lcpcompletion", "link", cfg.CurrentAPI)
|
logger.Debug("formmsg lcpcompletion", "link", cfg.CurrentAPI)
|
||||||
localImageAttachmentPath := imageAttachmentPath
|
localImageAttachmentPath := imageAttachmentPath
|
||||||
var multimodalData []string
|
var multimodalData []string
|
||||||
|
if msg != "" { // otherwise let the bot to continue
|
||||||
|
var newMsg models.RoleMsg
|
||||||
if localImageAttachmentPath != "" {
|
if localImageAttachmentPath != "" {
|
||||||
|
newMsg = models.NewMultimodalMsg(role, []any{})
|
||||||
|
newMsg.AddTextPart(msg)
|
||||||
imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
|
imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Error("failed to create image URL from path for completion",
|
logger.Error("failed to create image URL from path for completion",
|
||||||
"error", err, "path", localImageAttachmentPath)
|
"error", err, "path", localImageAttachmentPath)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
// Extract base64 part from data URL (e.g., "data:image/jpeg;base64,...")
|
newMsg.AddImagePart(imageURL, localImageAttachmentPath)
|
||||||
parts := strings.SplitN(imageURL, ",", 2)
|
|
||||||
if len(parts) == 2 {
|
|
||||||
multimodalData = append(multimodalData, parts[1])
|
|
||||||
} else {
|
|
||||||
logger.Error("invalid image data URL format", "url", imageURL)
|
|
||||||
return nil, errors.New("invalid image data URL format")
|
|
||||||
}
|
|
||||||
imageAttachmentPath = "" // Clear the attachment after use
|
imageAttachmentPath = "" // Clear the attachment after use
|
||||||
|
} else { // not a multimodal msg or image passed in tool call
|
||||||
|
newMsg = models.RoleMsg{Role: role, Content: msg}
|
||||||
}
|
}
|
||||||
if msg != "" { // otherwise let the bot to continue
|
|
||||||
newMsg := models.RoleMsg{Role: role, Content: msg}
|
|
||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||||
}
|
}
|
||||||
// if rag - add as system message to avoid conflicts with tool usage
|
|
||||||
if !resume && cfg.RAGEnabled {
|
|
||||||
um := chatBody.Messages[len(chatBody.Messages)-1].Content
|
|
||||||
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
|
|
||||||
ragResp, err := chatRagUse(um)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("failed to form a rag msg", "error", err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
logger.Debug("RAG response received", "response_len", len(ragResp),
|
|
||||||
"response_preview", ragResp[:min(len(ragResp), 100)])
|
|
||||||
// Use system role for RAG context to avoid conflicts with tool usage
|
|
||||||
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
|
|
||||||
chatBody.Messages = append(chatBody.Messages, ragMsg)
|
|
||||||
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
|
|
||||||
}
|
|
||||||
// sending description of the tools and how to use them
|
// sending description of the tools and how to use them
|
||||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: tools.ToolSysMsg})
|
||||||
}
|
}
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||||
|
// Build prompt and extract images inline as we process each message
|
||||||
messages := make([]string, len(filteredMessages))
|
messages := make([]string, len(filteredMessages))
|
||||||
for i, m := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
|
m := stripThinkingFromMsg(&filteredMessages[i])
|
||||||
messages[i] = m.ToPrompt()
|
messages[i] = m.ToPrompt()
|
||||||
|
// Extract images from this message and add marker inline
|
||||||
|
if len(m.ContentParts) > 0 {
|
||||||
|
for _, part := range m.ContentParts {
|
||||||
|
var imgURL string
|
||||||
|
// Check for struct type
|
||||||
|
if imgPart, ok := part.(models.ImageContentPart); ok {
|
||||||
|
imgURL = imgPart.ImageURL.URL
|
||||||
|
} else if partMap, ok := part.(map[string]any); ok {
|
||||||
|
// Check for map type (from JSON unmarshaling)
|
||||||
|
if partType, exists := partMap["type"]; exists && partType == "image_url" {
|
||||||
|
if imgURLMap, ok := partMap["image_url"].(map[string]any); ok {
|
||||||
|
if url, ok := imgURLMap["url"].(string); ok {
|
||||||
|
imgURL = url
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if imgURL != "" {
|
||||||
|
// Extract base64 part from data URL (e.g., "data:image/jpeg;base64,...")
|
||||||
|
parts := strings.SplitN(imgURL, ",", 2)
|
||||||
|
if len(parts) == 2 {
|
||||||
|
multimodalData = append(multimodalData, parts[1])
|
||||||
|
messages[i] += " <__media__>"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
prompt := strings.Join(messages, "\n")
|
prompt := strings.Join(messages, "\n")
|
||||||
// strings builder?
|
// needs to be after <__media__> if there are images
|
||||||
if !resume {
|
if !resume {
|
||||||
botMsgStart := "\n" + botPersona + ":\n"
|
botMsgStart := "\n" + botPersona + ":\n"
|
||||||
prompt += botMsgStart
|
prompt += botMsgStart
|
||||||
}
|
}
|
||||||
if cfg.ThinkUse && !cfg.ToolUse {
|
|
||||||
prompt += "<think>"
|
|
||||||
}
|
|
||||||
// Add multimodal media markers to the prompt text when multimodal data is present
|
|
||||||
// This is required by llama.cpp multimodal models so they know where to insert media
|
|
||||||
if len(multimodalData) > 0 {
|
|
||||||
// Add a media marker for each item in the multimodal data
|
|
||||||
var sb strings.Builder
|
|
||||||
sb.WriteString(prompt)
|
|
||||||
for range multimodalData {
|
|
||||||
sb.WriteString(" <__media__>") // llama.cpp default multimodal marker
|
|
||||||
}
|
|
||||||
prompt = sb.String()
|
|
||||||
}
|
|
||||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||||
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
|
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
|
||||||
payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
|
payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
|
||||||
@@ -252,17 +230,15 @@ func (op LCPChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle multiple choices safely
|
|
||||||
if len(llmchunk.Choices) == 0 {
|
if len(llmchunk.Choices) == 0 {
|
||||||
logger.Warn("LCPChat ParseChunk: no choices in response", "data", string(data))
|
logger.Warn("LCPChat empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
return &models.TextChunk{Finished: true}, nil
|
return &models.TextChunk{}, nil
|
||||||
}
|
}
|
||||||
|
lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
|
Chunk: lastChoice.Delta.Content,
|
||||||
|
Reasoning: lastChoice.Delta.ReasoningContent,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for tool calls in all choices, not just the last one
|
// Check for tool calls in all choices, not just the last one
|
||||||
for _, choice := range llmchunk.Choices {
|
for _, choice := range llmchunk.Choices {
|
||||||
if len(choice.Delta.ToolCalls) > 0 {
|
if len(choice.Delta.ToolCalls) > 0 {
|
||||||
@@ -277,8 +253,7 @@ func (op LCPChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
break // Process only the first tool call
|
break // Process only the first tool call
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if lastChoice.FinishReason == "stop" {
|
||||||
if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
|
|
||||||
if resp.Chunk != "" {
|
if resp.Chunk != "" {
|
||||||
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
||||||
}
|
}
|
||||||
@@ -311,7 +286,7 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
|
|||||||
// If image processing fails, fall back to simple text message
|
// If image processing fails, fall back to simple text message
|
||||||
newMsg = models.NewRoleMsg(role, msg)
|
newMsg = models.NewRoleMsg(role, msg)
|
||||||
} else {
|
} else {
|
||||||
newMsg.AddImagePart(imageURL)
|
newMsg.AddImagePart(imageURL, localImageAttachmentPath)
|
||||||
}
|
}
|
||||||
// Only clear the global image attachment after successfully processing it in this API call
|
// Only clear the global image attachment after successfully processing it in this API call
|
||||||
imageAttachmentPath = "" // Clear the attachment after use
|
imageAttachmentPath = "" // Clear the attachment after use
|
||||||
@@ -324,41 +299,31 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
|
|||||||
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
|
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
|
||||||
"content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
|
"content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
|
||||||
}
|
}
|
||||||
// if rag - add as system message to avoid conflicts with tool usage
|
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||||
if !resume && cfg.RAGEnabled {
|
|
||||||
um := chatBody.Messages[len(chatBody.Messages)-1].Content
|
|
||||||
logger.Debug("LCPChat: RAG is enabled, preparing RAG context", "user_message", um)
|
|
||||||
ragResp, err := chatRagUse(um)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("LCPChat: failed to form a rag msg", "error", err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
logger.Debug("LCPChat: RAG response received",
|
|
||||||
"response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
|
|
||||||
// Use system role for RAG context to avoid conflicts with tool usage
|
|
||||||
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
|
|
||||||
chatBody.Messages = append(chatBody.Messages, ragMsg)
|
|
||||||
logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role,
|
|
||||||
"rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages))
|
|
||||||
}
|
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
|
||||||
// openai /v1/chat does not support custom roles; needs to be user, assistant, system
|
// openai /v1/chat does not support custom roles; needs to be user, assistant, system
|
||||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||||
if cfg.AutoTurn && !resume {
|
|
||||||
filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona)
|
|
||||||
}
|
|
||||||
bodyCopy := &models.ChatBody{
|
bodyCopy := &models.ChatBody{
|
||||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||||
Model: chatBody.Model,
|
Model: chatBody.Model,
|
||||||
Stream: chatBody.Stream,
|
Stream: chatBody.Stream,
|
||||||
}
|
}
|
||||||
for i, msg := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
if msg.Role == cfg.UserRole {
|
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||||
bodyCopy.Messages[i] = msg
|
switch strippedMsg.Role {
|
||||||
|
case cfg.UserRole:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
bodyCopy.Messages[i].Role = "user"
|
bodyCopy.Messages[i].Role = "user"
|
||||||
} else {
|
case cfg.AssistantRole:
|
||||||
bodyCopy.Messages[i] = msg
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
|
bodyCopy.Messages[i].Role = "assistant"
|
||||||
|
case cfg.ToolRole:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
|
bodyCopy.Messages[i].Role = "tool"
|
||||||
|
default:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
}
|
}
|
||||||
|
// Clear ToolCalls - they're stored in chat history for display but not sent to LLM
|
||||||
|
// bodyCopy.Messages[i].ToolCall = nil
|
||||||
}
|
}
|
||||||
// Clean null/empty messages to prevent API issues
|
// Clean null/empty messages to prevent API issues
|
||||||
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
|
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
|
||||||
@@ -367,7 +332,7 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
|
|||||||
Tools: nil,
|
Tools: nil,
|
||||||
}
|
}
|
||||||
if cfg.ToolUse && !resume && role != cfg.ToolRole {
|
if cfg.ToolUse && !resume && role != cfg.ToolRole {
|
||||||
req.Tools = baseTools // set tools to use
|
req.Tools = tools.BaseTools // set tools to use
|
||||||
}
|
}
|
||||||
data, err := json.Marshal(req)
|
data, err := json.Marshal(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -388,6 +353,10 @@ func (ds DeepSeekerCompletion) ParseChunk(data []byte) (*models.TextChunk, error
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return &models.TextChunk{}, nil
|
||||||
|
}
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
Chunk: llmchunk.Choices[0].Text,
|
Chunk: llmchunk.Choices[0].Text,
|
||||||
}
|
}
|
||||||
@@ -414,30 +383,14 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||||
}
|
}
|
||||||
// if rag - add as system message to avoid conflicts with tool usage
|
|
||||||
if !resume && cfg.RAGEnabled {
|
|
||||||
um := chatBody.Messages[len(chatBody.Messages)-1].Content
|
|
||||||
logger.Debug("DeepSeekerCompletion: RAG is enabled, preparing RAG context", "user_message", um)
|
|
||||||
ragResp, err := chatRagUse(um)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("DeepSeekerCompletion: failed to form a rag msg", "error", err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
logger.Debug("DeepSeekerCompletion: RAG response received",
|
|
||||||
"response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
|
|
||||||
// Use system role for RAG context to avoid conflicts with tool usage
|
|
||||||
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
|
|
||||||
chatBody.Messages = append(chatBody.Messages, ragMsg)
|
|
||||||
logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages))
|
|
||||||
}
|
|
||||||
// sending description of the tools and how to use them
|
// sending description of the tools and how to use them
|
||||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: tools.ToolSysMsg})
|
||||||
}
|
}
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||||
messages := make([]string, len(filteredMessages))
|
messages := make([]string, len(filteredMessages))
|
||||||
for i, m := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
messages[i] = m.ToPrompt()
|
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
||||||
}
|
}
|
||||||
prompt := strings.Join(messages, "\n")
|
prompt := strings.Join(messages, "\n")
|
||||||
// strings builder?
|
// strings builder?
|
||||||
@@ -445,9 +398,6 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
botMsgStart := "\n" + botPersona + ":\n"
|
botMsgStart := "\n" + botPersona + ":\n"
|
||||||
prompt += botMsgStart
|
prompt += botMsgStart
|
||||||
}
|
}
|
||||||
if cfg.ThinkUse && !cfg.ToolUse {
|
|
||||||
prompt += "<think>"
|
|
||||||
}
|
|
||||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||||
"msg", msg, "resume", resume, "prompt", prompt)
|
"msg", msg, "resume", resume, "prompt", prompt)
|
||||||
payload := models.NewDSCompletionReq(prompt, chatBody.Model,
|
payload := models.NewDSCompletionReq(prompt, chatBody.Model,
|
||||||
@@ -472,6 +422,10 @@ func (ds DeepSeekerChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
resp := &models.TextChunk{}
|
resp := &models.TextChunk{}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return resp, nil
|
||||||
|
}
|
||||||
if llmchunk.Choices[0].FinishReason != "" {
|
if llmchunk.Choices[0].FinishReason != "" {
|
||||||
if llmchunk.Choices[0].Delta.Content != "" {
|
if llmchunk.Choices[0].Delta.Content != "" {
|
||||||
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
||||||
@@ -502,40 +456,35 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||||
}
|
}
|
||||||
// if rag - add as system message to avoid conflicts with tool usage
|
|
||||||
if !resume && cfg.RAGEnabled {
|
|
||||||
um := chatBody.Messages[len(chatBody.Messages)-1].Content
|
|
||||||
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
|
|
||||||
ragResp, err := chatRagUse(um)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("failed to form a rag msg", "error", err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
logger.Debug("RAG response received", "response_len", len(ragResp),
|
|
||||||
"response_preview", ragResp[:min(len(ragResp), 100)])
|
|
||||||
// Use system role for RAG context to avoid conflicts with tool usage
|
|
||||||
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
|
|
||||||
chatBody.Messages = append(chatBody.Messages, ragMsg)
|
|
||||||
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
|
|
||||||
}
|
|
||||||
// Create copy of chat body with standardized user role
|
// Create copy of chat body with standardized user role
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||||
if cfg.AutoTurn && !resume {
|
|
||||||
filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona)
|
|
||||||
}
|
|
||||||
bodyCopy := &models.ChatBody{
|
bodyCopy := &models.ChatBody{
|
||||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||||
Model: chatBody.Model,
|
Model: chatBody.Model,
|
||||||
Stream: chatBody.Stream,
|
Stream: chatBody.Stream,
|
||||||
}
|
}
|
||||||
for i, msg := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
if msg.Role == cfg.UserRole || i == 1 {
|
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||||
bodyCopy.Messages[i] = msg
|
switch strippedMsg.Role {
|
||||||
|
case cfg.UserRole:
|
||||||
|
if i == 1 {
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
bodyCopy.Messages[i].Role = "user"
|
bodyCopy.Messages[i].Role = "user"
|
||||||
} else {
|
} else {
|
||||||
bodyCopy.Messages[i] = msg
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
}
|
}
|
||||||
|
case cfg.AssistantRole:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
|
bodyCopy.Messages[i].Role = "assistant"
|
||||||
|
case cfg.ToolRole:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
|
bodyCopy.Messages[i].Role = "tool"
|
||||||
|
default:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
|
}
|
||||||
|
// Clear ToolCalls - they're stored in chat history for display but not sent to LLM
|
||||||
|
// bodyCopy.Messages[i].ToolCall = nil
|
||||||
}
|
}
|
||||||
// Clean null/empty messages to prevent API issues
|
// Clean null/empty messages to prevent API issues
|
||||||
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
|
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
|
||||||
@@ -559,6 +508,10 @@ func (or OpenRouterCompletion) ParseChunk(data []byte) (*models.TextChunk, error
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return &models.TextChunk{}, nil
|
||||||
|
}
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Text,
|
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Text,
|
||||||
}
|
}
|
||||||
@@ -582,30 +535,14 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||||
}
|
}
|
||||||
// if rag - add as system message to avoid conflicts with tool usage
|
|
||||||
if !resume && cfg.RAGEnabled {
|
|
||||||
um := chatBody.Messages[len(chatBody.Messages)-1].Content
|
|
||||||
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
|
|
||||||
ragResp, err := chatRagUse(um)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("failed to form a rag msg", "error", err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
logger.Debug("RAG response received", "response_len",
|
|
||||||
len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
|
|
||||||
// Use system role for RAG context to avoid conflicts with tool usage
|
|
||||||
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
|
|
||||||
chatBody.Messages = append(chatBody.Messages, ragMsg)
|
|
||||||
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
|
|
||||||
}
|
|
||||||
// sending description of the tools and how to use them
|
// sending description of the tools and how to use them
|
||||||
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
|
||||||
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
|
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: tools.ToolSysMsg})
|
||||||
}
|
}
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||||
messages := make([]string, len(filteredMessages))
|
messages := make([]string, len(filteredMessages))
|
||||||
for i, m := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
messages[i] = m.ToPrompt()
|
messages[i] = stripThinkingFromMsg(&filteredMessages[i]).ToPrompt()
|
||||||
}
|
}
|
||||||
prompt := strings.Join(messages, "\n")
|
prompt := strings.Join(messages, "\n")
|
||||||
// strings builder?
|
// strings builder?
|
||||||
@@ -613,9 +550,6 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
|
|||||||
botMsgStart := "\n" + botPersona + ":\n"
|
botMsgStart := "\n" + botPersona + ":\n"
|
||||||
prompt += botMsgStart
|
prompt += botMsgStart
|
||||||
}
|
}
|
||||||
if cfg.ThinkUse && !cfg.ToolUse {
|
|
||||||
prompt += "<think>"
|
|
||||||
}
|
|
||||||
stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
|
stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
|
||||||
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
|
||||||
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
|
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
|
||||||
@@ -640,12 +574,18 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
logger.Error("failed to decode", "error", err, "line", string(data))
|
logger.Error("failed to decode", "error", err, "line", string(data))
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if len(llmchunk.Choices) == 0 {
|
||||||
|
logger.Warn("empty chunk choices", "raw_data", string(data), "chunk", llmchunk)
|
||||||
|
return &models.TextChunk{}, nil
|
||||||
|
}
|
||||||
|
lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
|
||||||
resp := &models.TextChunk{
|
resp := &models.TextChunk{
|
||||||
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
|
Chunk: lastChoice.Delta.Content,
|
||||||
|
Reasoning: lastChoice.Delta.Reasoning,
|
||||||
}
|
}
|
||||||
// Handle tool calls similar to LCPChat
|
// Handle tool calls similar to LCPChat
|
||||||
if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
|
if len(lastChoice.Delta.ToolCalls) > 0 {
|
||||||
toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
|
toolCall := lastChoice.Delta.ToolCalls[0]
|
||||||
resp.ToolChunk = toolCall.Function.Arguments
|
resp.ToolChunk = toolCall.Function.Arguments
|
||||||
fname := toolCall.Function.Name
|
fname := toolCall.Function.Name
|
||||||
if fname != "" {
|
if fname != "" {
|
||||||
@@ -657,7 +597,7 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
|
|||||||
if resp.ToolChunk != "" {
|
if resp.ToolChunk != "" {
|
||||||
resp.ToolResp = true
|
resp.ToolResp = true
|
||||||
}
|
}
|
||||||
if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
|
if lastChoice.FinishReason == "stop" {
|
||||||
if resp.Chunk != "" {
|
if resp.Chunk != "" {
|
||||||
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
|
||||||
}
|
}
|
||||||
@@ -690,7 +630,7 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
// If image processing fails, fall back to simple text message
|
// If image processing fails, fall back to simple text message
|
||||||
newMsg = models.NewRoleMsg(role, msg)
|
newMsg = models.NewRoleMsg(role, msg)
|
||||||
} else {
|
} else {
|
||||||
newMsg.AddImagePart(imageURL)
|
newMsg.AddImagePart(imageURL, localImageAttachmentPath)
|
||||||
}
|
}
|
||||||
// Only clear the global image attachment after successfully processing it in this API call
|
// Only clear the global image attachment after successfully processing it in this API call
|
||||||
imageAttachmentPath = "" // Clear the attachment after use
|
imageAttachmentPath = "" // Clear the attachment after use
|
||||||
@@ -701,46 +641,38 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
|
|||||||
newMsg = *processMessageTag(&newMsg)
|
newMsg = *processMessageTag(&newMsg)
|
||||||
chatBody.Messages = append(chatBody.Messages, newMsg)
|
chatBody.Messages = append(chatBody.Messages, newMsg)
|
||||||
}
|
}
|
||||||
// if rag - add as system message to avoid conflicts with tool usage
|
|
||||||
if !resume && cfg.RAGEnabled {
|
|
||||||
um := chatBody.Messages[len(chatBody.Messages)-1].Content
|
|
||||||
logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
|
|
||||||
ragResp, err := chatRagUse(um)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("failed to form a rag msg", "error", err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
logger.Debug("RAG response received", "response_len", len(ragResp),
|
|
||||||
"response_preview", ragResp[:min(len(ragResp), 100)])
|
|
||||||
// Use system role for RAG context to avoid conflicts with tool usage
|
|
||||||
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
|
|
||||||
chatBody.Messages = append(chatBody.Messages, ragMsg)
|
|
||||||
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
|
|
||||||
}
|
|
||||||
// Create copy of chat body with standardized user role
|
// Create copy of chat body with standardized user role
|
||||||
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
|
filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
|
||||||
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
// Add persona suffix to the last user message to indicate who the assistant should reply as
|
||||||
if cfg.AutoTurn && !resume {
|
|
||||||
filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona)
|
|
||||||
}
|
|
||||||
bodyCopy := &models.ChatBody{
|
bodyCopy := &models.ChatBody{
|
||||||
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
Messages: make([]models.RoleMsg, len(filteredMessages)),
|
||||||
Model: chatBody.Model,
|
Model: chatBody.Model,
|
||||||
Stream: chatBody.Stream,
|
Stream: chatBody.Stream,
|
||||||
}
|
}
|
||||||
for i, msg := range filteredMessages {
|
for i := range filteredMessages {
|
||||||
bodyCopy.Messages[i] = msg
|
strippedMsg := *stripThinkingFromMsg(&filteredMessages[i])
|
||||||
// Standardize role if it's a user role
|
switch strippedMsg.Role {
|
||||||
if bodyCopy.Messages[i].Role == cfg.UserRole {
|
case cfg.UserRole:
|
||||||
bodyCopy.Messages[i] = msg
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
bodyCopy.Messages[i].Role = "user"
|
bodyCopy.Messages[i].Role = "user"
|
||||||
|
case cfg.AssistantRole:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
|
bodyCopy.Messages[i].Role = "assistant"
|
||||||
|
case cfg.ToolRole:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
|
bodyCopy.Messages[i].Role = "tool"
|
||||||
|
default:
|
||||||
|
bodyCopy.Messages[i] = strippedMsg
|
||||||
}
|
}
|
||||||
|
// Clear ToolCalls - they're stored in chat history for display but not sent to LLM
|
||||||
|
// literally deletes data that we need
|
||||||
|
// bodyCopy.Messages[i].ToolCall = nil
|
||||||
}
|
}
|
||||||
// Clean null/empty messages to prevent API issues
|
// Clean null/empty messages to prevent API issues
|
||||||
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
|
bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
|
||||||
orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps)
|
orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps, cfg.ReasoningEffort)
|
||||||
if cfg.ToolUse && !resume && role != cfg.ToolRole {
|
if cfg.ToolUse && !resume && role != cfg.ToolRole {
|
||||||
orBody.Tools = baseTools // set tools to use
|
orBody.Tools = tools.BaseTools // set tools to use
|
||||||
}
|
}
|
||||||
data, err := json.Marshal(orBody)
|
data, err := json.Marshal(orBody)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
265
main.go
265
main.go
@@ -1,30 +1,50 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bufio"
|
||||||
"flag"
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"gf-lt/models"
|
||||||
|
"gf-lt/pngmeta"
|
||||||
|
"os"
|
||||||
|
"slices"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync/atomic"
|
||||||
|
|
||||||
"github.com/rivo/tview"
|
"github.com/rivo/tview"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
boolColors = map[bool]string{true: "green", false: "red"}
|
boolColors = map[bool]string{true: "green", false: "red"}
|
||||||
botRespMode = false
|
botRespMode atomic.Bool
|
||||||
|
toolRunningMode atomic.Bool
|
||||||
editMode = false
|
editMode = false
|
||||||
roleEditMode = false
|
roleEditMode = false
|
||||||
injectRole = true
|
injectRole = true
|
||||||
selectedIndex = int(-1)
|
selectedIndex = int(-1)
|
||||||
shellMode = false
|
shellMode = false
|
||||||
indexLineCompletion = "F12 to show keys help | llm turn: [%s:-:b]%v[-:-:-] (F6) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [%s:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [%s:-:b]%v[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | recording: [%s:-:b]%v[-:-:-] (ctrl+r) | writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role injection (alt+7) [%s:-:b]%v[-:-:-]"
|
shellHistory []string
|
||||||
|
shellHistoryPos int = -1
|
||||||
|
thinkingCollapsed = false
|
||||||
|
toolCollapsed = true
|
||||||
|
statusLineTempl = "help (F12) | chat: [orange:-:b]%s[-:-:-] (F1) | [%s:-:b]tool use[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | [%s:-:b]skip LLM resp[-:-:-] (F10) | API: [orange:-:b]%s[-:-:-] (ctrl+v)\nwriting as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x)"
|
||||||
focusSwitcher = map[tview.Primitive]tview.Primitive{}
|
focusSwitcher = map[tview.Primitive]tview.Primitive{}
|
||||||
|
app *tview.Application
|
||||||
|
cliCardPath string
|
||||||
|
cliContinue bool
|
||||||
|
cliMsg string
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
apiPort := flag.Int("port", 0, "port to host api")
|
flag.BoolVar(&cfg.CLIMode, "cli", false, "Run in CLI mode without TUI")
|
||||||
|
flag.BoolVar(&cfg.ToolUse, "tools", true, "run with tools")
|
||||||
|
flag.StringVar(&cliCardPath, "card", "", "Path to syscard JSON file")
|
||||||
|
flag.BoolVar(&cliContinue, "continue", false, "Continue from last chat (by agent or card)")
|
||||||
|
flag.StringVar(&cliMsg, "msg", "", "Send message and exit (one-shot mode)")
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
if apiPort != nil && *apiPort > 3000 {
|
if cfg.CLIMode {
|
||||||
srv := Server{}
|
runCLIMode()
|
||||||
srv.ListenToRequests(strconv.Itoa(*apiPort))
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
pages.AddPage("main", flex, true, true)
|
pages.AddPage("main", flex, true, true)
|
||||||
@@ -34,3 +54,236 @@ func main() {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func runCLIMode() {
|
||||||
|
outputHandler = &CLIOutputHandler{}
|
||||||
|
cliRespDone = make(chan bool, 1)
|
||||||
|
if cliCardPath != "" {
|
||||||
|
card, err := pngmeta.ReadCardJson(cliCardPath)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Failed to load syscard: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
cfg.AssistantRole = card.Role
|
||||||
|
sysMap[card.ID] = card
|
||||||
|
roleToID[card.Role] = card.ID
|
||||||
|
charToStart(card.Role, false)
|
||||||
|
fmt.Printf("Loaded syscard: %s (%s)\n", card.Role, card.FilePath)
|
||||||
|
}
|
||||||
|
if cliContinue {
|
||||||
|
if cliCardPath != "" {
|
||||||
|
history, err := loadAgentsLastChat(cfg.AssistantRole)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("No previous chat found for %s, starting new chat\n", cfg.AssistantRole)
|
||||||
|
startNewCLIChat()
|
||||||
|
} else {
|
||||||
|
chatBody.Messages = history
|
||||||
|
fmt.Printf("Continued chat: %s\n", activeChatName)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
chatBody.Messages = loadOldChatOrGetNew()
|
||||||
|
fmt.Printf("Continued chat: %s\n", activeChatName)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
startNewCLIChat()
|
||||||
|
}
|
||||||
|
printCLIWelcome()
|
||||||
|
go func() {
|
||||||
|
<-ctx.Done()
|
||||||
|
os.Exit(0)
|
||||||
|
}()
|
||||||
|
if cliMsg != "" {
|
||||||
|
persona := cfg.UserRole
|
||||||
|
if cfg.WriteNextMsgAs != "" {
|
||||||
|
persona = cfg.WriteNextMsgAs
|
||||||
|
}
|
||||||
|
chatRoundChan <- &models.ChatRoundReq{Role: persona, UserMsg: cliMsg}
|
||||||
|
<-cliRespDone
|
||||||
|
fmt.Println()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
scanner := bufio.NewScanner(os.Stdin)
|
||||||
|
for {
|
||||||
|
fmt.Print("> ")
|
||||||
|
if !scanner.Scan() {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
msg := scanner.Text()
|
||||||
|
if msg == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(msg, "/") {
|
||||||
|
if !handleCLICommand(msg) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
fmt.Println()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
persona := cfg.UserRole
|
||||||
|
if cfg.WriteNextMsgAs != "" {
|
||||||
|
persona = cfg.WriteNextMsgAs
|
||||||
|
}
|
||||||
|
chatRoundChan <- &models.ChatRoundReq{Role: persona, UserMsg: msg}
|
||||||
|
<-cliRespDone
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func printCLIWelcome() {
|
||||||
|
fmt.Println("CLI Mode started. Type your messages or commands.")
|
||||||
|
fmt.Println("Type /help for available commands.")
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
func printCLIHelp() {
|
||||||
|
fmt.Println("Available commands:")
|
||||||
|
fmt.Println(" /help, /h - Show this help message")
|
||||||
|
fmt.Println(" /new, /n - Start a new chat (clears conversation)")
|
||||||
|
fmt.Println(" /card <path>, /c <path> - Load a different syscard")
|
||||||
|
fmt.Println(" /undo, /u - Delete last message")
|
||||||
|
fmt.Println(" /history, /ls - List chat history")
|
||||||
|
fmt.Println(" /load <name> - Load a specific chat by name")
|
||||||
|
fmt.Println(" /model <name>, /m <name> - Switch model")
|
||||||
|
fmt.Println(" /api <index>, /a <index> - Switch API link (no index to list)")
|
||||||
|
fmt.Println(" /quit, /q, /exit - Exit CLI mode")
|
||||||
|
fmt.Println()
|
||||||
|
fmt.Printf("Current syscard: %s\n", cfg.AssistantRole)
|
||||||
|
fmt.Printf("Current model: %s\n", chatBody.Model)
|
||||||
|
fmt.Printf("Current API: %s\n", cfg.CurrentAPI)
|
||||||
|
fmt.Println()
|
||||||
|
}
|
||||||
|
|
||||||
|
func handleCLICommand(msg string) bool {
|
||||||
|
parts := strings.Fields(msg)
|
||||||
|
cmd := strings.ToLower(parts[0])
|
||||||
|
args := parts[1:]
|
||||||
|
|
||||||
|
switch cmd {
|
||||||
|
case "/help", "/h":
|
||||||
|
printCLIHelp()
|
||||||
|
case "/new", "/n":
|
||||||
|
startNewCLIChat()
|
||||||
|
fmt.Println("New chat started.")
|
||||||
|
fmt.Printf("Syscard: %s\n", cfg.AssistantRole)
|
||||||
|
fmt.Println()
|
||||||
|
case "/card", "/c":
|
||||||
|
if len(args) == 0 {
|
||||||
|
fmt.Println("Usage: /card <path>")
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
card, err := pngmeta.ReadCardJson(args[0])
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "Failed to load syscard: %v\n", err)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
cfg.AssistantRole = card.Role
|
||||||
|
sysMap[card.ID] = card
|
||||||
|
roleToID[card.Role] = card.ID
|
||||||
|
charToStart(card.Role, false)
|
||||||
|
startNewCLIChat()
|
||||||
|
fmt.Printf("Switched to syscard: %s (%s)\n", card.Role, card.FilePath)
|
||||||
|
case "/undo", "/u":
|
||||||
|
if len(chatBody.Messages) == 0 {
|
||||||
|
fmt.Println("No messages to delete.")
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
chatBody.Messages = chatBody.Messages[:len(chatBody.Messages)-1]
|
||||||
|
cliPrevOutput = ""
|
||||||
|
fmt.Println("Last message deleted.")
|
||||||
|
case "/history", "/ls":
|
||||||
|
fmt.Println("Chat history:")
|
||||||
|
for name := range chatMap {
|
||||||
|
marker := " "
|
||||||
|
if name == activeChatName {
|
||||||
|
marker = "* "
|
||||||
|
}
|
||||||
|
fmt.Printf("%s%s\n", marker, name)
|
||||||
|
}
|
||||||
|
fmt.Println()
|
||||||
|
case "/load":
|
||||||
|
if len(args) == 0 {
|
||||||
|
fmt.Println("Usage: /load <name>")
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
name := args[0]
|
||||||
|
chat, ok := chatMap[name]
|
||||||
|
if !ok {
|
||||||
|
fmt.Printf("Chat not found: %s\n", name)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
history, err := chat.ToHistory()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Failed to load chat: %v\n", err)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
chatBody.Messages = history
|
||||||
|
activeChatName = name
|
||||||
|
cfg.AssistantRole = chat.Agent
|
||||||
|
fmt.Printf("Loaded chat: %s\n", name)
|
||||||
|
case "/model", "/m":
|
||||||
|
getModelListForAPI := func(api string) []string {
|
||||||
|
if strings.Contains(api, "api.deepseek.com/") {
|
||||||
|
return []string{"deepseek-chat", "deepseek-reasoner"}
|
||||||
|
} else if strings.Contains(api, "openrouter.ai") {
|
||||||
|
return ORFreeModels
|
||||||
|
}
|
||||||
|
return LocalModels
|
||||||
|
}
|
||||||
|
modelList := getModelListForAPI(cfg.CurrentAPI)
|
||||||
|
if len(args) == 0 {
|
||||||
|
fmt.Println("Models:")
|
||||||
|
for i, model := range modelList {
|
||||||
|
marker := " "
|
||||||
|
if model == chatBody.Model {
|
||||||
|
marker = "* "
|
||||||
|
}
|
||||||
|
fmt.Printf("%s%d: %s\n", marker, i, model)
|
||||||
|
}
|
||||||
|
fmt.Printf("\nCurrent model: %s\n", chatBody.Model)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
// Try index first, then model name
|
||||||
|
if idx, err := strconv.Atoi(args[0]); err == nil && idx >= 0 && idx < len(modelList) {
|
||||||
|
chatBody.Model = modelList[idx]
|
||||||
|
fmt.Printf("Switched to model: %s\n", chatBody.Model)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if slices.Index(modelList, args[0]) < 0 {
|
||||||
|
fmt.Printf("Model '%s' not found. Use index or choose from:\n", args[0])
|
||||||
|
for i, model := range modelList {
|
||||||
|
fmt.Printf(" %d: %s\n", i, model)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
chatBody.Model = args[0]
|
||||||
|
fmt.Printf("Switched to model: %s\n", args[0])
|
||||||
|
case "/api", "/a":
|
||||||
|
if len(args) == 0 {
|
||||||
|
fmt.Println("API Links:")
|
||||||
|
for i, link := range cfg.ApiLinks {
|
||||||
|
marker := " "
|
||||||
|
if link == cfg.CurrentAPI {
|
||||||
|
marker = "* "
|
||||||
|
}
|
||||||
|
fmt.Printf("%s%d: %s\n", marker, i, link)
|
||||||
|
}
|
||||||
|
fmt.Printf("\nCurrent API: %s\n", cfg.CurrentAPI)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
idx := 0
|
||||||
|
fmt.Sscanf(args[0], "%d", &idx)
|
||||||
|
if idx < 0 || idx >= len(cfg.ApiLinks) {
|
||||||
|
fmt.Printf("Invalid index. Valid range: 0-%d\n", len(cfg.ApiLinks)-1)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
cfg.CurrentAPI = cfg.ApiLinks[idx]
|
||||||
|
fmt.Printf("Switched to API: %s\n", cfg.CurrentAPI)
|
||||||
|
case "/quit", "/q", "/exit":
|
||||||
|
fmt.Println("Goodbye!")
|
||||||
|
return false
|
||||||
|
default:
|
||||||
|
fmt.Printf("Unknown command: %s\n", msg)
|
||||||
|
fmt.Println("Type /help for available commands.")
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|||||||
42
main_test.go
42
main_test.go
@@ -1,42 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"gf-lt/config"
|
|
||||||
"gf-lt/models"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestRemoveThinking(t *testing.T) {
|
|
||||||
cases := []struct {
|
|
||||||
cb *models.ChatBody
|
|
||||||
toolMsgs uint8
|
|
||||||
}{
|
|
||||||
{cb: &models.ChatBody{
|
|
||||||
Stream: true,
|
|
||||||
Messages: []models.RoleMsg{
|
|
||||||
{Role: "tool", Content: "should be ommited"},
|
|
||||||
{Role: "system", Content: "should stay"},
|
|
||||||
{Role: "user", Content: "hello, how are you?"},
|
|
||||||
{Role: "assistant", Content: "Oh, hi. <think>I should thank user and continue the conversation</think> I am geat, thank you! How are you?"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
toolMsgs: uint8(1),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for i, tc := range cases {
|
|
||||||
t.Run(fmt.Sprintf("run_%d", i), func(t *testing.T) {
|
|
||||||
cfg = &config.Config{ToolRole: "tool"} // Initialize cfg.ToolRole for test
|
|
||||||
mNum := len(tc.cb.Messages)
|
|
||||||
removeThinking(tc.cb)
|
|
||||||
if len(tc.cb.Messages) != mNum-int(tc.toolMsgs) {
|
|
||||||
t.Errorf("failed to delete tools msg %v; expected %d, got %d", tc.cb.Messages, mNum-int(tc.toolMsgs), len(tc.cb.Messages))
|
|
||||||
}
|
|
||||||
for _, msg := range tc.cb.Messages {
|
|
||||||
if strings.Contains(msg.Content, "<think>") {
|
|
||||||
t.Errorf("msg contains think tag; msg: %s\n", msg.Content)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}) }
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,10 @@
|
|||||||
package models
|
package models
|
||||||
|
|
||||||
import "strings"
|
import (
|
||||||
|
"crypto/md5"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
// https://github.com/malfoyslastname/character-card-spec-v2/blob/main/spec_v2.md
|
// https://github.com/malfoyslastname/character-card-spec-v2/blob/main/spec_v2.md
|
||||||
// what a bloat; trim to Role->Msg pair and first msg
|
// what a bloat; trim to Role->Msg pair and first msg
|
||||||
@@ -31,6 +35,7 @@ func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
|
|||||||
fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
|
fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
|
||||||
sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)
|
sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)
|
||||||
return &CharCard{
|
return &CharCard{
|
||||||
|
ID: ComputeCardID(c.Name, fpath),
|
||||||
SysPrompt: sysPr,
|
SysPrompt: sysPr,
|
||||||
FirstMsg: fm,
|
FirstMsg: fm,
|
||||||
Role: c.Name,
|
Role: c.Name,
|
||||||
@@ -39,7 +44,12 @@ func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func ComputeCardID(role, filePath string) string {
|
||||||
|
return fmt.Sprintf("%x", md5.Sum([]byte(role+filePath)))
|
||||||
|
}
|
||||||
|
|
||||||
type CharCard struct {
|
type CharCard struct {
|
||||||
|
ID string `json:"id"`
|
||||||
SysPrompt string `json:"sys_prompt"`
|
SysPrompt string `json:"sys_prompt"`
|
||||||
FirstMsg string `json:"first_msg"`
|
FirstMsg string `json:"first_msg"`
|
||||||
Role string `json:"role"`
|
Role string `json:"role"`
|
||||||
|
|||||||
31
models/consts.go
Normal file
31
models/consts.go
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
package models
|
||||||
|
|
||||||
|
import "regexp"
|
||||||
|
|
||||||
|
const (
|
||||||
|
LoadedMark = "(loaded) "
|
||||||
|
ToolRespMultyType = "multimodel_content"
|
||||||
|
DefaultFirstMsg = "Hello! What can I do for you?"
|
||||||
|
BasicSysMsg = "Large Language Model that helps user with any of his requests."
|
||||||
|
)
|
||||||
|
|
||||||
|
type APIType int
|
||||||
|
|
||||||
|
const (
|
||||||
|
APITypeChat APIType = iota
|
||||||
|
APITypeCompletion
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
ToolCallRE = regexp.MustCompile(`__tool_call__\s*([\s\S]*?)__tool_call__`)
|
||||||
|
QuotesRE = regexp.MustCompile(`(".*?")`)
|
||||||
|
StarRE = regexp.MustCompile(`(\*.*?\*)`)
|
||||||
|
ThinkRE = regexp.MustCompile(`(?s)<think>.*?</think>`)
|
||||||
|
CodeBlockRE = regexp.MustCompile(`(?s)\x60{3}(?:.*?)\n(.*?)\n\s*\x60{3}\s*`)
|
||||||
|
SingleBacktickRE = regexp.MustCompile(`\x60([^\x60]*)\x60`)
|
||||||
|
RoleRE = regexp.MustCompile(`^(\w+):`)
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
SysLabels = []string{"assistant"}
|
||||||
|
)
|
||||||
@@ -1,8 +1,49 @@
|
|||||||
package models
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
type AudioFormat string
|
type AudioFormat string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
AFWav AudioFormat = "wav"
|
AFWav AudioFormat = "wav"
|
||||||
AFMP3 AudioFormat = "mp3"
|
AFMP3 AudioFormat = "mp3"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var threeOrMoreDashesRE = regexp.MustCompile(`-{3,}`)
|
||||||
|
|
||||||
|
// CleanText removes markdown and special characters that are not suitable for TTS
|
||||||
|
func CleanText(text string) string {
|
||||||
|
// Remove markdown-like characters that might interfere with TTS
|
||||||
|
text = strings.ReplaceAll(text, "*", "") // Bold/italic markers
|
||||||
|
text = strings.ReplaceAll(text, "#", "") // Headers
|
||||||
|
text = strings.ReplaceAll(text, "_", "") // Underline/italic markers
|
||||||
|
text = strings.ReplaceAll(text, "~", "") // Strikethrough markers
|
||||||
|
text = strings.ReplaceAll(text, "`", "") // Code markers
|
||||||
|
text = strings.ReplaceAll(text, "[", "") // Link brackets
|
||||||
|
text = strings.ReplaceAll(text, "]", "") // Link brackets
|
||||||
|
text = strings.ReplaceAll(text, "!", "") // Exclamation marks (if not punctuation)
|
||||||
|
// Remove HTML tags using regex
|
||||||
|
htmlTagRegex := regexp.MustCompile(`<[^>]*>`)
|
||||||
|
text = htmlTagRegex.ReplaceAllString(text, "")
|
||||||
|
// Split text into lines to handle table separators
|
||||||
|
lines := strings.Split(text, "\n")
|
||||||
|
var filteredLines []string
|
||||||
|
for _, line := range lines {
|
||||||
|
// Check if the line looks like a table separator (e.g., |----|, |===|, | - - - |)
|
||||||
|
// A table separator typically contains only |, -, =, and spaces
|
||||||
|
isTableSeparator := regexp.MustCompile(`^\s*\|\s*[-=\s]+\|\s*$`).MatchString(strings.TrimSpace(line))
|
||||||
|
if !isTableSeparator {
|
||||||
|
// If it's not a table separator, remove vertical bars but keep the content
|
||||||
|
processedLine := strings.ReplaceAll(line, "|", "")
|
||||||
|
filteredLines = append(filteredLines, processedLine)
|
||||||
|
}
|
||||||
|
// If it is a table separator, skip it (don't add to filteredLines)
|
||||||
|
}
|
||||||
|
text = strings.Join(filteredLines, "\n")
|
||||||
|
text = threeOrMoreDashesRE.ReplaceAllString(text, "")
|
||||||
|
text = strings.TrimSpace(text) // Remove leading/trailing whitespace
|
||||||
|
return text
|
||||||
|
}
|
||||||
|
|||||||
243
models/models.go
243
models/models.go
@@ -14,6 +14,12 @@ type FuncCall struct {
|
|||||||
Args map[string]string `json:"args"`
|
Args map[string]string `json:"args"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type ToolCall struct {
|
||||||
|
ID string `json:"id,omitempty"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
Args string `json:"arguments"`
|
||||||
|
}
|
||||||
|
|
||||||
type LLMResp struct {
|
type LLMResp struct {
|
||||||
Choices []struct {
|
Choices []struct {
|
||||||
FinishReason string `json:"finish_reason"`
|
FinishReason string `json:"finish_reason"`
|
||||||
@@ -52,6 +58,7 @@ type LLMRespChunk struct {
|
|||||||
Index int `json:"index"`
|
Index int `json:"index"`
|
||||||
Delta struct {
|
Delta struct {
|
||||||
Content string `json:"content"`
|
Content string `json:"content"`
|
||||||
|
ReasoningContent string `json:"reasoning_content"`
|
||||||
ToolCalls []ToolDeltaResp `json:"tool_calls"`
|
ToolCalls []ToolDeltaResp `json:"tool_calls"`
|
||||||
} `json:"delta"`
|
} `json:"delta"`
|
||||||
} `json:"choices"`
|
} `json:"choices"`
|
||||||
@@ -73,6 +80,7 @@ type TextChunk struct {
|
|||||||
ToolResp bool
|
ToolResp bool
|
||||||
FuncName string
|
FuncName string
|
||||||
ToolID string
|
ToolID string
|
||||||
|
Reasoning string // For models that send reasoning separately (OpenRouter, etc.)
|
||||||
}
|
}
|
||||||
|
|
||||||
type TextContentPart struct {
|
type TextContentPart struct {
|
||||||
@@ -82,6 +90,7 @@ type TextContentPart struct {
|
|||||||
|
|
||||||
type ImageContentPart struct {
|
type ImageContentPart struct {
|
||||||
Type string `json:"type"`
|
Type string `json:"type"`
|
||||||
|
Path string `json:"path,omitempty"` // Store original file path
|
||||||
ImageURL struct {
|
ImageURL struct {
|
||||||
URL string `json:"url"`
|
URL string `json:"url"`
|
||||||
} `json:"image_url"`
|
} `json:"image_url"`
|
||||||
@@ -93,24 +102,35 @@ type RoleMsg struct {
|
|||||||
Content string `json:"-"`
|
Content string `json:"-"`
|
||||||
ContentParts []any `json:"-"`
|
ContentParts []any `json:"-"`
|
||||||
ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages
|
ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages
|
||||||
|
ToolCall *ToolCall `json:"tool_call,omitempty"` // For assistant messages with tool calls
|
||||||
|
IsShellCommand bool `json:"is_shell_command,omitempty"` // True for shell command outputs (always shown)
|
||||||
KnownTo []string `json:"known_to,omitempty"`
|
KnownTo []string `json:"known_to,omitempty"`
|
||||||
hasContentParts bool // Flag to indicate which content type to marshal
|
Stats *ResponseStats `json:"stats"`
|
||||||
|
HasContentParts bool // Flag to indicate which content type to marshal
|
||||||
}
|
}
|
||||||
|
|
||||||
// MarshalJSON implements custom JSON marshaling for RoleMsg
|
// MarshalJSON implements custom JSON marshaling for RoleMsg
|
||||||
func (m *RoleMsg) MarshalJSON() ([]byte, error) {
|
//
|
||||||
if m.hasContentParts {
|
//nolint:gocritic
|
||||||
|
func (m RoleMsg) MarshalJSON() ([]byte, error) {
|
||||||
|
if m.HasContentParts {
|
||||||
// Use structured content format
|
// Use structured content format
|
||||||
aux := struct {
|
aux := struct {
|
||||||
Role string `json:"role"`
|
Role string `json:"role"`
|
||||||
Content []any `json:"content"`
|
Content []any `json:"content"`
|
||||||
ToolCallID string `json:"tool_call_id,omitempty"`
|
ToolCallID string `json:"tool_call_id,omitempty"`
|
||||||
|
ToolCall *ToolCall `json:"tool_call,omitempty"`
|
||||||
|
IsShellCommand bool `json:"is_shell_command,omitempty"`
|
||||||
KnownTo []string `json:"known_to,omitempty"`
|
KnownTo []string `json:"known_to,omitempty"`
|
||||||
|
Stats *ResponseStats `json:"stats,omitempty"`
|
||||||
}{
|
}{
|
||||||
Role: m.Role,
|
Role: m.Role,
|
||||||
Content: m.ContentParts,
|
Content: m.ContentParts,
|
||||||
ToolCallID: m.ToolCallID,
|
ToolCallID: m.ToolCallID,
|
||||||
|
ToolCall: m.ToolCall,
|
||||||
|
IsShellCommand: m.IsShellCommand,
|
||||||
KnownTo: m.KnownTo,
|
KnownTo: m.KnownTo,
|
||||||
|
Stats: m.Stats,
|
||||||
}
|
}
|
||||||
return json.Marshal(aux)
|
return json.Marshal(aux)
|
||||||
} else {
|
} else {
|
||||||
@@ -119,12 +139,18 @@ func (m *RoleMsg) MarshalJSON() ([]byte, error) {
|
|||||||
Role string `json:"role"`
|
Role string `json:"role"`
|
||||||
Content string `json:"content"`
|
Content string `json:"content"`
|
||||||
ToolCallID string `json:"tool_call_id,omitempty"`
|
ToolCallID string `json:"tool_call_id,omitempty"`
|
||||||
|
ToolCall *ToolCall `json:"tool_call,omitempty"`
|
||||||
|
IsShellCommand bool `json:"is_shell_command,omitempty"`
|
||||||
KnownTo []string `json:"known_to,omitempty"`
|
KnownTo []string `json:"known_to,omitempty"`
|
||||||
|
Stats *ResponseStats `json:"stats,omitempty"`
|
||||||
}{
|
}{
|
||||||
Role: m.Role,
|
Role: m.Role,
|
||||||
Content: m.Content,
|
Content: m.Content,
|
||||||
ToolCallID: m.ToolCallID,
|
ToolCallID: m.ToolCallID,
|
||||||
|
ToolCall: m.ToolCall,
|
||||||
|
IsShellCommand: m.IsShellCommand,
|
||||||
KnownTo: m.KnownTo,
|
KnownTo: m.KnownTo,
|
||||||
|
Stats: m.Stats,
|
||||||
}
|
}
|
||||||
return json.Marshal(aux)
|
return json.Marshal(aux)
|
||||||
}
|
}
|
||||||
@@ -137,14 +163,20 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
|
|||||||
Role string `json:"role"`
|
Role string `json:"role"`
|
||||||
Content []any `json:"content"`
|
Content []any `json:"content"`
|
||||||
ToolCallID string `json:"tool_call_id,omitempty"`
|
ToolCallID string `json:"tool_call_id,omitempty"`
|
||||||
|
ToolCall *ToolCall `json:"tool_call,omitempty"`
|
||||||
|
IsShellCommand bool `json:"is_shell_command,omitempty"`
|
||||||
KnownTo []string `json:"known_to,omitempty"`
|
KnownTo []string `json:"known_to,omitempty"`
|
||||||
|
Stats *ResponseStats `json:"stats,omitempty"`
|
||||||
}
|
}
|
||||||
if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 {
|
if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 {
|
||||||
m.Role = structured.Role
|
m.Role = structured.Role
|
||||||
m.ContentParts = structured.Content
|
m.ContentParts = structured.Content
|
||||||
m.ToolCallID = structured.ToolCallID
|
m.ToolCallID = structured.ToolCallID
|
||||||
|
m.ToolCall = structured.ToolCall
|
||||||
|
m.IsShellCommand = structured.IsShellCommand
|
||||||
m.KnownTo = structured.KnownTo
|
m.KnownTo = structured.KnownTo
|
||||||
m.hasContentParts = true
|
m.Stats = structured.Stats
|
||||||
|
m.HasContentParts = true
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -153,7 +185,10 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
|
|||||||
Role string `json:"role"`
|
Role string `json:"role"`
|
||||||
Content string `json:"content"`
|
Content string `json:"content"`
|
||||||
ToolCallID string `json:"tool_call_id,omitempty"`
|
ToolCallID string `json:"tool_call_id,omitempty"`
|
||||||
|
ToolCall *ToolCall `json:"tool_call,omitempty"`
|
||||||
|
IsShellCommand bool `json:"is_shell_command,omitempty"`
|
||||||
KnownTo []string `json:"known_to,omitempty"`
|
KnownTo []string `json:"known_to,omitempty"`
|
||||||
|
Stats *ResponseStats `json:"stats,omitempty"`
|
||||||
}
|
}
|
||||||
if err := json.Unmarshal(data, &simple); err != nil {
|
if err := json.Unmarshal(data, &simple); err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -161,55 +196,32 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
|
|||||||
m.Role = simple.Role
|
m.Role = simple.Role
|
||||||
m.Content = simple.Content
|
m.Content = simple.Content
|
||||||
m.ToolCallID = simple.ToolCallID
|
m.ToolCallID = simple.ToolCallID
|
||||||
|
m.ToolCall = simple.ToolCall
|
||||||
|
m.IsShellCommand = simple.IsShellCommand
|
||||||
m.KnownTo = simple.KnownTo
|
m.KnownTo = simple.KnownTo
|
||||||
m.hasContentParts = false
|
m.Stats = simple.Stats
|
||||||
|
m.HasContentParts = false
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *RoleMsg) ToText(i int) string {
|
|
||||||
icon := fmt.Sprintf("(%d)", i)
|
|
||||||
// Convert content to string representation
|
|
||||||
var contentStr string
|
|
||||||
if !m.hasContentParts {
|
|
||||||
contentStr = m.Content
|
|
||||||
} else {
|
|
||||||
// For structured content, just take the text parts
|
|
||||||
var textParts []string
|
|
||||||
for _, part := range m.ContentParts {
|
|
||||||
if partMap, ok := part.(map[string]any); ok {
|
|
||||||
if partType, exists := partMap["type"]; exists && partType == "text" {
|
|
||||||
if textVal, textExists := partMap["text"]; textExists {
|
|
||||||
if textStr, isStr := textVal.(string); isStr {
|
|
||||||
textParts = append(textParts, textStr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
contentStr = strings.Join(textParts, " ") + " "
|
|
||||||
}
|
|
||||||
// check if already has role annotation (/completion makes them)
|
|
||||||
// in that case remove it, and then add to icon
|
|
||||||
// since icon and content are separated by \n
|
|
||||||
contentStr, _ = strings.CutPrefix(contentStr, m.Role+":")
|
|
||||||
// if !strings.HasPrefix(contentStr, m.Role+":") {
|
|
||||||
icon = fmt.Sprintf("(%d) <%s>: ", i, m.Role)
|
|
||||||
// }
|
|
||||||
textMsg := fmt.Sprintf("[-:-:b]%s[-:-:-]\n%s\n", icon, contentStr)
|
|
||||||
return strings.ReplaceAll(textMsg, "\n\n", "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *RoleMsg) ToPrompt() string {
|
func (m *RoleMsg) ToPrompt() string {
|
||||||
var contentStr string
|
var contentStr string
|
||||||
if !m.hasContentParts {
|
if !m.HasContentParts {
|
||||||
contentStr = m.Content
|
contentStr = m.Content
|
||||||
} else {
|
} else {
|
||||||
// For structured content, just take the text parts
|
// For structured content, just take the text parts
|
||||||
var textParts []string
|
var textParts []string
|
||||||
for _, part := range m.ContentParts {
|
for _, part := range m.ContentParts {
|
||||||
if partMap, ok := part.(map[string]any); ok {
|
switch p := part.(type) {
|
||||||
if partType, exists := partMap["type"]; exists && partType == "text" {
|
case TextContentPart:
|
||||||
if textVal, textExists := partMap["text"]; textExists {
|
if p.Type == "text" {
|
||||||
|
textParts = append(textParts, p.Text)
|
||||||
|
}
|
||||||
|
case ImageContentPart:
|
||||||
|
// skip images for text display
|
||||||
|
case map[string]any:
|
||||||
|
if partType, exists := p["type"]; exists && partType == "text" {
|
||||||
|
if textVal, textExists := p["text"]; textExists {
|
||||||
if textStr, isStr := textVal.(string); isStr {
|
if textStr, isStr := textVal.(string); isStr {
|
||||||
textParts = append(textParts, textStr)
|
textParts = append(textParts, textStr)
|
||||||
}
|
}
|
||||||
@@ -227,7 +239,7 @@ func NewRoleMsg(role, content string) RoleMsg {
|
|||||||
return RoleMsg{
|
return RoleMsg{
|
||||||
Role: role,
|
Role: role,
|
||||||
Content: content,
|
Content: content,
|
||||||
hasContentParts: false,
|
HasContentParts: false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -236,7 +248,7 @@ func NewMultimodalMsg(role string, contentParts []any) RoleMsg {
|
|||||||
return RoleMsg{
|
return RoleMsg{
|
||||||
Role: role,
|
Role: role,
|
||||||
ContentParts: contentParts,
|
ContentParts: contentParts,
|
||||||
hasContentParts: true,
|
HasContentParts: true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -245,7 +257,7 @@ func (m *RoleMsg) HasContent() bool {
|
|||||||
if m.Content != "" {
|
if m.Content != "" {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
if m.hasContentParts && len(m.ContentParts) > 0 {
|
if m.HasContentParts && len(m.ContentParts) > 0 {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
@@ -253,7 +265,7 @@ func (m *RoleMsg) HasContent() bool {
|
|||||||
|
|
||||||
// IsContentParts returns true if the message uses structured content parts
|
// IsContentParts returns true if the message uses structured content parts
|
||||||
func (m *RoleMsg) IsContentParts() bool {
|
func (m *RoleMsg) IsContentParts() bool {
|
||||||
return m.hasContentParts
|
return m.HasContentParts
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetContentParts returns the content parts of the message
|
// GetContentParts returns the content parts of the message
|
||||||
@@ -269,40 +281,102 @@ func (m *RoleMsg) Copy() RoleMsg {
|
|||||||
ContentParts: m.ContentParts,
|
ContentParts: m.ContentParts,
|
||||||
ToolCallID: m.ToolCallID,
|
ToolCallID: m.ToolCallID,
|
||||||
KnownTo: m.KnownTo,
|
KnownTo: m.KnownTo,
|
||||||
hasContentParts: m.hasContentParts,
|
Stats: m.Stats,
|
||||||
|
HasContentParts: m.HasContentParts,
|
||||||
|
ToolCall: m.ToolCall,
|
||||||
|
IsShellCommand: m.IsShellCommand,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetText returns the text content of the message, handling both
|
||||||
|
// simple Content and multimodal ContentParts formats.
|
||||||
|
func (m *RoleMsg) GetText() string {
|
||||||
|
if !m.HasContentParts {
|
||||||
|
return m.Content
|
||||||
|
}
|
||||||
|
var textParts []string
|
||||||
|
for _, part := range m.ContentParts {
|
||||||
|
switch p := part.(type) {
|
||||||
|
case TextContentPart:
|
||||||
|
if p.Type == "text" {
|
||||||
|
textParts = append(textParts, p.Text)
|
||||||
|
}
|
||||||
|
case map[string]any:
|
||||||
|
if partType, exists := p["type"]; exists {
|
||||||
|
if partType == "text" {
|
||||||
|
if textVal, textExists := p["text"]; textExists {
|
||||||
|
if textStr, isStr := textVal.(string); isStr {
|
||||||
|
textParts = append(textParts, textStr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return strings.Join(textParts, " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetText updates the text content of the message. If the message has
|
||||||
|
// ContentParts (multimodal), it updates the text parts while preserving
|
||||||
|
// images. If not, it sets the simple Content field.
|
||||||
|
func (m *RoleMsg) SetText(text string) {
|
||||||
|
if !m.HasContentParts {
|
||||||
|
m.Content = text
|
||||||
|
return
|
||||||
|
}
|
||||||
|
var newParts []any
|
||||||
|
for _, part := range m.ContentParts {
|
||||||
|
switch p := part.(type) {
|
||||||
|
case TextContentPart:
|
||||||
|
if p.Type == "text" {
|
||||||
|
p.Text = text
|
||||||
|
newParts = append(newParts, p)
|
||||||
|
} else {
|
||||||
|
newParts = append(newParts, p)
|
||||||
|
}
|
||||||
|
case map[string]any:
|
||||||
|
if partType, exists := p["type"]; exists && partType == "text" {
|
||||||
|
p["text"] = text
|
||||||
|
newParts = append(newParts, p)
|
||||||
|
} else {
|
||||||
|
newParts = append(newParts, p)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
newParts = append(newParts, part)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
m.ContentParts = newParts
|
||||||
|
}
|
||||||
|
|
||||||
// AddTextPart adds a text content part to the message
|
// AddTextPart adds a text content part to the message
|
||||||
func (m *RoleMsg) AddTextPart(text string) {
|
func (m *RoleMsg) AddTextPart(text string) {
|
||||||
if !m.hasContentParts {
|
if !m.HasContentParts {
|
||||||
// Convert to content parts format
|
// Convert to content parts format
|
||||||
if m.Content != "" {
|
if m.Content != "" {
|
||||||
m.ContentParts = []any{TextContentPart{Type: "text", Text: m.Content}}
|
m.ContentParts = []any{TextContentPart{Type: "text", Text: m.Content}}
|
||||||
} else {
|
} else {
|
||||||
m.ContentParts = []any{}
|
m.ContentParts = []any{}
|
||||||
}
|
}
|
||||||
m.hasContentParts = true
|
m.HasContentParts = true
|
||||||
}
|
}
|
||||||
|
|
||||||
textPart := TextContentPart{Type: "text", Text: text}
|
textPart := TextContentPart{Type: "text", Text: text}
|
||||||
m.ContentParts = append(m.ContentParts, textPart)
|
m.ContentParts = append(m.ContentParts, textPart)
|
||||||
}
|
}
|
||||||
|
|
||||||
// AddImagePart adds an image content part to the message
|
// AddImagePart adds an image content part to the message
|
||||||
func (m *RoleMsg) AddImagePart(imageURL string) {
|
func (m *RoleMsg) AddImagePart(imageURL, imagePath string) {
|
||||||
if !m.hasContentParts {
|
if !m.HasContentParts {
|
||||||
// Convert to content parts format
|
// Convert to content parts format
|
||||||
if m.Content != "" {
|
if m.Content != "" {
|
||||||
m.ContentParts = []any{TextContentPart{Type: "text", Text: m.Content}}
|
m.ContentParts = []any{TextContentPart{Type: "text", Text: m.Content}}
|
||||||
} else {
|
} else {
|
||||||
m.ContentParts = []any{}
|
m.ContentParts = []any{}
|
||||||
}
|
}
|
||||||
m.hasContentParts = true
|
m.HasContentParts = true
|
||||||
}
|
}
|
||||||
|
|
||||||
imagePart := ImageContentPart{
|
imagePart := ImageContentPart{
|
||||||
Type: "image_url",
|
Type: "image_url",
|
||||||
|
Path: imagePath, // Store the original file path
|
||||||
ImageURL: struct {
|
ImageURL: struct {
|
||||||
URL string `json:"url"`
|
URL string `json:"url"`
|
||||||
}{URL: imageURL},
|
}{URL: imageURL},
|
||||||
@@ -317,7 +391,6 @@ func CreateImageURLFromPath(imagePath string) (string, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine the image format based on file extension
|
// Determine the image format based on file extension
|
||||||
var mimeType string
|
var mimeType string
|
||||||
switch {
|
switch {
|
||||||
@@ -334,10 +407,8 @@ func CreateImageURLFromPath(imagePath string) (string, error) {
|
|||||||
default:
|
default:
|
||||||
mimeType = "image/jpeg" // default
|
mimeType = "image/jpeg" // default
|
||||||
}
|
}
|
||||||
|
|
||||||
// Encode to base64
|
// Encode to base64
|
||||||
encoded := base64.StdEncoding.EncodeToString(data)
|
encoded := base64.StdEncoding.EncodeToString(data)
|
||||||
|
|
||||||
// Create data URL
|
// Create data URL
|
||||||
return fmt.Sprintf("data:%s;base64,%s", mimeType, encoded), nil
|
return fmt.Sprintf("data:%s;base64,%s", mimeType, encoded), nil
|
||||||
}
|
}
|
||||||
@@ -349,16 +420,16 @@ type ChatBody struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (cb *ChatBody) Rename(oldname, newname string) {
|
func (cb *ChatBody) Rename(oldname, newname string) {
|
||||||
for i, m := range cb.Messages {
|
for i := range cb.Messages {
|
||||||
cb.Messages[i].Content = strings.ReplaceAll(m.Content, oldname, newname)
|
cb.Messages[i].Content = strings.ReplaceAll(cb.Messages[i].Content, oldname, newname)
|
||||||
cb.Messages[i].Role = strings.ReplaceAll(m.Role, oldname, newname)
|
cb.Messages[i].Role = strings.ReplaceAll(cb.Messages[i].Role, oldname, newname)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cb *ChatBody) ListRoles() []string {
|
func (cb *ChatBody) ListRoles() []string {
|
||||||
namesMap := make(map[string]struct{})
|
namesMap := make(map[string]struct{})
|
||||||
for _, m := range cb.Messages {
|
for i := range cb.Messages {
|
||||||
namesMap[m.Role] = struct{}{}
|
namesMap[cb.Messages[i].Role] = struct{}{}
|
||||||
}
|
}
|
||||||
resp := make([]string, len(namesMap))
|
resp := make([]string, len(namesMap))
|
||||||
i := 0
|
i := 0
|
||||||
@@ -445,24 +516,6 @@ type OpenAIReq struct {
|
|||||||
|
|
||||||
// ===
|
// ===
|
||||||
|
|
||||||
// type LLMModels struct {
|
|
||||||
// Object string `json:"object"`
|
|
||||||
// Data []struct {
|
|
||||||
// ID string `json:"id"`
|
|
||||||
// Object string `json:"object"`
|
|
||||||
// Created int `json:"created"`
|
|
||||||
// OwnedBy string `json:"owned_by"`
|
|
||||||
// Meta struct {
|
|
||||||
// VocabType int `json:"vocab_type"`
|
|
||||||
// NVocab int `json:"n_vocab"`
|
|
||||||
// NCtxTrain int `json:"n_ctx_train"`
|
|
||||||
// NEmbd int `json:"n_embd"`
|
|
||||||
// NParams int64 `json:"n_params"`
|
|
||||||
// Size int64 `json:"size"`
|
|
||||||
// } `json:"meta"`
|
|
||||||
// } `json:"data"`
|
|
||||||
// }
|
|
||||||
|
|
||||||
type LlamaCPPReq struct {
|
type LlamaCPPReq struct {
|
||||||
Model string `json:"model"`
|
Model string `json:"model"`
|
||||||
Stream bool `json:"stream"`
|
Stream bool `json:"stream"`
|
||||||
@@ -555,6 +608,26 @@ func (lcp *LCPModels) ListModels() []string {
|
|||||||
return resp
|
return resp
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (lcp *LCPModels) HasVision(modelID string) bool {
|
||||||
|
for _, m := range lcp.Data {
|
||||||
|
if m.ID == modelID {
|
||||||
|
args := m.Status.Args
|
||||||
|
for i := 0; i < len(args)-1; i++ {
|
||||||
|
if args[i] == "--mmproj" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
type ResponseStats struct {
|
||||||
|
Tokens int
|
||||||
|
Duration float64
|
||||||
|
TokensPerSec float64
|
||||||
|
}
|
||||||
|
|
||||||
type ChatRoundReq struct {
|
type ChatRoundReq struct {
|
||||||
UserMsg string
|
UserMsg string
|
||||||
Role string
|
Role string
|
||||||
@@ -562,9 +635,7 @@ type ChatRoundReq struct {
|
|||||||
Resume bool
|
Resume bool
|
||||||
}
|
}
|
||||||
|
|
||||||
type APIType int
|
type MultimodalToolResp struct {
|
||||||
|
Type string `json:"type"`
|
||||||
const (
|
Parts []map[string]string `json:"parts"`
|
||||||
APITypeChat APIType = iota
|
}
|
||||||
APITypeCompletion
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -32,10 +32,16 @@ type OpenRouterChatReq struct {
|
|||||||
MinP float32 `json:"min_p"`
|
MinP float32 `json:"min_p"`
|
||||||
NPredict int32 `json:"max_tokens"`
|
NPredict int32 `json:"max_tokens"`
|
||||||
Tools []Tool `json:"tools"`
|
Tools []Tool `json:"tools"`
|
||||||
|
Reasoning *ReasoningConfig `json:"reasoning,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
|
type ReasoningConfig struct {
|
||||||
return OpenRouterChatReq{
|
Effort string `json:"effort,omitempty"` // xhigh, high, medium, low, minimal, none
|
||||||
|
Summary string `json:"summary,omitempty"` // auto, concise, detailed
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewOpenRouterChatReq(cb ChatBody, props map[string]float32, reasoningEffort string) OpenRouterChatReq {
|
||||||
|
req := OpenRouterChatReq{
|
||||||
Messages: cb.Messages,
|
Messages: cb.Messages,
|
||||||
Model: cb.Model,
|
Model: cb.Model,
|
||||||
Stream: cb.Stream,
|
Stream: cb.Stream,
|
||||||
@@ -43,6 +49,13 @@ func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatR
|
|||||||
MinP: props["min_p"],
|
MinP: props["min_p"],
|
||||||
NPredict: int32(props["n_predict"]),
|
NPredict: int32(props["n_predict"]),
|
||||||
}
|
}
|
||||||
|
// Only include reasoning config if effort is specified and not "none"
|
||||||
|
if reasoningEffort != "" && reasoningEffort != "none" {
|
||||||
|
req.Reasoning = &ReasoningConfig{
|
||||||
|
Effort: reasoningEffort,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return req
|
||||||
}
|
}
|
||||||
|
|
||||||
type OpenRouterChatRespNonStream struct {
|
type OpenRouterChatRespNonStream struct {
|
||||||
@@ -82,6 +95,7 @@ type OpenRouterChatResp struct {
|
|||||||
Delta struct {
|
Delta struct {
|
||||||
Role string `json:"role"`
|
Role string `json:"role"`
|
||||||
Content string `json:"content"`
|
Content string `json:"content"`
|
||||||
|
Reasoning string `json:"reasoning"`
|
||||||
ToolCalls []ToolDeltaResp `json:"tool_calls"`
|
ToolCalls []ToolDeltaResp `json:"tool_calls"`
|
||||||
} `json:"delta"`
|
} `json:"delta"`
|
||||||
FinishReason string `json:"finish_reason"`
|
FinishReason string `json:"finish_reason"`
|
||||||
@@ -158,3 +172,16 @@ func (orm *ORModels) ListModels(free bool) []string {
|
|||||||
}
|
}
|
||||||
return resp
|
return resp
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (orm *ORModels) HasVision(modelID string) bool {
|
||||||
|
for i := range orm.Data {
|
||||||
|
if orm.Data[i].ID == modelID {
|
||||||
|
for _, mod := range orm.Data[i].Architecture.InputModalities {
|
||||||
|
if mod == "image" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|||||||
@@ -62,7 +62,6 @@ func TestORModelsListModels(t *testing.T) {
|
|||||||
t.Errorf("expected 4 total models, got %d", len(allModels))
|
t.Errorf("expected 4 total models, got %d", len(allModels))
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("integration with or_models.json", func(t *testing.T) {
|
t.Run("integration with or_models.json", func(t *testing.T) {
|
||||||
// Attempt to load the real data file from the project root
|
// Attempt to load the real data file from the project root
|
||||||
path := filepath.Join("..", "or_models.json")
|
path := filepath.Join("..", "or_models.json")
|
||||||
|
|||||||
@@ -109,6 +109,12 @@ func ReadCardJson(fname string) (*models.CharCard, error) {
|
|||||||
if err := json.Unmarshal(data, &card); err != nil {
|
if err := json.Unmarshal(data, &card); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if card.FilePath == "" {
|
||||||
|
card.FilePath = fname
|
||||||
|
}
|
||||||
|
if card.ID == "" {
|
||||||
|
card.ID = models.ComputeCardID(card.Role, card.FilePath)
|
||||||
|
}
|
||||||
return &card, nil
|
return &card, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
301
popups.go
301
popups.go
@@ -1,6 +1,7 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"gf-lt/models"
|
||||||
"slices"
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@@ -17,10 +18,14 @@ func showModelSelectionPopup() {
|
|||||||
} else if strings.Contains(api, "openrouter.ai") {
|
} else if strings.Contains(api, "openrouter.ai") {
|
||||||
return ORFreeModels
|
return ORFreeModels
|
||||||
}
|
}
|
||||||
// Assume local llama.cpp
|
// Assume local llama.cpp - fetch with load status
|
||||||
updateModelLists()
|
models, err := fetchLCPModelsWithLoadStatus()
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("failed to fetch models with load status", "error", err)
|
||||||
return LocalModels
|
return LocalModels
|
||||||
}
|
}
|
||||||
|
return models
|
||||||
|
}
|
||||||
// Get the current model list based on the API
|
// Get the current model list based on the API
|
||||||
modelList := getModelListForAPI(cfg.CurrentAPI)
|
modelList := getModelListForAPI(cfg.CurrentAPI)
|
||||||
// Check for empty options list
|
// Check for empty options list
|
||||||
@@ -35,9 +40,7 @@ func showModelSelectionPopup() {
|
|||||||
default:
|
default:
|
||||||
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
||||||
}
|
}
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -47,7 +50,7 @@ func showModelSelectionPopup() {
|
|||||||
// Find the current model index to set as selected
|
// Find the current model index to set as selected
|
||||||
currentModelIndex := -1
|
currentModelIndex := -1
|
||||||
for i, model := range modelList {
|
for i, model := range modelList {
|
||||||
if model == chatBody.Model {
|
if strings.TrimPrefix(model, models.LoadedMark) == chatBody.Model {
|
||||||
currentModelIndex = i
|
currentModelIndex = i
|
||||||
}
|
}
|
||||||
modelListWidget.AddItem(model, "", 0, nil)
|
modelListWidget.AddItem(model, "", 0, nil)
|
||||||
@@ -57,17 +60,23 @@ func showModelSelectionPopup() {
|
|||||||
modelListWidget.SetCurrentItem(currentModelIndex)
|
modelListWidget.SetCurrentItem(currentModelIndex)
|
||||||
}
|
}
|
||||||
modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
// Update the model in both chatBody and config
|
modelName := strings.TrimPrefix(mainText, models.LoadedMark)
|
||||||
chatBody.Model = mainText
|
chatBody.Model = modelName
|
||||||
cfg.CurrentModel = chatBody.Model
|
cfg.CurrentModel = chatBody.Model
|
||||||
// Remove the popup page
|
|
||||||
pages.RemovePage("modelSelectionPopup")
|
pages.RemovePage("modelSelectionPopup")
|
||||||
// Update the status line to reflect the change
|
app.SetFocus(textArea)
|
||||||
|
updateCachedModelColor()
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
})
|
})
|
||||||
modelListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
modelListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
if event.Key() == tcell.KeyEscape {
|
if event.Key() == tcell.KeyEscape {
|
||||||
pages.RemovePage("modelSelectionPopup")
|
pages.RemovePage("modelSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("modelSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
return event
|
return event
|
||||||
@@ -108,9 +117,7 @@ func showAPILinkSelectionPopup() {
|
|||||||
if len(apiLinks) == 0 {
|
if len(apiLinks) == 0 {
|
||||||
logger.Warn("no API links available for selection")
|
logger.Warn("no API links available for selection")
|
||||||
message := "No API links available. Please configure API links in your config file."
|
message := "No API links available. Please configure API links in your config file."
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -132,6 +139,7 @@ func showAPILinkSelectionPopup() {
|
|||||||
apiListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
apiListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
// Update the API in config
|
// Update the API in config
|
||||||
cfg.CurrentAPI = mainText
|
cfg.CurrentAPI = mainText
|
||||||
|
// tools.UpdateToolCapabilities()
|
||||||
// Update model list based on new API
|
// Update model list based on new API
|
||||||
// Helper function to get model list for a given API (same as in props_table.go)
|
// Helper function to get model list for a given API (same as in props_table.go)
|
||||||
getModelListForAPI := func(api string) []string {
|
getModelListForAPI := func(api string) []string {
|
||||||
@@ -149,18 +157,25 @@ func showAPILinkSelectionPopup() {
|
|||||||
newModelList := getModelListForAPI(cfg.CurrentAPI)
|
newModelList := getModelListForAPI(cfg.CurrentAPI)
|
||||||
// Ensure chatBody.Model is in the new list; if not, set to first available model
|
// Ensure chatBody.Model is in the new list; if not, set to first available model
|
||||||
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) {
|
if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) {
|
||||||
chatBody.Model = newModelList[0]
|
chatBody.Model = strings.TrimPrefix(newModelList[0], models.LoadedMark)
|
||||||
cfg.CurrentModel = chatBody.Model
|
cfg.CurrentModel = chatBody.Model
|
||||||
|
UpdateToolCapabilities()
|
||||||
}
|
}
|
||||||
// Remove the popup page
|
|
||||||
pages.RemovePage("apiLinkSelectionPopup")
|
pages.RemovePage("apiLinkSelectionPopup")
|
||||||
// Update the parser and status line to reflect the change
|
app.SetFocus(textArea)
|
||||||
choseChunkParser()
|
choseChunkParser()
|
||||||
|
updateCachedModelColor()
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
})
|
})
|
||||||
apiListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
apiListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
if event.Key() == tcell.KeyEscape {
|
if event.Key() == tcell.KeyEscape {
|
||||||
pages.RemovePage("apiLinkSelectionPopup")
|
pages.RemovePage("apiLinkSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("apiLinkSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
return event
|
return event
|
||||||
@@ -187,9 +202,7 @@ func showUserRoleSelectionPopup() {
|
|||||||
if len(roles) == 0 {
|
if len(roles) == 0 {
|
||||||
logger.Warn("no roles available for selection")
|
logger.Warn("no roles available for selection")
|
||||||
message := "No roles available for selection."
|
message := "No roles available for selection."
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -220,6 +233,7 @@ func showUserRoleSelectionPopup() {
|
|||||||
textView.SetText(chatToText(filtered, cfg.ShowSys))
|
textView.SetText(chatToText(filtered, cfg.ShowSys))
|
||||||
// Remove the popup page
|
// Remove the popup page
|
||||||
pages.RemovePage("userRoleSelectionPopup")
|
pages.RemovePage("userRoleSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
// Update the status line to reflect the change
|
// Update the status line to reflect the change
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
colorText()
|
colorText()
|
||||||
@@ -227,6 +241,12 @@ func showUserRoleSelectionPopup() {
|
|||||||
roleListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
roleListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
if event.Key() == tcell.KeyEscape {
|
if event.Key() == tcell.KeyEscape {
|
||||||
pages.RemovePage("userRoleSelectionPopup")
|
pages.RemovePage("userRoleSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("userRoleSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
return event
|
return event
|
||||||
@@ -259,9 +279,7 @@ func showBotRoleSelectionPopup() {
|
|||||||
if len(roles) == 0 {
|
if len(roles) == 0 {
|
||||||
logger.Warn("no roles available for selection")
|
logger.Warn("no roles available for selection")
|
||||||
message := "No roles available for selection."
|
message := "No roles available for selection."
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
@@ -289,12 +307,19 @@ func showBotRoleSelectionPopup() {
|
|||||||
cfg.WriteNextMsgAsCompletionAgent = mainText
|
cfg.WriteNextMsgAsCompletionAgent = mainText
|
||||||
// Remove the popup page
|
// Remove the popup page
|
||||||
pages.RemovePage("botRoleSelectionPopup")
|
pages.RemovePage("botRoleSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
// Update the status line to reflect the change
|
// Update the status line to reflect the change
|
||||||
updateStatusLine()
|
updateStatusLine()
|
||||||
})
|
})
|
||||||
roleListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
roleListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
if event.Key() == tcell.KeyEscape {
|
if event.Key() == tcell.KeyEscape {
|
||||||
pages.RemovePage("botRoleSelectionPopup")
|
pages.RemovePage("botRoleSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("botRoleSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
return event
|
return event
|
||||||
@@ -312,3 +337,235 @@ func showBotRoleSelectionPopup() {
|
|||||||
pages.AddPage("botRoleSelectionPopup", modal(roleListWidget, 80, 20), true, true)
|
pages.AddPage("botRoleSelectionPopup", modal(roleListWidget, 80, 20), true, true)
|
||||||
app.SetFocus(roleListWidget)
|
app.SetFocus(roleListWidget)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func showShellFileCompletionPopup(filter string) {
|
||||||
|
baseDir := cfg.FilePickerDir
|
||||||
|
if baseDir == "" {
|
||||||
|
baseDir = "."
|
||||||
|
}
|
||||||
|
complMatches := scanFiles(baseDir, filter)
|
||||||
|
if len(complMatches) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(complMatches) == 1 {
|
||||||
|
currentText := shellInput.GetText()
|
||||||
|
atIdx := strings.LastIndex(currentText, "@")
|
||||||
|
if atIdx >= 0 {
|
||||||
|
before := currentText[:atIdx]
|
||||||
|
shellInput.SetText(before + complMatches[0])
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
widget := tview.NewList().ShowSecondaryText(false).
|
||||||
|
SetSelectedBackgroundColor(tcell.ColorGray)
|
||||||
|
widget.SetTitle("file completion").SetBorder(true)
|
||||||
|
for _, m := range complMatches {
|
||||||
|
widget.AddItem(m, "", 0, nil)
|
||||||
|
}
|
||||||
|
widget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
|
currentText := shellInput.GetText()
|
||||||
|
atIdx := strings.LastIndex(currentText, "@")
|
||||||
|
if atIdx >= 0 {
|
||||||
|
before := currentText[:atIdx]
|
||||||
|
shellInput.SetText(before + mainText)
|
||||||
|
}
|
||||||
|
pages.RemovePage("shellFileCompletionPopup")
|
||||||
|
app.SetFocus(shellInput)
|
||||||
|
})
|
||||||
|
widget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
pages.RemovePage("shellFileCompletionPopup")
|
||||||
|
app.SetFocus(shellInput)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("shellFileCompletionPopup")
|
||||||
|
app.SetFocus(shellInput)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
modal := func(p tview.Primitive, width, height int) tview.Primitive {
|
||||||
|
return tview.NewFlex().
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(p, height, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false), width, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false)
|
||||||
|
}
|
||||||
|
pages.AddPage("shellFileCompletionPopup", modal(widget, 80, 20), true, true)
|
||||||
|
app.SetFocus(widget)
|
||||||
|
}
|
||||||
|
|
||||||
|
func showTextAreaFileCompletionPopup(filter string) {
|
||||||
|
baseDir := cfg.FilePickerDir
|
||||||
|
if baseDir == "" {
|
||||||
|
baseDir = "."
|
||||||
|
}
|
||||||
|
complMatches := scanFiles(baseDir, filter)
|
||||||
|
if len(complMatches) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(complMatches) == 1 {
|
||||||
|
currentText := textArea.GetText()
|
||||||
|
atIdx := strings.LastIndex(currentText, "@")
|
||||||
|
if atIdx >= 0 {
|
||||||
|
before := currentText[:atIdx]
|
||||||
|
textArea.SetText(before+complMatches[0], true)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
widget := tview.NewList().ShowSecondaryText(false).
|
||||||
|
SetSelectedBackgroundColor(tcell.ColorGray)
|
||||||
|
widget.SetTitle("file completion").SetBorder(true)
|
||||||
|
for _, m := range complMatches {
|
||||||
|
widget.AddItem(m, "", 0, nil)
|
||||||
|
}
|
||||||
|
widget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
|
currentText := textArea.GetText()
|
||||||
|
atIdx := strings.LastIndex(currentText, "@")
|
||||||
|
if atIdx >= 0 {
|
||||||
|
before := currentText[:atIdx]
|
||||||
|
textArea.SetText(before+mainText, true)
|
||||||
|
}
|
||||||
|
pages.RemovePage("textAreaFileCompletionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
})
|
||||||
|
widget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
pages.RemovePage("textAreaFileCompletionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("textAreaFileCompletionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
modal := func(p tview.Primitive, width, height int) tview.Primitive {
|
||||||
|
return tview.NewFlex().
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(p, height, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false), width, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false)
|
||||||
|
}
|
||||||
|
pages.AddPage("textAreaFileCompletionPopup", modal(widget, 80, 20), true, true)
|
||||||
|
app.SetFocus(widget)
|
||||||
|
}
|
||||||
|
|
||||||
|
func updateWidgetColors(theme *tview.Theme) {
|
||||||
|
bgColor := theme.PrimitiveBackgroundColor
|
||||||
|
fgColor := theme.PrimaryTextColor
|
||||||
|
borderColor := theme.BorderColor
|
||||||
|
titleColor := theme.TitleColor
|
||||||
|
textView.SetBackgroundColor(bgColor)
|
||||||
|
textView.SetTextColor(fgColor)
|
||||||
|
textView.SetBorderColor(borderColor)
|
||||||
|
textView.SetTitleColor(titleColor)
|
||||||
|
textArea.SetBackgroundColor(bgColor)
|
||||||
|
textArea.SetBorderColor(borderColor)
|
||||||
|
textArea.SetTitleColor(titleColor)
|
||||||
|
textArea.SetTextStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
|
||||||
|
textArea.SetPlaceholderStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
|
||||||
|
textArea.SetText(textArea.GetText(), true)
|
||||||
|
editArea.SetBackgroundColor(bgColor)
|
||||||
|
editArea.SetBorderColor(borderColor)
|
||||||
|
editArea.SetTitleColor(titleColor)
|
||||||
|
editArea.SetTextStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
|
||||||
|
editArea.SetPlaceholderStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
|
||||||
|
editArea.SetText(editArea.GetText(), true)
|
||||||
|
statusLineWidget.SetBackgroundColor(bgColor)
|
||||||
|
statusLineWidget.SetTextColor(fgColor)
|
||||||
|
statusLineWidget.SetBorderColor(borderColor)
|
||||||
|
statusLineWidget.SetTitleColor(titleColor)
|
||||||
|
helpView.SetBackgroundColor(bgColor)
|
||||||
|
helpView.SetTextColor(fgColor)
|
||||||
|
helpView.SetBorderColor(borderColor)
|
||||||
|
helpView.SetTitleColor(titleColor)
|
||||||
|
searchField.SetBackgroundColor(bgColor)
|
||||||
|
searchField.SetBorderColor(borderColor)
|
||||||
|
searchField.SetTitleColor(titleColor)
|
||||||
|
}
|
||||||
|
|
||||||
|
// showColorschemeSelectionPopup creates a modal popup to select a colorscheme
|
||||||
|
func showColorschemeSelectionPopup() {
|
||||||
|
// Get the list of available colorschemes
|
||||||
|
schemeNames := make([]string, 0, len(colorschemes))
|
||||||
|
for name := range colorschemes {
|
||||||
|
schemeNames = append(schemeNames, name)
|
||||||
|
}
|
||||||
|
slices.Sort(schemeNames)
|
||||||
|
// Check for empty options list
|
||||||
|
if len(schemeNames) == 0 {
|
||||||
|
logger.Warn("no colorschemes available for selection")
|
||||||
|
message := "No colorschemes available."
|
||||||
|
showToast("Empty list", message)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Create a list primitive
|
||||||
|
schemeListWidget := tview.NewList().ShowSecondaryText(false).
|
||||||
|
SetSelectedBackgroundColor(tcell.ColorGray)
|
||||||
|
schemeListWidget.SetTitle("Select Colorscheme").SetBorder(true)
|
||||||
|
currentScheme := "default"
|
||||||
|
for name := range colorschemes {
|
||||||
|
if tview.Styles == colorschemes[name] {
|
||||||
|
currentScheme = name
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
currentSchemeIndex := -1
|
||||||
|
for i, scheme := range schemeNames {
|
||||||
|
if scheme == currentScheme {
|
||||||
|
currentSchemeIndex = i
|
||||||
|
}
|
||||||
|
schemeListWidget.AddItem(scheme, "", 0, nil)
|
||||||
|
}
|
||||||
|
// Set the current selection if found
|
||||||
|
if currentSchemeIndex != -1 {
|
||||||
|
schemeListWidget.SetCurrentItem(currentSchemeIndex)
|
||||||
|
}
|
||||||
|
schemeListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
|
||||||
|
// Update the colorscheme
|
||||||
|
if theme, ok := colorschemes[mainText]; ok {
|
||||||
|
tview.Styles = theme
|
||||||
|
go func() {
|
||||||
|
app.QueueUpdateDraw(func() {
|
||||||
|
updateWidgetColors(&theme)
|
||||||
|
})
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
// Remove the popup page
|
||||||
|
pages.RemovePage("colorschemeSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
})
|
||||||
|
schemeListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
|
||||||
|
if event.Key() == tcell.KeyEscape {
|
||||||
|
pages.RemovePage("colorschemeSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
|
||||||
|
pages.RemovePage("colorschemeSelectionPopup")
|
||||||
|
app.SetFocus(textArea)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return event
|
||||||
|
})
|
||||||
|
modal := func(p tview.Primitive, width, height int) tview.Primitive {
|
||||||
|
return tview.NewFlex().
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(tview.NewFlex().SetDirection(tview.FlexRow).
|
||||||
|
AddItem(nil, 0, 1, false).
|
||||||
|
AddItem(p, height, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false), width, 1, true).
|
||||||
|
AddItem(nil, 0, 1, false)
|
||||||
|
}
|
||||||
|
// Add modal page and make it visible
|
||||||
|
pages.AddPage("colorschemeSelectionPopup", modal(schemeListWidget, 40, len(schemeNames)+2), true, true)
|
||||||
|
app.SetFocus(schemeListWidget)
|
||||||
|
}
|
||||||
|
|||||||
@@ -115,26 +115,17 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
row++
|
row++
|
||||||
}
|
}
|
||||||
// Add checkboxes
|
// Add checkboxes
|
||||||
addCheckboxRow("Insert <think> tag (/completion only)", cfg.ThinkUse, func(checked bool) {
|
|
||||||
cfg.ThinkUse = checked
|
|
||||||
})
|
|
||||||
addCheckboxRow("RAG use", cfg.RAGEnabled, func(checked bool) {
|
|
||||||
cfg.RAGEnabled = checked
|
|
||||||
})
|
|
||||||
addCheckboxRow("Inject role", injectRole, func(checked bool) {
|
|
||||||
injectRole = checked
|
|
||||||
})
|
|
||||||
addCheckboxRow("TTS Enabled", cfg.TTS_ENABLED, func(checked bool) {
|
addCheckboxRow("TTS Enabled", cfg.TTS_ENABLED, func(checked bool) {
|
||||||
cfg.TTS_ENABLED = checked
|
cfg.TTS_ENABLED = checked
|
||||||
})
|
})
|
||||||
addCheckboxRow("Auto clean tool calls from context", cfg.AutoCleanToolCallsFromCtx, func(checked bool) {
|
|
||||||
cfg.AutoCleanToolCallsFromCtx = checked
|
|
||||||
})
|
|
||||||
addCheckboxRow("Enable Mouse", cfg.EnableMouse, func(checked bool) {
|
addCheckboxRow("Enable Mouse", cfg.EnableMouse, func(checked bool) {
|
||||||
cfg.EnableMouse = checked
|
cfg.EnableMouse = checked
|
||||||
// Reconfigure the app's mouse setting
|
// Reconfigure the app's mouse setting
|
||||||
app.EnableMouse(cfg.EnableMouse)
|
app.EnableMouse(cfg.EnableMouse)
|
||||||
})
|
})
|
||||||
|
addCheckboxRow("Image Preview (file picker)", cfg.ImagePreview, func(checked bool) {
|
||||||
|
cfg.ImagePreview = checked
|
||||||
|
})
|
||||||
addCheckboxRow("Auto turn (for cards with many chars)", cfg.AutoTurn, func(checked bool) {
|
addCheckboxRow("Auto turn (for cards with many chars)", cfg.AutoTurn, func(checked bool) {
|
||||||
cfg.AutoTurn = checked
|
cfg.AutoTurn = checked
|
||||||
})
|
})
|
||||||
@@ -146,6 +137,11 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
addListPopupRow("Set log level", logLevels, GetLogLevel(), func(option string) {
|
addListPopupRow("Set log level", logLevels, GetLogLevel(), func(option string) {
|
||||||
setLogLevel(option)
|
setLogLevel(option)
|
||||||
})
|
})
|
||||||
|
// Add reasoning effort dropdown (for OpenRouter and supported APIs)
|
||||||
|
reasoningEfforts := []string{"", "none", "minimal", "low", "medium", "high", "xhigh"}
|
||||||
|
addListPopupRow("Reasoning effort (OR)", reasoningEfforts, cfg.ReasoningEffort, func(option string) {
|
||||||
|
cfg.ReasoningEffort = option
|
||||||
|
})
|
||||||
// Helper function to get model list for a given API
|
// Helper function to get model list for a given API
|
||||||
getModelListForAPI := func(api string) []string {
|
getModelListForAPI := func(api string) []string {
|
||||||
if strings.Contains(api, "api.deepseek.com/") {
|
if strings.Contains(api, "api.deepseek.com/") {
|
||||||
@@ -254,9 +250,7 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
// Handle nil options
|
// Handle nil options
|
||||||
if data.Options == nil {
|
if data.Options == nil {
|
||||||
logger.Error("options list is nil for", "label", label)
|
logger.Error("options list is nil for", "label", label)
|
||||||
if err := notifyUser("Configuration error", "Options list is nil for "+label); err != nil {
|
showToast("Configuration error", "Options list is nil for "+label)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -274,9 +268,7 @@ func makePropsTable(props map[string]float32) *tview.Table {
|
|||||||
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err := notifyUser("Empty list", message); err != nil {
|
showToast("Empty list", message)
|
||||||
logger.Error("failed to send notification", "error", err)
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create a list primitive
|
// Create a list primitive
|
||||||
|
|||||||
313
rag/embedder.go
313
rag/embedder.go
@@ -9,6 +9,13 @@ import (
|
|||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/sugarme/tokenizer"
|
||||||
|
"github.com/sugarme/tokenizer/pretrained"
|
||||||
|
"github.com/yalue/onnxruntime_go"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Embedder defines the interface for embedding text
|
// Embedder defines the interface for embedding text
|
||||||
@@ -27,7 +34,9 @@ type APIEmbedder struct {
|
|||||||
func NewAPIEmbedder(l *slog.Logger, cfg *config.Config) *APIEmbedder {
|
func NewAPIEmbedder(l *slog.Logger, cfg *config.Config) *APIEmbedder {
|
||||||
return &APIEmbedder{
|
return &APIEmbedder{
|
||||||
logger: l,
|
logger: l,
|
||||||
client: &http.Client{},
|
client: &http.Client{
|
||||||
|
Timeout: 30 * time.Second,
|
||||||
|
},
|
||||||
cfg: cfg,
|
cfg: cfg,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -131,15 +140,305 @@ func (a *APIEmbedder) EmbedSlice(lines []string) ([][]float32, error) {
|
|||||||
}
|
}
|
||||||
embeddings[data.Index] = data.Embedding
|
embeddings[data.Index] = data.Embedding
|
||||||
}
|
}
|
||||||
|
|
||||||
return embeddings, nil
|
return embeddings, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: ONNXEmbedder implementation would go here
|
|
||||||
// This would require:
|
|
||||||
// 1. Loading ONNX models locally
|
// 1. Loading ONNX models locally
|
||||||
// 2. Using a Go ONNX runtime (like gorgonia/onnx or similar)
|
// 2. Using a Go ONNX runtime (like gorgonia/onnx or similar)
|
||||||
// 3. Converting text to embeddings without external API calls
|
// 3. Converting text to embeddings without external API calls
|
||||||
//
|
type ONNXEmbedder struct {
|
||||||
// For now, we'll focus on the API implementation which is already working in the current system,
|
session *onnxruntime_go.DynamicAdvancedSession
|
||||||
// and can be extended later when we have ONNX runtime integration
|
tokenizer *tokenizer.Tokenizer
|
||||||
|
tokenizerPath string
|
||||||
|
dims int
|
||||||
|
logger *slog.Logger
|
||||||
|
mu sync.Mutex
|
||||||
|
modelPath string
|
||||||
|
}
|
||||||
|
|
||||||
|
var onnxInitOnce sync.Once
|
||||||
|
var onnxReady bool
|
||||||
|
var onnxLibPath string
|
||||||
|
var cudaLibPath string
|
||||||
|
|
||||||
|
var onnxLibPaths = []string{
|
||||||
|
"/usr/lib/libonnxruntime.so",
|
||||||
|
"/usr/lib/libonnxruntime.so.1.24.2",
|
||||||
|
"/usr/local/lib/libonnxruntime.so",
|
||||||
|
"/usr/lib/x86_64-linux-gnu/libonnxruntime.so",
|
||||||
|
"/opt/onnxruntime/lib/libonnxruntime.so",
|
||||||
|
}
|
||||||
|
|
||||||
|
var cudaLibPaths = []string{
|
||||||
|
"/usr/lib/libonnxruntime_providers_cuda.so",
|
||||||
|
"/usr/local/lib/libonnxruntime_providers_cuda.so",
|
||||||
|
"/opt/onnxruntime/lib/libonnxruntime_providers_cuda.so",
|
||||||
|
}
|
||||||
|
|
||||||
|
func findONNXLibrary() string {
|
||||||
|
for _, path := range onnxLibPaths {
|
||||||
|
if _, err := os.Stat(path); err == nil {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func findCUDALibrary() string {
|
||||||
|
for _, path := range cudaLibPaths {
|
||||||
|
if _, err := os.Stat(path); err == nil {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewONNXEmbedder(modelPath, tokenizerPath string, dims int, logger *slog.Logger) (*ONNXEmbedder, error) {
|
||||||
|
// Check if model and tokenizer files exist
|
||||||
|
if _, err := os.Stat(modelPath); err != nil {
|
||||||
|
return nil, fmt.Errorf("ONNX model not found: %w", err)
|
||||||
|
}
|
||||||
|
if _, err := os.Stat(tokenizerPath); err != nil {
|
||||||
|
return nil, fmt.Errorf("tokenizer not found: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find ONNX library
|
||||||
|
onnxLibPath = findONNXLibrary()
|
||||||
|
if onnxLibPath == "" {
|
||||||
|
return nil, errors.New("ONNX runtime library not found in standard locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find CUDA provider library (optional)
|
||||||
|
cudaLibPath = findCUDALibrary()
|
||||||
|
if cudaLibPath == "" {
|
||||||
|
fmt.Println("WARNING: CUDA provider library not found, will use CPU")
|
||||||
|
}
|
||||||
|
emb := &ONNXEmbedder{
|
||||||
|
tokenizerPath: tokenizerPath,
|
||||||
|
dims: dims,
|
||||||
|
logger: logger,
|
||||||
|
modelPath: modelPath,
|
||||||
|
}
|
||||||
|
return emb, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) ensureInitialized() error {
|
||||||
|
if e.session != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
e.mu.Lock()
|
||||||
|
defer e.mu.Unlock()
|
||||||
|
if e.session != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// Load tokenizer lazily
|
||||||
|
if e.tokenizer == nil {
|
||||||
|
tok, err := pretrained.FromFile(e.tokenizerPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to load tokenizer: %w", err)
|
||||||
|
}
|
||||||
|
e.tokenizer = tok
|
||||||
|
}
|
||||||
|
onnxInitOnce.Do(func() {
|
||||||
|
onnxruntime_go.SetSharedLibraryPath(onnxLibPath)
|
||||||
|
if err := onnxruntime_go.InitializeEnvironment(); err != nil {
|
||||||
|
e.logger.Error("failed to initialize ONNX runtime", "error", err)
|
||||||
|
onnxReady = false
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Register CUDA provider if available
|
||||||
|
if cudaLibPath != "" {
|
||||||
|
if err := onnxruntime_go.RegisterExecutionProviderLibrary("CUDA", cudaLibPath); err != nil {
|
||||||
|
e.logger.Warn("failed to register CUDA provider", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
onnxReady = true
|
||||||
|
})
|
||||||
|
if !onnxReady {
|
||||||
|
return errors.New("ONNX runtime not ready")
|
||||||
|
}
|
||||||
|
// Create session options
|
||||||
|
opts, err := onnxruntime_go.NewSessionOptions()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create session options: %w", err)
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
_ = opts.Destroy()
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Try to add CUDA provider
|
||||||
|
useCUDA := cudaLibPath != ""
|
||||||
|
if useCUDA {
|
||||||
|
cudaOpts, err := onnxruntime_go.NewCUDAProviderOptions()
|
||||||
|
if err != nil {
|
||||||
|
e.logger.Warn("failed to create CUDA provider options, falling back to CPU", "error", err)
|
||||||
|
useCUDA = false
|
||||||
|
} else {
|
||||||
|
defer func() {
|
||||||
|
_ = cudaOpts.Destroy()
|
||||||
|
}()
|
||||||
|
if err := cudaOpts.Update(map[string]string{"device_id": "0"}); err != nil {
|
||||||
|
e.logger.Warn("failed to update CUDA options, falling back to CPU", "error", err)
|
||||||
|
useCUDA = false
|
||||||
|
} else if err := opts.AppendExecutionProviderCUDA(cudaOpts); err != nil {
|
||||||
|
e.logger.Warn("failed to append CUDA provider, falling back to CPU", "error", err)
|
||||||
|
useCUDA = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if useCUDA {
|
||||||
|
e.logger.Info("Using CUDA for ONNX inference")
|
||||||
|
} else {
|
||||||
|
e.logger.Info("Using CPU for ONNX inference")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create session with options
|
||||||
|
session, err := onnxruntime_go.NewDynamicAdvancedSession(
|
||||||
|
e.getModelPath(),
|
||||||
|
[]string{"input_ids", "attention_mask"},
|
||||||
|
[]string{"sentence_embedding"},
|
||||||
|
opts,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to create ONNX session: %w", err)
|
||||||
|
}
|
||||||
|
e.session = session
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) getModelPath() string {
|
||||||
|
return e.modelPath
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) Destroy() error {
|
||||||
|
e.mu.Lock()
|
||||||
|
defer e.mu.Unlock()
|
||||||
|
if e.session != nil {
|
||||||
|
if err := e.session.Destroy(); err != nil {
|
||||||
|
return fmt.Errorf("failed to destroy ONNX session: %w", err)
|
||||||
|
}
|
||||||
|
e.session = nil
|
||||||
|
e.logger.Info("ONNX session destroyed, VRAM freed")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) Embed(text string) ([]float32, error) {
|
||||||
|
if err := e.ensureInitialized(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// 1. Tokenize
|
||||||
|
encoding, err := e.tokenizer.EncodeSingle(text)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("tokenization failed: %w", err)
|
||||||
|
}
|
||||||
|
// 2. Convert to int64 and create attention mask
|
||||||
|
ids := encoding.Ids
|
||||||
|
inputIDs := make([]int64, len(ids))
|
||||||
|
attentionMask := make([]int64, len(ids))
|
||||||
|
for i, id := range ids {
|
||||||
|
inputIDs[i] = int64(id)
|
||||||
|
attentionMask[i] = 1
|
||||||
|
}
|
||||||
|
// 3. Create input tensors (shape: [1, seq_len])
|
||||||
|
seqLen := int64(len(inputIDs))
|
||||||
|
inputIDsTensor, err := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(1, seqLen),
|
||||||
|
inputIDs,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create input_ids tensor: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = inputIDsTensor.Destroy() }()
|
||||||
|
maskTensor, err := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(1, seqLen),
|
||||||
|
attentionMask,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create attention_mask tensor: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = maskTensor.Destroy() }()
|
||||||
|
// 4. Create output tensor
|
||||||
|
outputTensor, err := onnxruntime_go.NewEmptyTensor[float32](
|
||||||
|
onnxruntime_go.NewShape(1, int64(e.dims)),
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create output tensor: %w", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = outputTensor.Destroy() }()
|
||||||
|
// 5. Run inference
|
||||||
|
err = e.session.Run(
|
||||||
|
[]onnxruntime_go.Value{inputIDsTensor, maskTensor},
|
||||||
|
[]onnxruntime_go.Value{outputTensor},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("inference failed: %w", err)
|
||||||
|
}
|
||||||
|
// 6. Copy output data
|
||||||
|
outputData := outputTensor.GetData()
|
||||||
|
embedding := make([]float32, len(outputData))
|
||||||
|
copy(embedding, outputData)
|
||||||
|
return embedding, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ONNXEmbedder) EmbedSlice(texts []string) ([][]float32, error) {
|
||||||
|
if err := e.ensureInitialized(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
encodings := make([]*tokenizer.Encoding, len(texts))
|
||||||
|
maxLen := 0
|
||||||
|
for i, txt := range texts {
|
||||||
|
enc, err := e.tokenizer.EncodeSingle(txt)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
encodings[i] = enc
|
||||||
|
if l := len(enc.Ids); l > maxLen {
|
||||||
|
maxLen = l
|
||||||
|
}
|
||||||
|
}
|
||||||
|
batchSize := len(texts)
|
||||||
|
inputIDs := make([]int64, batchSize*maxLen)
|
||||||
|
attentionMask := make([]int64, batchSize*maxLen)
|
||||||
|
for i, enc := range encodings {
|
||||||
|
ids := enc.Ids
|
||||||
|
offset := i * maxLen
|
||||||
|
for j, id := range ids {
|
||||||
|
inputIDs[offset+j] = int64(id)
|
||||||
|
attentionMask[offset+j] = 1
|
||||||
|
}
|
||||||
|
// Remaining positions are already zero (padding)
|
||||||
|
}
|
||||||
|
// Create tensors with shape [batchSize, maxLen]
|
||||||
|
inputTensor, _ := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(int64(batchSize), int64(maxLen)),
|
||||||
|
inputIDs,
|
||||||
|
)
|
||||||
|
defer func() { _ = inputTensor.Destroy() }()
|
||||||
|
maskTensor, _ := onnxruntime_go.NewTensor[int64](
|
||||||
|
onnxruntime_go.NewShape(int64(batchSize), int64(maxLen)),
|
||||||
|
attentionMask,
|
||||||
|
)
|
||||||
|
defer func() { _ = maskTensor.Destroy() }()
|
||||||
|
outputTensor, _ := onnxruntime_go.NewEmptyTensor[float32](
|
||||||
|
onnxruntime_go.NewShape(int64(batchSize), int64(e.dims)),
|
||||||
|
)
|
||||||
|
defer func() { _ = outputTensor.Destroy() }()
|
||||||
|
err := e.session.Run(
|
||||||
|
[]onnxruntime_go.Value{inputTensor, maskTensor},
|
||||||
|
[]onnxruntime_go.Value{outputTensor},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// Extract embeddings per batch item
|
||||||
|
data := outputTensor.GetData()
|
||||||
|
embeddings := make([][]float32, batchSize)
|
||||||
|
for i := 0; i < batchSize; i++ {
|
||||||
|
start := i * e.dims
|
||||||
|
emb := make([]float32, e.dims)
|
||||||
|
copy(emb, data[start:start+e.dims])
|
||||||
|
embeddings[i] = emb
|
||||||
|
}
|
||||||
|
return embeddings, nil
|
||||||
|
}
|
||||||
|
|||||||
181
rag/extractors.go
Normal file
181
rag/extractors.go
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
package rag
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/PuerkitoBio/goquery"
|
||||||
|
"github.com/ledongthuc/pdf"
|
||||||
|
"github.com/yuin/goldmark"
|
||||||
|
"github.com/yuin/goldmark/extension"
|
||||||
|
"github.com/yuin/goldmark/parser"
|
||||||
|
"github.com/yuin/goldmark/renderer/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
func ExtractText(fpath string) (string, error) {
|
||||||
|
ext := strings.ToLower(path.Ext(fpath))
|
||||||
|
switch ext {
|
||||||
|
case ".txt":
|
||||||
|
return extractTextFromFile(fpath)
|
||||||
|
case ".md", ".markdown":
|
||||||
|
return extractTextFromMarkdown(fpath)
|
||||||
|
case ".html", ".htm":
|
||||||
|
return extractTextFromHtmlFile(fpath)
|
||||||
|
case ".epub":
|
||||||
|
return extractTextFromEpub(fpath)
|
||||||
|
case ".pdf":
|
||||||
|
return extractTextFromPdf(fpath)
|
||||||
|
default:
|
||||||
|
return "", fmt.Errorf("unsupported file format: %s", ext)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractTextFromFile(fpath string) (string, error) {
|
||||||
|
data, err := os.ReadFile(fpath)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(data), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractTextFromHtmlFile(fpath string) (string, error) {
|
||||||
|
data, err := os.ReadFile(fpath)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return extractTextFromHtmlContent(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
// non utf-8 encoding?
|
||||||
|
func extractTextFromHtmlContent(data []byte) (string, error) {
|
||||||
|
doc, err := goquery.NewDocumentFromReader(bytes.NewReader(data))
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Remove script and style tags
|
||||||
|
doc.Find("script, style, noscript").Each(func(i int, s *goquery.Selection) {
|
||||||
|
s.Remove()
|
||||||
|
})
|
||||||
|
// Get text and clean it
|
||||||
|
text := doc.Text()
|
||||||
|
// Collapse all whitespace (newlines, tabs, multiple spaces) into single spaces
|
||||||
|
cleaned := strings.Join(strings.Fields(text), " ")
|
||||||
|
return cleaned, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractTextFromMarkdown(fpath string) (string, error) {
|
||||||
|
data, err := os.ReadFile(fpath)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Convert markdown to HTML
|
||||||
|
md := goldmark.New(
|
||||||
|
goldmark.WithExtensions(extension.GFM),
|
||||||
|
goldmark.WithParserOptions(parser.WithAutoHeadingID()),
|
||||||
|
goldmark.WithRendererOptions(html.WithUnsafe()), // allow raw HTML if needed
|
||||||
|
)
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if err := md.Convert(data, &buf); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Now extract text from the resulting HTML (using goquery or similar)
|
||||||
|
return extractTextFromHtmlContent(buf.Bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractTextFromEpub(fpath string) (string, error) {
|
||||||
|
r, err := zip.OpenReader(fpath)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to open epub: %w", err)
|
||||||
|
}
|
||||||
|
defer r.Close()
|
||||||
|
var sb strings.Builder
|
||||||
|
for _, f := range r.File {
|
||||||
|
ext := strings.ToLower(path.Ext(f.Name))
|
||||||
|
if ext != ".xhtml" && ext != ".html" && ext != ".htm" && ext != ".xml" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip manifest, toc, ncx files - they don't contain book content
|
||||||
|
nameLower := strings.ToLower(f.Name)
|
||||||
|
if strings.Contains(nameLower, "toc") || strings.Contains(nameLower, "nav") ||
|
||||||
|
strings.Contains(nameLower, "manifest") || strings.Contains(nameLower, ".opf") ||
|
||||||
|
strings.HasSuffix(nameLower, ".ncx") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
rc, err := f.Open()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if sb.Len() > 0 {
|
||||||
|
sb.WriteString("\n\n")
|
||||||
|
}
|
||||||
|
sb.WriteString(f.Name)
|
||||||
|
sb.WriteString("\n")
|
||||||
|
|
||||||
|
buf, readErr := io.ReadAll(rc)
|
||||||
|
rc.Close()
|
||||||
|
if readErr == nil {
|
||||||
|
sb.WriteString(stripHTML(string(buf)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if sb.Len() == 0 {
|
||||||
|
return "", errors.New("no content extracted from epub")
|
||||||
|
}
|
||||||
|
return sb.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func stripHTML(html string) string {
|
||||||
|
var sb strings.Builder
|
||||||
|
inTag := false
|
||||||
|
for _, r := range html {
|
||||||
|
switch r {
|
||||||
|
case '<':
|
||||||
|
inTag = true
|
||||||
|
case '>':
|
||||||
|
inTag = false
|
||||||
|
default:
|
||||||
|
if !inTag {
|
||||||
|
sb.WriteRune(r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractTextFromPdf(fpath string) (string, error) {
|
||||||
|
_, err := exec.LookPath("pdftotext")
|
||||||
|
if err == nil {
|
||||||
|
out, err := exec.Command("pdftotext", "-layout", fpath, "-").Output()
|
||||||
|
if err == nil && len(out) > 0 {
|
||||||
|
return string(out), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return extractTextFromPdfPureGo(fpath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func extractTextFromPdfPureGo(fpath string) (string, error) {
|
||||||
|
df, r, err := pdf.Open(fpath)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to open pdf: %w", err)
|
||||||
|
}
|
||||||
|
defer df.Close()
|
||||||
|
textReader, err := r.GetPlainText()
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to extract text from pdf: %w", err)
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
_, err = io.Copy(&buf, textReader)
|
||||||
|
if err != nil {
|
||||||
|
return "", fmt.Errorf("failed to read pdf text: %w", err)
|
||||||
|
}
|
||||||
|
return buf.String(), nil
|
||||||
|
}
|
||||||
1253
rag/rag.go
1253
rag/rag.go
File diff suppressed because it is too large
Load Diff
409
rag/rag_integration_test.go
Normal file
409
rag/rag_integration_test.go
Normal file
@@ -0,0 +1,409 @@
|
|||||||
|
package rag
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"gf-lt/config"
|
||||||
|
"gf-lt/models"
|
||||||
|
"gf-lt/storage"
|
||||||
|
"log/slog"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
_ "github.com/glebarez/go-sqlite"
|
||||||
|
"github.com/jmoiron/sqlx"
|
||||||
|
)
|
||||||
|
|
||||||
|
// mockEmbedder returns zero vectors of a fixed dimension.
|
||||||
|
type mockEmbedder struct {
|
||||||
|
dim int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mockEmbedder) Embed(text string) ([]float32, error) {
|
||||||
|
vec := make([]float32, m.dim)
|
||||||
|
return vec, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mockEmbedder) EmbedSlice(texts []string) ([][]float32, error) {
|
||||||
|
vecs := make([][]float32, len(texts))
|
||||||
|
for i := range vecs {
|
||||||
|
vecs[i] = make([]float32, m.dim)
|
||||||
|
}
|
||||||
|
return vecs, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// dummyStore implements storage.FullRepo with a minimal set of methods.
|
||||||
|
// Only DB() is used by VectorStorage; other methods return empty values.
|
||||||
|
type dummyStore struct {
|
||||||
|
db *sqlx.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d dummyStore) DB() *sqlx.DB { return d.db }
|
||||||
|
|
||||||
|
// ChatHistory methods
|
||||||
|
func (d dummyStore) ListChats() ([]models.Chat, error) { return nil, nil }
|
||||||
|
func (d dummyStore) GetChatByID(id uint32) (*models.Chat, error) { return nil, nil }
|
||||||
|
func (d dummyStore) GetChatByChar(char string) ([]models.Chat, error) { return nil, nil }
|
||||||
|
func (d dummyStore) GetLastChat() (*models.Chat, error) { return nil, nil }
|
||||||
|
func (d dummyStore) GetLastChatByAgent(agent string) (*models.Chat, error) { return nil, nil }
|
||||||
|
func (d dummyStore) UpsertChat(chat *models.Chat) (*models.Chat, error) { return chat, nil }
|
||||||
|
func (d dummyStore) RemoveChat(id uint32) error { return nil }
|
||||||
|
func (d dummyStore) ChatGetMaxID() (uint32, error) { return 0, nil }
|
||||||
|
|
||||||
|
// Memories methods
|
||||||
|
func (d dummyStore) Memorise(m *models.Memory) (*models.Memory, error) { return m, nil }
|
||||||
|
func (d dummyStore) Recall(agent, topic string) (string, error) { return "", nil }
|
||||||
|
func (d dummyStore) RecallTopics(agent string) ([]string, error) { return nil, nil }
|
||||||
|
func (d dummyStore) Forget(agent, topic string) error { return nil }
|
||||||
|
|
||||||
|
// VectorRepo methods (not used but required by interface)
|
||||||
|
func (d dummyStore) WriteVector(row *models.VectorRow) error { return nil }
|
||||||
|
func (d dummyStore) SearchClosest(q []float32, limit int) ([]models.VectorRow, error) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
func (d dummyStore) ListFiles() ([]string, error) { return nil, nil }
|
||||||
|
func (d dummyStore) RemoveEmbByFileName(filename string) error { return nil }
|
||||||
|
|
||||||
|
var _ storage.FullRepo = dummyStore{}
|
||||||
|
|
||||||
|
// setupTestRAG creates an in‑memory SQLite database, creates the necessary tables,
|
||||||
|
// inserts the provided chunks, and returns a RAG instance with a mock embedder.
|
||||||
|
func setupTestRAG(t *testing.T, chunks []*models.VectorRow) (*RAG, error) {
|
||||||
|
t.Helper()
|
||||||
|
db, err := sqlx.Open("sqlite", ":memory:")
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("open in‑memory db: %w", err)
|
||||||
|
}
|
||||||
|
// Create the required tables (embeddings_768 and fts_embeddings).
|
||||||
|
// Use the same schema as production.
|
||||||
|
_, err = db.Exec(`
|
||||||
|
CREATE TABLE embeddings_768 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL DEFAULT ''
|
||||||
|
);
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("create embeddings table: %w", err)
|
||||||
|
}
|
||||||
|
_, err = db.Exec(`
|
||||||
|
CREATE VIRTUAL TABLE fts_embeddings USING fts5(
|
||||||
|
slug UNINDEXED,
|
||||||
|
raw_text,
|
||||||
|
filename UNINDEXED,
|
||||||
|
embedding_size UNINDEXED,
|
||||||
|
tokenize='porter unicode61'
|
||||||
|
);
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("create FTS table: %w", err)
|
||||||
|
}
|
||||||
|
// Create a logger that discards output.
|
||||||
|
logger := slog.New(slog.NewTextHandler(nil, &slog.HandlerOptions{Level: slog.LevelError}))
|
||||||
|
store := dummyStore{db: db}
|
||||||
|
// Create config with embedding dimension 768.
|
||||||
|
cfg := &config.Config{
|
||||||
|
EmbedDims: 768,
|
||||||
|
RAGWordLimit: 250,
|
||||||
|
RAGOverlapWords: 25,
|
||||||
|
RAGBatchSize: 1,
|
||||||
|
}
|
||||||
|
// Create a RAG instance using New, which will create an embedder based on config.
|
||||||
|
// We'll override the embedder afterwards via reflection.
|
||||||
|
rag, err := New(logger, store, cfg)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("create RAG: %w", err)
|
||||||
|
}
|
||||||
|
// Replace the embedder with our mock.
|
||||||
|
rag.SetEmbedderForTesting(&mockEmbedder{dim: cfg.EmbedDims})
|
||||||
|
// Insert the provided chunks using the storage directly.
|
||||||
|
if len(chunks) > 0 {
|
||||||
|
// Ensure each chunk has embeddings of correct dimension (zero vector).
|
||||||
|
for _, chunk := range chunks {
|
||||||
|
if len(chunk.Embeddings) != cfg.EmbedDims {
|
||||||
|
chunk.Embeddings = make([]float32, cfg.EmbedDims)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err = rag.storage.WriteVectors(chunks)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("write test chunks: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return rag, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// createTestChunks returns a slice of VectorRow representing the target chunk
// (kjv_bible.epub_1786_0), several bald‑related noise chunks, and unrelated chunks.
// Every row's Embeddings field is left nil; callers are expected to populate
// the vectors (e.g. with zero vectors) before indexing.
func createTestChunks() []*models.VectorRow {
	// Target chunk: 2 Kings 2:23‑24 containing "bald head" and "two she bears".
	targetRaw := `And he said, Ye shall not send.


2:17 And when they urged him till he was ashamed, he said, Send. They sent
therefore fifty men; and they sought three days, but found him not.


2:18 And when they came again to him, (for he tarried at Jericho,) he said unto
them, Did I not say unto you, Go not? 2:19 And the men of the city said unto
Elisha, Behold, I pray thee, the situation of this city is pleasant, as my lord
seeth: but the water is naught, and the ground barren.


2:20 And he said, Bring me a new cruse, and put salt therein. And they brought
it to him.


2:21 And he went forth unto the spring of the waters, and cast the salt in
there, and said, Thus saith the LORD, I have healed these waters; there shall
not be from thence any more death or barren land.


2:22 So the waters were healed unto this day, according to the saying of Elisha
which he spake.


2:23 And he went up from thence unto Bethel: and as he was going up by the way,
there came forth little children out of the city, and mocked him, and said unto
him, Go up, thou bald head; go up, thou bald head.


2:24 And he turned back, and looked on them, and cursed them in the name of the
LORD. And there came forth two she bears out of the wood, and tare forty and
two children of them.`
	// Noise chunk 1: Leviticus containing "bald locust"
	noise1Raw := `11:12 Whatsoever hath no fins nor scales in the waters, that shall be an
abomination unto you.


11:13 And these are they which ye shall have in abomination among the fowls;
they shall not be eaten, they are an abomination: the eagle, and the ossifrage,
and the ospray, 11:14 And the vulture, and the kite after his kind; 11:15 Every
raven after his kind; 11:16 And the owl, and the night hawk, and the cuckow,
and the hawk after his kind, 11:17 And the little owl, and the cormorant, and
the great owl, 11:18 And the swan, and the pelican, and the gier eagle, 11:19
And the stork, the heron after her kind, and the lapwing, and the bat.


11:20 All fowls that creep, going upon all four, shall be an abomination unto
you.


11:21 Yet these may ye eat of every flying creeping thing that goeth upon all
four, which have legs above their feet, to leap withal upon the earth; 11:22
Even these of them ye may eat; the locust after his kind, and the bald locust
after his kind, and the beetle after his kind, and the grasshopper after his
kind.


11:23 But all other flying creeping things, which have four feet, shall be an
abomination unto you.


11:24 And for these ye shall be unclean: whosoever toucheth the carcase of them
shall be unclean until the even.`
	// Noise chunk 2: Leviticus containing "bald"
	noise2Raw := `11:13 And these are they which ye shall have in abomination among the fowls;
they shall not be eaten, they are an abomination: the eagle, and the ossifrage,
and the ospray, 11:14 And the vulture, and the kite after his kind; 11:15 Every
raven after his kind; 11:16 And the owl, and the night hawk, and the cuckow,
and the hawk after his kind, 11:17 And the little owl, and the cormorant, and
the great owl, 11:18 And the swan, and the pelican, and the gier eagle, 11:19
And the stork, the heron after her kind, and the lapwing, and the bat.


11:20 All fowls that creep, going upon all four, shall be an abomination unto
you.


11:21 Yet these may ye eat of every flying creeping thing that goeth upon all
four, which have legs above their feet, to leap withal upon the earth; 11:22
Even these of them ye may eat; the locust after his kind, and the bald locust
after his kind, and the beetle after his kind, and the grasshopper after his
kind.


11:23 But all other flying creeping things, which have four feet, shall be an
abomination unto you.


11:24 And for these ye shall be unclean: whosoever toucheth the carcase of them
shall be unclean until the even.`
	// Additional Leviticus noise chunks (simulating 28 bald-related chunks)
	// Using variations of the same text with different slugs
	leviticusSlugs := []string{
		"kjv_bible.epub_564_0",
		"kjv_bible.epub_565_0",
		"kjv_bible.epub_579_0",
		"kjv_bible.epub_580_0",
		"kjv_bible.epub_581_0",
		"kjv_bible.epub_582_0",
		"kjv_bible.epub_583_0",
		"kjv_bible.epub_584_0",
		"kjv_bible.epub_585_0",
		"kjv_bible.epub_586_0",
		"kjv_bible.epub_587_0",
		"kjv_bible.epub_588_0",
		"kjv_bible.epub_589_0",
		"kjv_bible.epub_590_0",
	}
	// Texts are cycled over the slugs above (i % len(leviticusTexts)).
	leviticusTexts := []string{
		noise1Raw,
		noise2Raw,
		`13:40 And the man whose hair is fallen off his head, he is bald; yet is he
clean.


13:41 And he that hath his hair fallen off from the part of his head toward his
face, he is forehead bald; yet is he clean.`,
		`13:42 And if there be in the bald head, or bald forehead, a white reddish sore;
it is a leprosy sprung up in his bald head, or his bald forehead.`,
		`13:43 Then the priest shall look upon it: and, behold, if the rising of the
sore be white reddish in his bald head, or in his bald forehead, as the leprosy
appearedh in the skin of the flesh;`,
		`13:44 He is a leprous man, he is unclean: the priest shall pronounce him utterly
unclean; his plague is in his head.`,
		`13:45 And the leper in whom the plague is, his clothes shall be rent, and his
head bare, and he shall put a covering upon his upper lip, and shall cry,
Unclean, unclean.`,
		`13:46 All the days wherein the plague shall be in him he shall be defiled; he
is unclean: he shall dwell alone; without the camp shall his habitation be.`,
		`13:47 The garment also that the plague of leprosy is in, whether it be a woollen
garment, or a linen garment;`,
		`13:48 Whether it be in the warp, or woof; of linen, or of woollen; whether in a
skin, or in any thing made of skin;`,
		`13:49 And if the plague be greenish or reddish in the garment, or in the skin,
either in the warp, or in the woof, or in any thing of skin; it is a plague of
leprosy, and shall be shewed unto the priest:`,
		`13:50 And the priest shall look upon the plague, and shut up it that hath the
plague seven days:`,
		`13:51 And he shall look on the plague on the seventh day: if the plague be spread
in the garment, either in the warp, or in the woof, or in a skin, or in any work
that is made of skin; the plague is a fretting leprosy; it is unclean.`,
		`13:52 He shall therefore burn that garment, whether warp or woof, in woollen or
in linen, or any thing of skin, wherein the plague is: for it is a fretting
leprosy; it shall be burnt in the fire.`,
	}
	// Unrelated chunk 1: ghost_7.txt_777_0
	unrelated1Raw := `Doesn’t he have any pride as a hunter?!

I didn’t see what other choice I had. I would just have to grovel and be ready to flee at any given moment.
The Hidden Curse clan house was in the central region of the imperial capital. It was a high-class area with extraordinary property values that hosted the residences of people like Lord Gladis. This district was near the Imperial Castle, though “near” was a
relative term as it was still a few kilometers away.

The clan house was made of brick and conformed to an older style of architecture.`
	// Unrelated chunk 2: ghost_7.txt_778_0
	unrelated2Raw := `I would just have to grovel and be ready to flee at any given moment.
The Hidden Curse clan house was in the central region of the imperial capital. It was a high-class area with extraordinary property values that hosted the residences of people like Lord Gladis. This district was near the Imperial Castle, though “near” was a
relative term as it was still a few kilometers away.

The clan house was made of brick and conformed to an older style of architecture. Nearly everyone knew about this mansion and its clock tower. It stood tall over the neighboring mansions and rumor had it that you could see the whole capital from the top. It
spoke to this clan’s renown and history that they were able to get away with building something that dwarfed the mansions of the nobility.`
	// The target chunk always comes first.
	chunks := []*models.VectorRow{
		{
			Slug:       "kjv_bible.epub_1786_0",
			RawText:    targetRaw,
			FileName:   "kjv_bible.epub",
			Embeddings: nil, // will be filled with zero vector later
		},
	}
	// Add Leviticus noise chunks
	for i, slug := range leviticusSlugs {
		// Cycle through the available texts when there are more slugs than texts.
		text := leviticusTexts[i%len(leviticusTexts)]
		chunks = append(chunks, &models.VectorRow{
			Slug:       slug,
			RawText:    text,
			FileName:   "kjv_bible.epub",
			Embeddings: nil,
		})
	}
	// Add unrelated chunks
	chunks = append(chunks,
		&models.VectorRow{
			Slug:       "ghost_7.txt_777_0",
			RawText:    unrelated1Raw,
			FileName:   "ghost_7.txt",
			Embeddings: nil,
		},
		&models.VectorRow{
			Slug:       "ghost_7.txt_778_0",
			RawText:    unrelated2Raw,
			FileName:   "ghost_7.txt",
			Embeddings: nil,
		},
	)
	return chunks
}
|
||||||
|
func assertTargetInTopN(t *testing.T, results []models.VectorRow, topN int) bool {
|
||||||
|
t.Helper()
|
||||||
|
for i, row := range results {
|
||||||
|
if i >= topN {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if row.Slug == "kjv_bible.epub_1786_0" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBiblicalQuery(t *testing.T) {
|
||||||
|
chunks := createTestChunks()
|
||||||
|
rag, err := setupTestRAG(t, chunks)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("setup failed: %v", err)
|
||||||
|
}
|
||||||
|
query := "bald prophet and two she bears"
|
||||||
|
results, err := rag.Search(query, 10)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("search failed: %v", err)
|
||||||
|
}
|
||||||
|
// The target chunk should be in the top results.
|
||||||
|
if !assertTargetInTopN(t, results, 5) {
|
||||||
|
t.Errorf("target chunk not found in top 5 results for query %q", query)
|
||||||
|
t.Logf("results slugs: %v", func() []string {
|
||||||
|
slugs := make([]string, len(results))
|
||||||
|
for i, r := range results {
|
||||||
|
slugs[i] = r.Slug
|
||||||
|
}
|
||||||
|
return slugs
|
||||||
|
}())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryVariations(t *testing.T) {
|
||||||
|
chunks := createTestChunks()
|
||||||
|
rag, err := setupTestRAG(t, chunks)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("setup failed: %v", err)
|
||||||
|
}
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
query string
|
||||||
|
topN int
|
||||||
|
}{
|
||||||
|
{"she bears", "she bears", 5},
|
||||||
|
{"bald head", "bald head", 5},
|
||||||
|
{"two she bears out of the wood", "two she bears out of the wood", 5},
|
||||||
|
{"bald prophet", "bald prophet", 10},
|
||||||
|
{"go up thou bald head", "\"go up thou bald head\"", 5},
|
||||||
|
{"two she bears", "\"two she bears\"", 5},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results, err := rag.Search(tt.query, 10)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("search failed: %v", err)
|
||||||
|
}
|
||||||
|
if !assertTargetInTopN(t, results, tt.topN) {
|
||||||
|
t.Errorf("target chunk not found in top %d results for query %q", tt.topN, tt.query)
|
||||||
|
t.Logf("results slugs: %v", func() []string {
|
||||||
|
slugs := make([]string, len(results))
|
||||||
|
for i, r := range results {
|
||||||
|
slugs[i] = r.Slug
|
||||||
|
}
|
||||||
|
return slugs
|
||||||
|
}())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
129
rag/rag_real_test.go
Normal file
129
rag/rag_real_test.go
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
package rag
|
||||||
|
|
||||||
|
import (
	"io"
	"log/slog"
	"os"
	"path/filepath"
	"testing"

	"gf-lt/config"
	"gf-lt/storage"
)
|
||||||
|
|
||||||
|
func TestRealBiblicalQuery(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping real embedder test in short mode")
|
||||||
|
}
|
||||||
|
// Check if the embedder model exists
|
||||||
|
modelPath := filepath.Join("..", "onnx", "embedgemma", "model_q4.onnx")
|
||||||
|
if _, err := os.Stat(modelPath); os.IsNotExist(err) {
|
||||||
|
t.Skipf("embedder model not found at %s; skipping real embedder test", modelPath)
|
||||||
|
}
|
||||||
|
tokenizerPath := filepath.Join("..", "onnx", "embedgemma", "tokenizer.json")
|
||||||
|
dbPath := filepath.Join("..", "gflt.db")
|
||||||
|
if _, err := os.Stat(dbPath); os.IsNotExist(err) {
|
||||||
|
t.Skipf("database not found at %s; skipping real embedder test", dbPath)
|
||||||
|
}
|
||||||
|
cfg := &config.Config{
|
||||||
|
EmbedModelPath: modelPath,
|
||||||
|
EmbedTokenizerPath: tokenizerPath,
|
||||||
|
EmbedDims: 768,
|
||||||
|
RAGWordLimit: 250,
|
||||||
|
RAGOverlapWords: 25,
|
||||||
|
RAGBatchSize: 1,
|
||||||
|
}
|
||||||
|
logger := slog.New(slog.NewTextHandler(nil, &slog.HandlerOptions{Level: slog.LevelError}))
|
||||||
|
store := storage.NewProviderSQL(dbPath, logger)
|
||||||
|
if store == nil {
|
||||||
|
t.Fatal("failed to create storage provider")
|
||||||
|
}
|
||||||
|
rag, err := New(logger, store, cfg)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed to create RAG instance: %v", err)
|
||||||
|
}
|
||||||
|
t.Cleanup(func() { rag.Destroy() })
|
||||||
|
query := "bald prophet and two she bears"
|
||||||
|
results, err := rag.Search(query, 30)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("search failed: %v", err)
|
||||||
|
}
|
||||||
|
found := false
|
||||||
|
for i, row := range results {
|
||||||
|
if row.Slug == "kjv_bible.epub_1786_0" {
|
||||||
|
found = true
|
||||||
|
t.Logf("target chunk found at rank %d", i+1)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
t.Errorf("target chunk not found in search results for query %q", query)
|
||||||
|
t.Logf("results slugs:")
|
||||||
|
for i, r := range results {
|
||||||
|
t.Logf("%d: %s", i+1, r.Slug)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRealQueryVariations(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping real embedder test in short mode")
|
||||||
|
}
|
||||||
|
modelPath := filepath.Join("..", "onnx", "embedgemma", "model_q4.onnx")
|
||||||
|
if _, err := os.Stat(modelPath); os.IsNotExist(err) {
|
||||||
|
t.Skipf("embedder model not found at %s; skipping real embedder test", modelPath)
|
||||||
|
}
|
||||||
|
tokenizerPath := filepath.Join("..", "onnx", "embedgemma", "tokenizer.json")
|
||||||
|
dbPath := filepath.Join("..", "gflt.db")
|
||||||
|
if _, err := os.Stat(dbPath); os.IsNotExist(err) {
|
||||||
|
t.Skipf("database not found at %s; skipping real embedder test", dbPath)
|
||||||
|
}
|
||||||
|
cfg := &config.Config{
|
||||||
|
EmbedModelPath: modelPath,
|
||||||
|
EmbedTokenizerPath: tokenizerPath,
|
||||||
|
EmbedDims: 768,
|
||||||
|
RAGWordLimit: 250,
|
||||||
|
RAGOverlapWords: 25,
|
||||||
|
RAGBatchSize: 1,
|
||||||
|
}
|
||||||
|
logger := slog.New(slog.NewTextHandler(nil, &slog.HandlerOptions{Level: slog.LevelError}))
|
||||||
|
store := storage.NewProviderSQL(dbPath, logger)
|
||||||
|
if store == nil {
|
||||||
|
t.Fatal("failed to create storage provider")
|
||||||
|
}
|
||||||
|
rag, err := New(logger, store, cfg)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed to create RAG instance: %v", err)
|
||||||
|
}
|
||||||
|
t.Cleanup(func() { rag.Destroy() })
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
query string
|
||||||
|
}{
|
||||||
|
{"she bears", "she bears"},
|
||||||
|
{"bald head", "bald head"},
|
||||||
|
{"two she bears out of the wood", "two she bears out of the wood"},
|
||||||
|
{"bald prophet", "bald prophet"},
|
||||||
|
{"go up thou bald head", "\"go up thou bald head\""},
|
||||||
|
{"two she bears", "\"two she bears\""},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
results, err := rag.Search(tt.query, 10)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("search failed: %v", err)
|
||||||
|
}
|
||||||
|
found := false
|
||||||
|
for _, row := range results {
|
||||||
|
if row.Slug == "kjv_bible.epub_1786_0" {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
t.Errorf("target chunk not found for query %q", tt.query)
|
||||||
|
for i, r := range results {
|
||||||
|
t.Logf("%d: %s", i+1, r.Slug)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
151
rag/rag_test.go
Normal file
151
rag/rag_test.go
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
package rag
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestDetectPhrases(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
query string
|
||||||
|
expect []string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
query: "bald prophet and two she bears",
|
||||||
|
expect: []string{"bald prophet", "two she", "two she bears", "she bears"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
query: "she bears",
|
||||||
|
expect: []string{"she bears"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
query: "the quick brown fox",
|
||||||
|
expect: []string{"quick brown", "quick brown fox", "brown fox"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
query: "in the house", // stop words
|
||||||
|
expect: []string{}, // "in" and "the" are stop words
|
||||||
|
},
|
||||||
|
{
|
||||||
|
query: "a", // short
|
||||||
|
expect: []string{},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
got := detectPhrases(tt.query)
|
||||||
|
if len(got) != len(tt.expect) {
|
||||||
|
t.Errorf("detectPhrases(%q) = %v, want %v", tt.query, got, tt.expect)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for i := range got {
|
||||||
|
if got[i] != tt.expect[i] {
|
||||||
|
t.Errorf("detectPhrases(%q) = %v, want %v", tt.query, got, tt.expect)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCountPhraseMatches(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
text string
|
||||||
|
query string
|
||||||
|
expect int
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
text: "two she bears came out of the wood",
|
||||||
|
query: "she bears",
|
||||||
|
expect: 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
text: "bald head and she bears",
|
||||||
|
query: "bald prophet and two she bears",
|
||||||
|
expect: 1, // only "she bears" matches
|
||||||
|
},
|
||||||
|
{
|
||||||
|
text: "no match here",
|
||||||
|
query: "she bears",
|
||||||
|
expect: 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
text: "she bears and bald prophet",
|
||||||
|
query: "bald prophet she bears",
|
||||||
|
expect: 2, // "she bears" and "bald prophet"
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
got := countPhraseMatches(tt.text, tt.query)
|
||||||
|
if got != tt.expect {
|
||||||
|
t.Errorf("countPhraseMatches(%q, %q) = %d, want %d", tt.text, tt.query, got, tt.expect)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAreSlugsAdjacent(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
slug1 string
|
||||||
|
slug2 string
|
||||||
|
expect bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
slug1: "kjv_bible.epub_1786_0",
|
||||||
|
slug2: "kjv_bible.epub_1787_0",
|
||||||
|
expect: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
slug1: "kjv_bible.epub_1787_0",
|
||||||
|
slug2: "kjv_bible.epub_1786_0",
|
||||||
|
expect: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
slug1: "kjv_bible.epub_1786_0",
|
||||||
|
slug2: "kjv_bible.epub_1788_0",
|
||||||
|
expect: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
slug1: "otherfile.txt_1_0",
|
||||||
|
slug2: "kjv_bible.epub_1786_0",
|
||||||
|
expect: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
slug1: "file_1_0",
|
||||||
|
slug2: "file_1_1",
|
||||||
|
expect: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
slug1: "file_1_0",
|
||||||
|
slug2: "file_2_0", // different batch
|
||||||
|
expect: true, // sequential batches with same chunk index are adjacent
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
got := areSlugsAdjacent(tt.slug1, tt.slug2)
|
||||||
|
if got != tt.expect {
|
||||||
|
t.Errorf("areSlugsAdjacent(%q, %q) = %v, want %v", tt.slug1, tt.slug2, got, tt.expect)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestParseSlugIndices(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
slug string
|
||||||
|
wantBatch int
|
||||||
|
wantChunk int
|
||||||
|
wantOk bool
|
||||||
|
}{
|
||||||
|
{"kjv_bible.epub_1786_0", 1786, 0, true},
|
||||||
|
{"file_1_5", 1, 5, true},
|
||||||
|
{"no_underscore", 0, 0, false},
|
||||||
|
{"file_abc_def", 0, 0, false},
|
||||||
|
{"file_123_456_extra", 456, 0, false}, // regex matches last two numbers
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
batch, chunk, ok := parseSlugIndices(tt.slug)
|
||||||
|
if ok != tt.wantOk {
|
||||||
|
t.Errorf("parseSlugIndices(%q) ok = %v, want %v", tt.slug, ok, tt.wantOk)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if ok && (batch != tt.wantBatch || chunk != tt.wantChunk) {
|
||||||
|
t.Errorf("parseSlugIndices(%q) = (%d, %d), want (%d, %d)", tt.slug, batch, chunk, tt.wantBatch, tt.wantChunk)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
244
rag/storage.go
244
rag/storage.go
@@ -1,6 +1,7 @@
|
|||||||
package rag
|
package rag
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"database/sql"
|
||||||
"encoding/binary"
|
"encoding/binary"
|
||||||
"fmt"
|
"fmt"
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
@@ -28,7 +29,6 @@ func NewVectorStorage(logger *slog.Logger, store storage.FullRepo) *VectorStorag
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// SerializeVector converts []float32 to binary blob
|
// SerializeVector converts []float32 to binary blob
|
||||||
func SerializeVector(vec []float32) []byte {
|
func SerializeVector(vec []float32) []byte {
|
||||||
buf := make([]byte, len(vec)*4) // 4 bytes per float32
|
buf := make([]byte, len(vec)*4) // 4 bytes per float32
|
||||||
@@ -63,20 +63,120 @@ func (vs *VectorStorage) WriteVector(row *models.VectorRow) error {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
embeddingSize := len(row.Embeddings)
|
||||||
|
// Start transaction
|
||||||
|
tx, err := vs.sqlxDB.Beginx()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
// Serialize the embeddings to binary
|
// Serialize the embeddings to binary
|
||||||
serializedEmbeddings := SerializeVector(row.Embeddings)
|
serializedEmbeddings := SerializeVector(row.Embeddings)
|
||||||
|
|
||||||
query := fmt.Sprintf(
|
query := fmt.Sprintf(
|
||||||
"INSERT INTO %s (embeddings, slug, raw_text, filename) VALUES (?, ?, ?, ?)",
|
"INSERT INTO %s (embeddings, slug, raw_text, filename) VALUES (?, ?, ?, ?)",
|
||||||
tableName,
|
tableName,
|
||||||
)
|
)
|
||||||
|
if _, err := tx.Exec(query, serializedEmbeddings, row.Slug, row.RawText, row.FileName); err != nil {
|
||||||
if _, err := vs.sqlxDB.Exec(query, serializedEmbeddings, row.Slug, row.RawText, row.FileName); err != nil {
|
|
||||||
vs.logger.Error("failed to write vector", "error", err, "slug", row.Slug)
|
vs.logger.Error("failed to write vector", "error", err, "slug", row.Slug)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
// Insert into FTS table
|
||||||
|
ftsQuery := `INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size) VALUES (?, ?, ?, ?)`
|
||||||
|
if _, err := tx.Exec(ftsQuery, row.Slug, row.RawText, row.FileName, embeddingSize); err != nil {
|
||||||
|
vs.logger.Error("failed to write to FTS table", "error", err, "slug", row.Slug)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = tx.Commit()
|
||||||
|
if err != nil {
|
||||||
|
vs.logger.Error("failed to commit transaction", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteVectors stores multiple embedding vectors in a single transaction
|
||||||
|
func (vs *VectorStorage) WriteVectors(rows []*models.VectorRow) error {
|
||||||
|
if len(rows) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// SQLite has limit of 999 parameters per statement, each row uses 4 parameters
|
||||||
|
const maxBatchSize = 200 // 200 * 4 = 800 < 999
|
||||||
|
if len(rows) > maxBatchSize {
|
||||||
|
// Process in chunks
|
||||||
|
for i := 0; i < len(rows); i += maxBatchSize {
|
||||||
|
end := i + maxBatchSize
|
||||||
|
if end > len(rows) {
|
||||||
|
end = len(rows)
|
||||||
|
}
|
||||||
|
if err := vs.WriteVectors(rows[i:end]); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// All rows should have same embedding size (same model)
|
||||||
|
firstSize := len(rows[0].Embeddings)
|
||||||
|
for i, row := range rows {
|
||||||
|
if len(row.Embeddings) != firstSize {
|
||||||
|
return fmt.Errorf("embedding size mismatch: row %d has size %d, expected %d", i, len(row.Embeddings), firstSize)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tableName, err := vs.getTableName(rows[0].Embeddings)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Start transaction
|
||||||
|
tx, err := vs.sqlxDB.Beginx()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Build batch insert for embeddings table
|
||||||
|
embeddingPlaceholders := make([]string, 0, len(rows))
|
||||||
|
embeddingArgs := make([]any, 0, len(rows)*4)
|
||||||
|
for _, row := range rows {
|
||||||
|
embeddingPlaceholders = append(embeddingPlaceholders, "(?, ?, ?, ?)")
|
||||||
|
embeddingArgs = append(embeddingArgs, SerializeVector(row.Embeddings), row.Slug, row.RawText, row.FileName)
|
||||||
|
}
|
||||||
|
embeddingQuery := fmt.Sprintf(
|
||||||
|
"INSERT INTO %s (embeddings, slug, raw_text, filename) VALUES %s",
|
||||||
|
tableName,
|
||||||
|
strings.Join(embeddingPlaceholders, ", "),
|
||||||
|
)
|
||||||
|
if _, err := tx.Exec(embeddingQuery, embeddingArgs...); err != nil {
|
||||||
|
vs.logger.Error("failed to write vectors batch", "error", err, "batch_size", len(rows))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// Build batch insert for FTS table
|
||||||
|
ftsPlaceholders := make([]string, 0, len(rows))
|
||||||
|
ftsArgs := make([]any, 0, len(rows)*4)
|
||||||
|
embeddingSize := len(rows[0].Embeddings)
|
||||||
|
for _, row := range rows {
|
||||||
|
ftsPlaceholders = append(ftsPlaceholders, "(?, ?, ?, ?)")
|
||||||
|
ftsArgs = append(ftsArgs, row.Slug, row.RawText, row.FileName, embeddingSize)
|
||||||
|
}
|
||||||
|
ftsQuery := "INSERT INTO fts_embeddings (slug, raw_text, filename, embedding_size) VALUES " +
|
||||||
|
strings.Join(ftsPlaceholders, ", ")
|
||||||
|
if _, err := tx.Exec(ftsQuery, ftsArgs...); err != nil {
|
||||||
|
vs.logger.Error("failed to write FTS batch", "error", err, "batch_size", len(rows))
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = tx.Commit()
|
||||||
|
if err != nil {
|
||||||
|
vs.logger.Error("failed to commit transaction", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
vs.logger.Debug("wrote vectors batch", "batch_size", len(rows))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -95,41 +195,32 @@ func (vs *VectorStorage) getTableName(emb []float32) (string, error) {
|
|||||||
4096: true,
|
4096: true,
|
||||||
5120: true,
|
5120: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
if supportedSizes[size] {
|
if supportedSizes[size] {
|
||||||
return fmt.Sprintf("embeddings_%d", size), nil
|
return fmt.Sprintf("embeddings_%d", size), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return "", fmt.Errorf("no table for embedding size of %d", size)
|
return "", fmt.Errorf("no table for embedding size of %d", size)
|
||||||
}
|
}
|
||||||
|
|
||||||
// SearchClosest finds vectors closest to the query vector using efficient cosine similarity calculation
|
// SearchClosest finds vectors closest to the query vector using efficient cosine similarity calculation
|
||||||
func (vs *VectorStorage) SearchClosest(query []float32) ([]models.VectorRow, error) {
|
func (vs *VectorStorage) SearchClosest(query []float32, limit int) ([]models.VectorRow, error) {
|
||||||
|
if limit <= 0 {
|
||||||
|
limit = 10
|
||||||
|
}
|
||||||
tableName, err := vs.getTableName(query)
|
tableName, err := vs.getTableName(query)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// For better performance, instead of loading all vectors at once,
|
|
||||||
// we'll implement batching and potentially add L2 distance-based pre-filtering
|
|
||||||
// since cosine similarity is related to L2 distance for normalized vectors
|
|
||||||
|
|
||||||
querySQL := "SELECT embeddings, slug, raw_text, filename FROM " + tableName
|
querySQL := "SELECT embeddings, slug, raw_text, filename FROM " + tableName
|
||||||
rows, err := vs.sqlxDB.Query(querySQL)
|
rows, err := vs.sqlxDB.Query(querySQL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer rows.Close()
|
defer rows.Close()
|
||||||
|
|
||||||
// Use a min-heap or simple slice to keep track of top 3 closest vectors
|
|
||||||
type SearchResult struct {
|
type SearchResult struct {
|
||||||
vector models.VectorRow
|
vector models.VectorRow
|
||||||
distance float32
|
distance float32
|
||||||
}
|
}
|
||||||
|
|
||||||
var topResults []SearchResult
|
var topResults []SearchResult
|
||||||
|
|
||||||
// Process vectors one by one to avoid loading everything into memory
|
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var (
|
var (
|
||||||
embeddingsBlob []byte
|
embeddingsBlob []byte
|
||||||
@@ -140,12 +231,9 @@ func (vs *VectorStorage) SearchClosest(query []float32) ([]models.VectorRow, err
|
|||||||
vs.logger.Error("failed to scan row", "error", err)
|
vs.logger.Error("failed to scan row", "error", err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
storedEmbeddings := DeserializeVector(embeddingsBlob)
|
storedEmbeddings := DeserializeVector(embeddingsBlob)
|
||||||
|
|
||||||
// Calculate cosine similarity (returns value between -1 and 1, where 1 is most similar)
|
|
||||||
similarity := cosineSimilarity(query, storedEmbeddings)
|
similarity := cosineSimilarity(query, storedEmbeddings)
|
||||||
distance := 1 - similarity // Convert to distance where 0 is most similar
|
distance := 1 - similarity
|
||||||
|
|
||||||
result := SearchResult{
|
result := SearchResult{
|
||||||
vector: models.VectorRow{
|
vector: models.VectorRow{
|
||||||
@@ -157,33 +245,117 @@ func (vs *VectorStorage) SearchClosest(query []float32) ([]models.VectorRow, err
|
|||||||
distance: distance,
|
distance: distance,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add to top results and maintain only top 3
|
|
||||||
topResults = append(topResults, result)
|
topResults = append(topResults, result)
|
||||||
|
|
||||||
// Sort and keep only top 3
|
|
||||||
sort.Slice(topResults, func(i, j int) bool {
|
sort.Slice(topResults, func(i, j int) bool {
|
||||||
return topResults[i].distance < topResults[j].distance
|
return topResults[i].distance < topResults[j].distance
|
||||||
})
|
})
|
||||||
|
if len(topResults) > limit {
|
||||||
if len(topResults) > 3 {
|
topResults = topResults[:limit]
|
||||||
topResults = topResults[:3] // Keep only closest 3
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert back to VectorRow slice
|
|
||||||
results := make([]models.VectorRow, 0, len(topResults))
|
results := make([]models.VectorRow, 0, len(topResults))
|
||||||
for _, result := range topResults {
|
for _, result := range topResults {
|
||||||
result.vector.Distance = result.distance
|
result.vector.Distance = result.distance
|
||||||
results = append(results, result.vector)
|
results = append(results, result.vector)
|
||||||
}
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetVectorBySlug retrieves a vector row by its slug
|
||||||
|
func (vs *VectorStorage) GetVectorBySlug(slug string) (*models.VectorRow, error) {
|
||||||
|
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
||||||
|
for _, size := range embeddingSizes {
|
||||||
|
table := fmt.Sprintf("embeddings_%d", size)
|
||||||
|
query := fmt.Sprintf("SELECT embeddings, slug, raw_text, filename FROM %s WHERE slug = ?", table)
|
||||||
|
row := vs.sqlxDB.QueryRow(query, slug)
|
||||||
|
var (
|
||||||
|
embeddingsBlob []byte
|
||||||
|
retrievedSlug, rawText, fileName string
|
||||||
|
)
|
||||||
|
if err := row.Scan(&embeddingsBlob, &retrievedSlug, &rawText, &fileName); err != nil {
|
||||||
|
// No row in this table, continue to next size
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
storedEmbeddings := DeserializeVector(embeddingsBlob)
|
||||||
|
return &models.VectorRow{
|
||||||
|
Embeddings: storedEmbeddings,
|
||||||
|
Slug: retrievedSlug,
|
||||||
|
RawText: rawText,
|
||||||
|
FileName: fileName,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("vector with slug %s not found", slug)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SearchKeyword performs full-text search using FTS5
|
||||||
|
func (vs *VectorStorage) SearchKeyword(query string, limit int) ([]models.VectorRow, error) {
|
||||||
|
// Use FTS5 bm25 ranking. bm25 returns negative values where more negative is better.
|
||||||
|
// We'll order by bm25 (ascending) and limit.
|
||||||
|
ftsQuery := `SELECT slug, raw_text, filename, bm25(fts_embeddings) as score
|
||||||
|
FROM fts_embeddings
|
||||||
|
WHERE fts_embeddings MATCH ?
|
||||||
|
ORDER BY score
|
||||||
|
LIMIT ?`
|
||||||
|
|
||||||
|
// Try original query first
|
||||||
|
rows, err := vs.sqlxDB.Query(ftsQuery, query, limit)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("FTS search failed: %w", err)
|
||||||
|
}
|
||||||
|
results, err := vs.scanRows(rows)
|
||||||
|
rows.Close()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no results and query contains multiple terms, try OR fallback
|
||||||
|
if len(results) == 0 && strings.Contains(query, " ") && !strings.Contains(strings.ToUpper(query), " OR ") {
|
||||||
|
// Build OR query: term1 OR term2 OR term3
|
||||||
|
terms := strings.Fields(query)
|
||||||
|
if len(terms) > 1 {
|
||||||
|
orQuery := strings.Join(terms, " OR ")
|
||||||
|
rows, err := vs.sqlxDB.Query(ftsQuery, orQuery, limit)
|
||||||
|
if err != nil {
|
||||||
|
// Return original empty results rather than error
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
orResults, err := vs.scanRows(rows)
|
||||||
|
rows.Close()
|
||||||
|
if err == nil {
|
||||||
|
results = orResults
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// scanRows converts SQL rows to VectorRow slice
|
||||||
|
func (vs *VectorStorage) scanRows(rows *sql.Rows) ([]models.VectorRow, error) {
|
||||||
|
var results []models.VectorRow
|
||||||
|
for rows.Next() {
|
||||||
|
var slug, rawText, fileName string
|
||||||
|
var score float64
|
||||||
|
if err := rows.Scan(&slug, &rawText, &fileName, &score); err != nil {
|
||||||
|
vs.logger.Error("failed to scan FTS row", "error", err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Convert BM25 score to distance-like metric (lower is better)
|
||||||
|
// BM25 is negative, more negative is better. Keep as negative.
|
||||||
|
distance := float32(score) // Keep negative, more negative is better
|
||||||
|
// No clamping needed; negative distances are fine
|
||||||
|
results = append(results, models.VectorRow{
|
||||||
|
Slug: slug,
|
||||||
|
RawText: rawText,
|
||||||
|
FileName: fileName,
|
||||||
|
Distance: distance,
|
||||||
|
})
|
||||||
|
}
|
||||||
return results, nil
|
return results, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// ListFiles returns a list of all loaded files
|
// ListFiles returns a list of all loaded files
|
||||||
func (vs *VectorStorage) ListFiles() ([]string, error) {
|
func (vs *VectorStorage) ListFiles() ([]string, error) {
|
||||||
fileLists := make([][]string, 0)
|
fileLists := make([][]string, 0)
|
||||||
|
|
||||||
// Query all supported tables and combine results
|
// Query all supported tables and combine results
|
||||||
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
||||||
for _, size := range embeddingSizes {
|
for _, size := range embeddingSizes {
|
||||||
@@ -219,14 +391,16 @@ func (vs *VectorStorage) ListFiles() ([]string, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return allFiles, nil
|
return allFiles, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// RemoveEmbByFileName removes all embeddings associated with a specific filename
|
// RemoveEmbByFileName removes all embeddings associated with a specific filename
|
||||||
func (vs *VectorStorage) RemoveEmbByFileName(filename string) error {
|
func (vs *VectorStorage) RemoveEmbByFileName(filename string) error {
|
||||||
var errors []string
|
var errors []string
|
||||||
|
// Delete from FTS table first
|
||||||
|
if _, err := vs.sqlxDB.Exec("DELETE FROM fts_embeddings WHERE filename = ?", filename); err != nil {
|
||||||
|
errors = append(errors, err.Error())
|
||||||
|
}
|
||||||
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
embeddingSizes := []int{384, 768, 1024, 1536, 2048, 3072, 4096, 5120}
|
||||||
for _, size := range embeddingSizes {
|
for _, size := range embeddingSizes {
|
||||||
table := fmt.Sprintf("embeddings_%d", size)
|
table := fmt.Sprintf("embeddings_%d", size)
|
||||||
@@ -235,11 +409,9 @@ func (vs *VectorStorage) RemoveEmbByFileName(filename string) error {
|
|||||||
errors = append(errors, err.Error())
|
errors = append(errors, err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(errors) > 0 {
|
if len(errors) > 0 {
|
||||||
return fmt.Errorf("errors occurred: %s", strings.Join(errors, "; "))
|
return fmt.Errorf("errors occurred: %s", strings.Join(errors, "; "))
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -248,18 +420,15 @@ func cosineSimilarity(a, b []float32) float32 {
|
|||||||
if len(a) != len(b) {
|
if len(a) != len(b) {
|
||||||
return 0.0
|
return 0.0
|
||||||
}
|
}
|
||||||
|
|
||||||
var dotProduct, normA, normB float32
|
var dotProduct, normA, normB float32
|
||||||
for i := 0; i < len(a); i++ {
|
for i := 0; i < len(a); i++ {
|
||||||
dotProduct += a[i] * b[i]
|
dotProduct += a[i] * b[i]
|
||||||
normA += a[i] * a[i]
|
normA += a[i] * a[i]
|
||||||
normB += b[i] * b[i]
|
normB += b[i] * b[i]
|
||||||
}
|
}
|
||||||
|
|
||||||
if normA == 0 || normB == 0 {
|
if normA == 0 || normB == 0 {
|
||||||
return 0.0
|
return 0.0
|
||||||
}
|
}
|
||||||
|
|
||||||
return dotProduct / (sqrt(normA) * sqrt(normB))
|
return dotProduct / (sqrt(normA) * sqrt(normB))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -275,4 +444,3 @@ func sqrt(f float32) float32 {
|
|||||||
}
|
}
|
||||||
return guess
|
return guess
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
74
server.go
74
server.go
@@ -1,74 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"gf-lt/config"
|
|
||||||
"net/http"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Server struct {
|
|
||||||
// nolint
|
|
||||||
config config.Config
|
|
||||||
}
|
|
||||||
|
|
||||||
func (srv *Server) ListenToRequests(port string) {
|
|
||||||
// h := srv.actions
|
|
||||||
mux := http.NewServeMux()
|
|
||||||
server := &http.Server{
|
|
||||||
Addr: "localhost:" + port,
|
|
||||||
Handler: mux,
|
|
||||||
ReadTimeout: time.Second * 5,
|
|
||||||
WriteTimeout: time.Second * 5,
|
|
||||||
}
|
|
||||||
mux.HandleFunc("GET /ping", pingHandler)
|
|
||||||
mux.HandleFunc("GET /model", modelHandler)
|
|
||||||
mux.HandleFunc("POST /completion", completionHandler)
|
|
||||||
fmt.Println("Listening", "addr", server.Addr)
|
|
||||||
if err := server.ListenAndServe(); err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// create server
|
|
||||||
// listen to the completion endpoint handler
|
|
||||||
func pingHandler(w http.ResponseWriter, req *http.Request) {
|
|
||||||
if _, err := w.Write([]byte("pong")); err != nil {
|
|
||||||
logger.Error("server ping", "error", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func completionHandler(w http.ResponseWriter, req *http.Request) {
|
|
||||||
// post request
|
|
||||||
body := req.Body
|
|
||||||
// get body as io.reader
|
|
||||||
// pass it to the /completion
|
|
||||||
go sendMsgToLLM(body)
|
|
||||||
out:
|
|
||||||
for {
|
|
||||||
select {
|
|
||||||
case chunk := <-chunkChan:
|
|
||||||
fmt.Print(chunk)
|
|
||||||
if _, err := w.Write([]byte(chunk)); err != nil {
|
|
||||||
logger.Warn("failed to write chunk", "value", chunk)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
case <-streamDone:
|
|
||||||
break out
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func modelHandler(w http.ResponseWriter, req *http.Request) {
|
|
||||||
llmModel := fetchLCPModelName()
|
|
||||||
payload, err := json.Marshal(llmModel)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("model handler", "error", err)
|
|
||||||
// return err
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if _, err := w.Write(payload); err != nil {
|
|
||||||
logger.Error("model handler", "error", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
42
session.go
42
session.go
@@ -1,6 +1,7 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
@@ -131,13 +132,18 @@ func loadOldChatOrGetNew() []models.RoleMsg {
|
|||||||
chat, err := store.GetLastChat()
|
chat, err := store.GetLastChat()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Warn("failed to load history chat", "error", err)
|
logger.Warn("failed to load history chat", "error", err)
|
||||||
|
maxID, err := store.ChatGetMaxID()
|
||||||
|
if err != nil {
|
||||||
|
logger.Error("failed to fetch max chat id", "error", err)
|
||||||
|
}
|
||||||
|
maxID++
|
||||||
chat := &models.Chat{
|
chat := &models.Chat{
|
||||||
ID: 0,
|
ID: maxID,
|
||||||
CreatedAt: time.Now(),
|
CreatedAt: time.Now(),
|
||||||
UpdatedAt: time.Now(),
|
UpdatedAt: time.Now(),
|
||||||
Agent: cfg.AssistantRole,
|
Agent: cfg.AssistantRole,
|
||||||
}
|
}
|
||||||
chat.Name = fmt.Sprintf("%s_%v", chat.Agent, chat.CreatedAt.Unix())
|
chat.Name = fmt.Sprintf("%s_%v", chat.Agent, chat.ID)
|
||||||
activeChatName = chat.Name
|
activeChatName = chat.Name
|
||||||
chatMap[chat.Name] = chat
|
chatMap[chat.Name] = chat
|
||||||
return defaultStarter
|
return defaultStarter
|
||||||
@@ -149,10 +155,6 @@ func loadOldChatOrGetNew() []models.RoleMsg {
|
|||||||
chatMap[chat.Name] = chat
|
chatMap[chat.Name] = chat
|
||||||
return defaultStarter
|
return defaultStarter
|
||||||
}
|
}
|
||||||
// if chat.Name == "" {
|
|
||||||
// logger.Warn("empty chat name", "id", chat.ID)
|
|
||||||
// chat.Name = fmt.Sprintf("%s_%v", chat.Agent, chat.CreatedAt.Unix())
|
|
||||||
// }
|
|
||||||
chatMap[chat.Name] = chat
|
chatMap[chat.Name] = chat
|
||||||
activeChatName = chat.Name
|
activeChatName = chat.Name
|
||||||
cfg.AssistantRole = chat.Agent
|
cfg.AssistantRole = chat.Agent
|
||||||
@@ -160,15 +162,31 @@ func loadOldChatOrGetNew() []models.RoleMsg {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func copyToClipboard(text string) error {
|
func copyToClipboard(text string) error {
|
||||||
cmd := exec.Command("xclip", "-selection", "clipboard")
|
var cmd *exec.Cmd
|
||||||
cmd.Stdin = nil
|
if _, err := exec.LookPath("xclip"); err == nil {
|
||||||
|
cmd = exec.Command("xclip", "-selection", "clipboard")
|
||||||
|
} else if _, err := exec.LookPath("wl-copy"); err == nil {
|
||||||
|
cmd = exec.Command("wl-copy")
|
||||||
|
} else {
|
||||||
|
return errors.New("no clipboard tool found (install xclip or wl-clipboard)")
|
||||||
|
}
|
||||||
|
cmd.Stdin = strings.NewReader(text)
|
||||||
cmd.Stdout = nil
|
cmd.Stdout = nil
|
||||||
cmd.Stderr = nil
|
cmd.Stderr = nil
|
||||||
cmd.Stdin = strings.NewReader(text)
|
|
||||||
return cmd.Run()
|
return cmd.Run()
|
||||||
}
|
}
|
||||||
|
|
||||||
func notifyUser(topic, message string) error {
|
func readFromClipboard() (string, error) {
|
||||||
cmd := exec.Command("notify-send", topic, message)
|
var cmd *exec.Cmd
|
||||||
return cmd.Run()
|
if _, err := exec.LookPath("xclip"); err == nil {
|
||||||
|
cmd = exec.Command("xclip", "-selection", "clipboard", "-out")
|
||||||
|
} else if _, err := exec.LookPath("wl-paste"); err == nil {
|
||||||
|
cmd = exec.Command("wl-paste")
|
||||||
|
} else {
|
||||||
|
return "", errors.New("no clipboard tool found (install xclip or wl-clipboard)")
|
||||||
|
}
|
||||||
|
var out bytes.Buffer
|
||||||
|
cmd.Stdout = &out
|
||||||
|
err := cmd.Run()
|
||||||
|
return out.String(), err
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ type Memories interface {
|
|||||||
Memorise(m *models.Memory) (*models.Memory, error)
|
Memorise(m *models.Memory) (*models.Memory, error)
|
||||||
Recall(agent, topic string) (string, error)
|
Recall(agent, topic string) (string, error)
|
||||||
RecallTopics(agent string) ([]string, error)
|
RecallTopics(agent string) ([]string, error)
|
||||||
|
Forget(agent, topic string) error
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p ProviderSQL) Memorise(m *models.Memory) (*models.Memory, error) {
|
func (p ProviderSQL) Memorise(m *models.Memory) (*models.Memory, error) {
|
||||||
@@ -52,3 +53,13 @@ func (p ProviderSQL) RecallTopics(agent string) ([]string, error) {
|
|||||||
}
|
}
|
||||||
return topics, nil
|
return topics, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p ProviderSQL) Forget(agent, topic string) error {
|
||||||
|
query := "DELETE FROM memories WHERE agent = $1 AND topic = $2"
|
||||||
|
_, err := p.db.Exec(query, agent, topic)
|
||||||
|
if err != nil {
|
||||||
|
p.logger.Error("failed to delete memory", "query", query, "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -10,28 +10,42 @@ import (
|
|||||||
//go:embed migrations/*
|
//go:embed migrations/*
|
||||||
var migrationsFS embed.FS
|
var migrationsFS embed.FS
|
||||||
|
|
||||||
func (p *ProviderSQL) Migrate() {
|
func (p *ProviderSQL) Migrate() error {
|
||||||
// Get the embedded filesystem
|
// Get the embedded filesystem
|
||||||
migrationsDir, err := fs.Sub(migrationsFS, "migrations")
|
migrationsDir, err := fs.Sub(migrationsFS, "migrations")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.logger.Error("Failed to get embedded migrations directory;", "error", err)
|
p.logger.Error("Failed to get embedded migrations directory;", "error", err)
|
||||||
|
return fmt.Errorf("failed to get embedded migrations directory: %w", err)
|
||||||
}
|
}
|
||||||
// List all .up.sql files
|
// List all .up.sql files
|
||||||
files, err := migrationsFS.ReadDir("migrations")
|
files, err := migrationsFS.ReadDir("migrations")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.logger.Error("Failed to read migrations directory;", "error", err)
|
p.logger.Error("Failed to read migrations directory;", "error", err)
|
||||||
|
return fmt.Errorf("failed to read migrations directory: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check if FTS already has data - skip populate migration if so
|
||||||
|
var ftsCount int
|
||||||
|
_ = p.db.QueryRow("SELECT COUNT(*) FROM fts_embeddings").Scan(&ftsCount)
|
||||||
|
skipFTSMigration := ftsCount > 0
|
||||||
|
|
||||||
// Execute each .up.sql file
|
// Execute each .up.sql file
|
||||||
for _, file := range files {
|
for _, file := range files {
|
||||||
if strings.HasSuffix(file.Name(), ".up.sql") {
|
if strings.HasSuffix(file.Name(), ".up.sql") {
|
||||||
|
// Skip FTS populate migration if already populated
|
||||||
|
if skipFTSMigration && strings.Contains(file.Name(), "004_populate_fts") {
|
||||||
|
p.logger.Debug("Skipping FTS migration - already populated", "file", file.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
err := p.executeMigration(migrationsDir, file.Name())
|
err := p.executeMigration(migrationsDir, file.Name())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
p.logger.Error("Failed to execute migration %s: %v", file.Name(), err)
|
p.logger.Error("Failed to execute migration %s: %v", file.Name(), err)
|
||||||
panic(err)
|
return fmt.Errorf("failed to execute migration %s: %w", file.Name(), err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
p.logger.Debug("All migrations executed successfully!")
|
p.logger.Debug("All migrations executed successfully!")
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *ProviderSQL) executeMigration(migrationsDir fs.FS, fileName string) error {
|
func (p *ProviderSQL) executeMigration(migrationsDir fs.FS, fileName string) error {
|
||||||
|
|||||||
2
storage/migrations/003_add_fts.down.sql
Normal file
2
storage/migrations/003_add_fts.down.sql
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
-- Drop FTS5 virtual table
|
||||||
|
DROP TABLE IF EXISTS fts_embeddings;
|
||||||
15
storage/migrations/003_add_fts.up.sql
Normal file
15
storage/migrations/003_add_fts.up.sql
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
-- Create FTS5 virtual table for full-text search
|
||||||
|
CREATE VIRTUAL TABLE IF NOT EXISTS fts_embeddings USING fts5(
|
||||||
|
slug UNINDEXED,
|
||||||
|
raw_text,
|
||||||
|
filename UNINDEXED,
|
||||||
|
embedding_size UNINDEXED,
|
||||||
|
tokenize='porter unicode61' -- Use porter stemmer and unicode61 tokenizer
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Create triggers to maintain FTS table when embeddings are inserted/deleted
|
||||||
|
-- Note: We'll handle inserts/deletes programmatically for simplicity
|
||||||
|
-- but triggers could be added here if needed.
|
||||||
|
|
||||||
|
-- Indexes for performance (FTS5 manages its own indexes)
|
||||||
|
-- No additional indexes needed for FTS5 virtual table.
|
||||||
2
storage/migrations/004_populate_fts.down.sql
Normal file
2
storage/migrations/004_populate_fts.down.sql
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
-- Clear FTS table (optional)
|
||||||
|
DELETE FROM fts_embeddings;
|
||||||
4
storage/migrations/004_populate_fts.up.sql
Normal file
4
storage/migrations/004_populate_fts.up.sql
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
-- Populate FTS table with existing embeddings (incremental - only inserts missing rows)
|
||||||
|
-- Only use 768 embeddings as that's what we use
|
||||||
|
INSERT OR IGNORE INTO fts_embeddings (slug, raw_text, filename, embedding_size)
|
||||||
|
SELECT slug, raw_text, filename, 768 FROM embeddings_768;
|
||||||
87
storage/migrations/005_drop_unused_embeddings.down.sql
Normal file
87
storage/migrations/005_drop_unused_embeddings.down.sql
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
-- Recreate unused embedding tables (for rollback)
|
||||||
|
CREATE TABLE IF NOT EXISTS embeddings_384 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS embeddings_1024 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS embeddings_1536 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS embeddings_2048 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS embeddings_3072 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS embeddings_4096 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS embeddings_5120 (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
embeddings BLOB NOT NULL,
|
||||||
|
slug TEXT NOT NULL,
|
||||||
|
raw_text TEXT NOT NULL,
|
||||||
|
filename TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_384_filename ON embeddings_384(filename);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_1024_filename ON embeddings_1024(filename);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_1536_filename ON embeddings_1536(filename);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_2048_filename ON embeddings_2048(filename);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_3072_filename ON embeddings_3072(filename);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_4096_filename ON embeddings_4096(filename);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_5120_filename ON embeddings_5120(filename);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_384_slug ON embeddings_384(slug);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_1024_slug ON embeddings_1024(slug);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_1536_slug ON embeddings_1536(slug);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_2048_slug ON embeddings_2048(slug);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_3072_slug ON embeddings_3072(slug);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_4096_slug ON embeddings_4096(slug);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_5120_slug ON embeddings_5120(slug);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_384_created_at ON embeddings_384(created_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_1024_created_at ON embeddings_1024(created_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_1536_created_at ON embeddings_1536(created_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_2048_created_at ON embeddings_2048(created_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_3072_created_at ON embeddings_3072(created_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_4096_created_at ON embeddings_4096(created_at);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_embeddings_5120_created_at ON embeddings_5120(created_at);
|
||||||
32
storage/migrations/005_drop_unused_embeddings.up.sql
Normal file
32
storage/migrations/005_drop_unused_embeddings.up.sql
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
-- Drop unused embedding tables (we only use 768)
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_384_filename;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_1024_filename;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_1536_filename;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_2048_filename;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_3072_filename;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_4096_filename;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_5120_filename;
|
||||||
|
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_384_slug;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_1024_slug;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_1536_slug;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_2048_slug;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_3072_slug;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_4096_slug;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_5120_slug;
|
||||||
|
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_384_created_at;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_1024_created_at;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_1536_created_at;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_2048_created_at;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_3072_created_at;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_4096_created_at;
|
||||||
|
DROP INDEX IF EXISTS idx_embeddings_5120_created_at;
|
||||||
|
|
||||||
|
DROP TABLE IF EXISTS embeddings_384;
|
||||||
|
DROP TABLE IF EXISTS embeddings_1024;
|
||||||
|
DROP TABLE IF EXISTS embeddings_1536;
|
||||||
|
DROP TABLE IF EXISTS embeddings_2048;
|
||||||
|
DROP TABLE IF EXISTS embeddings_3072;
|
||||||
|
DROP TABLE IF EXISTS embeddings_4096;
|
||||||
|
DROP TABLE IF EXISTS embeddings_5120;
|
||||||
@@ -102,9 +102,27 @@ func NewProviderSQL(dbPath string, logger *slog.Logger) FullRepo {
|
|||||||
logger.Error("failed to open db connection", "error", err)
|
logger.Error("failed to open db connection", "error", err)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
// Enable WAL mode for better concurrency and performance
|
||||||
|
if _, err := db.Exec("PRAGMA journal_mode = WAL;"); err != nil {
|
||||||
|
logger.Warn("failed to enable WAL mode", "error", err)
|
||||||
|
}
|
||||||
|
if _, err := db.Exec("PRAGMA synchronous = NORMAL;"); err != nil {
|
||||||
|
logger.Warn("failed to set synchronous mode", "error", err)
|
||||||
|
}
|
||||||
|
// Increase cache size for better performance
|
||||||
|
if _, err := db.Exec("PRAGMA cache_size = -2000;"); err != nil {
|
||||||
|
logger.Warn("failed to set cache size", "error", err)
|
||||||
|
}
|
||||||
|
// Log actual journal mode for debugging
|
||||||
|
var journalMode string
|
||||||
|
if err := db.QueryRow("PRAGMA journal_mode;").Scan(&journalMode); err == nil {
|
||||||
|
logger.Debug("SQLite journal mode", "mode", journalMode)
|
||||||
|
}
|
||||||
p := ProviderSQL{db: db, logger: logger}
|
p := ProviderSQL{db: db, logger: logger}
|
||||||
|
if err := p.Migrate(); err != nil {
|
||||||
p.Migrate()
|
logger.Error("migration failed, app cannot start", "error", err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
return p
|
return p
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import (
|
|||||||
"encoding/binary"
|
"encoding/binary"
|
||||||
"fmt"
|
"fmt"
|
||||||
"gf-lt/models"
|
"gf-lt/models"
|
||||||
|
"sort"
|
||||||
"unsafe"
|
"unsafe"
|
||||||
|
|
||||||
"github.com/jmoiron/sqlx"
|
"github.com/jmoiron/sqlx"
|
||||||
@@ -11,7 +12,7 @@ import (
|
|||||||
|
|
||||||
type VectorRepo interface {
|
type VectorRepo interface {
|
||||||
WriteVector(*models.VectorRow) error
|
WriteVector(*models.VectorRow) error
|
||||||
SearchClosest(q []float32) ([]models.VectorRow, error)
|
SearchClosest(q []float32, limit int) ([]models.VectorRow, error)
|
||||||
ListFiles() ([]string, error)
|
ListFiles() ([]string, error)
|
||||||
RemoveEmbByFileName(filename string) error
|
RemoveEmbByFileName(filename string) error
|
||||||
DB() *sqlx.DB
|
DB() *sqlx.DB
|
||||||
@@ -47,22 +48,8 @@ func mathBitsToFloat32(b uint32) float32 {
|
|||||||
|
|
||||||
func fetchTableName(emb []float32) (string, error) {
|
func fetchTableName(emb []float32) (string, error) {
|
||||||
switch len(emb) {
|
switch len(emb) {
|
||||||
case 384:
|
|
||||||
return "embeddings_384", nil
|
|
||||||
case 768:
|
case 768:
|
||||||
return "embeddings_768", nil
|
return "embeddings_768", nil
|
||||||
case 1024:
|
|
||||||
return "embeddings_1024", nil
|
|
||||||
case 1536:
|
|
||||||
return "embeddings_1536", nil
|
|
||||||
case 2048:
|
|
||||||
return "embeddings_2048", nil
|
|
||||||
case 3072:
|
|
||||||
return "embeddings_3072", nil
|
|
||||||
case 4096:
|
|
||||||
return "embeddings_4096", nil
|
|
||||||
case 5120:
|
|
||||||
return "embeddings_5120", nil
|
|
||||||
default:
|
default:
|
||||||
return "", fmt.Errorf("no table for the size of %d", len(emb))
|
return "", fmt.Errorf("no table for the size of %d", len(emb))
|
||||||
}
|
}
|
||||||
@@ -73,41 +60,33 @@ func (p ProviderSQL) WriteVector(row *models.VectorRow) error {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
serializedEmbeddings := SerializeVector(row.Embeddings)
|
serializedEmbeddings := SerializeVector(row.Embeddings)
|
||||||
|
|
||||||
query := fmt.Sprintf("INSERT INTO %s(embeddings, slug, raw_text, filename) VALUES (?, ?, ?, ?)", tableName)
|
query := fmt.Sprintf("INSERT INTO %s(embeddings, slug, raw_text, filename) VALUES (?, ?, ?, ?)", tableName)
|
||||||
_, err = p.db.Exec(query, serializedEmbeddings, row.Slug, row.RawText, row.FileName)
|
_, err = p.db.Exec(query, serializedEmbeddings, row.Slug, row.RawText, row.FileName)
|
||||||
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p ProviderSQL) SearchClosest(q []float32) ([]models.VectorRow, error) {
|
func (p ProviderSQL) SearchClosest(q []float32, limit int) ([]models.VectorRow, error) {
|
||||||
tableName, err := fetchTableName(q)
|
tableName, err := fetchTableName(q)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
querySQL := "SELECT embeddings, slug, raw_text, filename FROM " + tableName
|
querySQL := "SELECT embeddings, slug, raw_text, filename FROM " + tableName
|
||||||
rows, err := p.db.Query(querySQL)
|
rows, err := p.db.Query(querySQL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer rows.Close()
|
defer rows.Close()
|
||||||
|
|
||||||
type SearchResult struct {
|
type SearchResult struct {
|
||||||
vector models.VectorRow
|
vector models.VectorRow
|
||||||
distance float32
|
distance float32
|
||||||
}
|
}
|
||||||
|
var allResults []SearchResult
|
||||||
var topResults []SearchResult
|
|
||||||
|
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var (
|
var (
|
||||||
embeddingsBlob []byte
|
embeddingsBlob []byte
|
||||||
slug, rawText, fileName string
|
slug, rawText, fileName string
|
||||||
)
|
)
|
||||||
|
|
||||||
if err := rows.Scan(&embeddingsBlob, &slug, &rawText, &fileName); err != nil {
|
if err := rows.Scan(&embeddingsBlob, &slug, &rawText, &fileName); err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
@@ -127,32 +106,22 @@ func (p ProviderSQL) SearchClosest(q []float32) ([]models.VectorRow, error) {
|
|||||||
},
|
},
|
||||||
distance: distance,
|
distance: distance,
|
||||||
}
|
}
|
||||||
|
allResults = append(allResults, result)
|
||||||
// Add to top results and maintain only top results
|
|
||||||
topResults = append(topResults, result)
|
|
||||||
|
|
||||||
// Sort and keep only top results
|
|
||||||
// We'll keep the top 3 closest vectors
|
|
||||||
if len(topResults) > 3 {
|
|
||||||
// Simple sort and truncate to maintain only 3 best matches
|
|
||||||
for i := 0; i < len(topResults); i++ {
|
|
||||||
for j := i + 1; j < len(topResults); j++ {
|
|
||||||
if topResults[i].distance > topResults[j].distance {
|
|
||||||
topResults[i], topResults[j] = topResults[j], topResults[i]
|
|
||||||
}
|
}
|
||||||
|
// Sort by distance
|
||||||
|
sort.Slice(allResults, func(i, j int) bool {
|
||||||
|
return allResults[i].distance < allResults[j].distance
|
||||||
|
})
|
||||||
|
// Truncate to limit
|
||||||
|
if len(allResults) > limit {
|
||||||
|
allResults = allResults[:limit]
|
||||||
}
|
}
|
||||||
}
|
|
||||||
topResults = topResults[:3]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert back to VectorRow slice
|
// Convert back to VectorRow slice
|
||||||
results := make([]models.VectorRow, len(topResults))
|
results := make([]models.VectorRow, len(allResults))
|
||||||
for i, result := range topResults {
|
for i, result := range allResults {
|
||||||
result.vector.Distance = result.distance
|
result.vector.Distance = result.distance
|
||||||
results[i] = result.vector
|
results[i] = result.vector
|
||||||
}
|
}
|
||||||
|
|
||||||
return results, nil
|
return results, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -161,18 +130,15 @@ func cosineSimilarity(a, b []float32) float32 {
|
|||||||
if len(a) != len(b) {
|
if len(a) != len(b) {
|
||||||
return 0.0
|
return 0.0
|
||||||
}
|
}
|
||||||
|
|
||||||
var dotProduct, normA, normB float32
|
var dotProduct, normA, normB float32
|
||||||
for i := 0; i < len(a); i++ {
|
for i := 0; i < len(a); i++ {
|
||||||
dotProduct += a[i] * b[i]
|
dotProduct += a[i] * b[i]
|
||||||
normA += a[i] * a[i]
|
normA += a[i] * a[i]
|
||||||
normB += b[i] * b[i]
|
normB += b[i] * b[i]
|
||||||
}
|
}
|
||||||
|
|
||||||
if normA == 0 || normB == 0 {
|
if normA == 0 || normB == 0 {
|
||||||
return 0.0
|
return 0.0
|
||||||
}
|
}
|
||||||
|
|
||||||
return dotProduct / (sqrt(normA) * sqrt(normB))
|
return dotProduct / (sqrt(normA) * sqrt(normB))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -190,66 +156,25 @@ func sqrt(f float32) float32 {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (p ProviderSQL) ListFiles() ([]string, error) {
|
func (p ProviderSQL) ListFiles() ([]string, error) {
|
||||||
fileLists := make([][]string, 0)
|
query := "SELECT DISTINCT filename FROM embeddings_768"
|
||||||
|
|
||||||
// Query all supported tables and combine results
|
|
||||||
tableNames := []string{
|
|
||||||
"embeddings_384", "embeddings_768", "embeddings_1024", "embeddings_1536",
|
|
||||||
"embeddings_2048", "embeddings_3072", "embeddings_4096", "embeddings_5120",
|
|
||||||
}
|
|
||||||
for _, table := range tableNames {
|
|
||||||
query := "SELECT DISTINCT filename FROM " + table
|
|
||||||
rows, err := p.db.Query(query)
|
rows, err := p.db.Query(query)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// Continue if one table doesn't exist
|
return nil, err
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
|
defer rows.Close()
|
||||||
var files []string
|
var allFiles []string
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var filename string
|
var filename string
|
||||||
if err := rows.Scan(&filename); err != nil {
|
if err := rows.Scan(&filename); err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
files = append(files, filename)
|
allFiles = append(allFiles, filename)
|
||||||
}
|
}
|
||||||
rows.Close()
|
|
||||||
|
|
||||||
fileLists = append(fileLists, files)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Combine and deduplicate
|
|
||||||
fileSet := make(map[string]bool)
|
|
||||||
var allFiles []string
|
|
||||||
for _, files := range fileLists {
|
|
||||||
for _, file := range files {
|
|
||||||
if !fileSet[file] {
|
|
||||||
fileSet[file] = true
|
|
||||||
allFiles = append(allFiles, file)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return allFiles, nil
|
return allFiles, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p ProviderSQL) RemoveEmbByFileName(filename string) error {
|
func (p ProviderSQL) RemoveEmbByFileName(filename string) error {
|
||||||
var errors []string
|
query := "DELETE FROM embeddings_768 WHERE filename = ?"
|
||||||
|
_, err := p.db.Exec(query, filename)
|
||||||
tableNames := []string{
|
return err
|
||||||
"embeddings_384", "embeddings_768", "embeddings_1024", "embeddings_1536",
|
|
||||||
"embeddings_2048", "embeddings_3072", "embeddings_4096", "embeddings_5120",
|
|
||||||
}
|
|
||||||
for _, table := range tableNames {
|
|
||||||
query := fmt.Sprintf("DELETE FROM %s WHERE filename = ?", table)
|
|
||||||
if _, err := p.db.Exec(query, filename); err != nil {
|
|
||||||
errors = append(errors, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(errors) > 0 {
|
|
||||||
return fmt.Errorf("errors occurred: %v", errors)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"sys_prompt": "This is a chat between Alice, Bob and Carl. Normally all message are public (seen by everyone). But characters also able to make messages intended to specific targets using '@' tag. Usually tag is provided inside of out of character clause: (ooc: @charname@), but will be parsed if put anywhere in the message.\nTO SEND A PRIVATE MESSAGE:\n- Include a recipient tag in this exact format: @CharacterName@\n- The tag can be anywhere in your message\n- Example: \"Don't tell others this secret. (ooc: @Bob@)\"\n- For immersion sake it is better if private messages are given in context of whispering, passing notes, or being alone in some space: Alice: *leans closer to Carl and whispers* \"I forgot to turn off the car, could you watch my bag for a cuple of minutes? (ooc: @Carl@)\"\n- Only the sender and tagged recipients will see that message.\nRECEIVING MESSAGES:\n- You only see messages where you are the sender OR you are tagged in the recipient tag\n- Public messages (without tags) are seen by everyone.\nEXAMPLE FORMAT:\nAlice: \"Public message everyone sees\"\nAlice: \"Private message only for Bob @Bob@\"\n(if Diana joins the conversation, and Alice wants to exclude her) Alice: *Grabs Bob and Carl, and pulls them away* \"Listen boys, let's meet this friday again!\" (ooc: @Bob,Carl@; Diana is not trustworthy)\nWHEN TO USE:\n- Most of the time public messages (no tag) are the best choice. Private messages (with tag) are mostly for the passing secrets or information that is described or infered as private.\n- Game of 20 questions. Guys are putting paper sickers on the forehead with names written on them. So in this case only person who gets the sticker put on them does not see the writting on it.\nBob: *Puts sticker with 'JACK THE RIPPER' written on it, on Alices forehead* (ooc: @Carl).\nCarl: \"Alright, we're ready.\"\nAlice: \"Good. So, am I a fictional character or a real one?\"",
|
"sys_prompt": "This is a chat between Alice, Bob and Carl. Normally all message are public (seen by everyone). But characters also able to make messages intended to specific targets using '@' tag. Usually tag is provided inside of out of character clause: (ooc: @charname@), but will be parsed if put anywhere in the message.\nTO SEND A PRIVATE MESSAGE:\n- Include a recipient tag in this exact format: @CharacterName@\n- The tag can be anywhere in your message\n- Example: \"(ooc: @Bob@) Don't tell others this secret.\"\n- For immersion sake it is better if private messages are given in context of whispering, passing notes, or being alone in some space: Alice: (ooc: @Carl@) *leans closer to Carl and whispers* \"I forgot to turn off the car, could you watch my bag for a cuple of minutes?\"\n- Only the sender and tagged recipients will see that message.\nRECEIVING MESSAGES:\n- You only see messages where you are the sender OR you are tagged in the recipient tag\n- Public messages (without tags) are seen by everyone.\nEXAMPLE FORMAT:\nAlice: \"Public message everyone sees\"\nAlice: (ooc: @Bob@)\n\"Private message only for Bob\"\n(if Diana joins the conversation, and Alice wants to exclude her) Alice: (ooc: @Bob,Carl@; Diana is not trustworthy)\n*Grabs Bob and Carl, and pulls them away* \"Listen boys, let's meet this friday again!\"\nWHEN TO USE:\n- Most of the time public messages (no tag) are the best choice. Private messages (with tag) are mostly for the passing secrets or information that is described or infered as private.\n- Game of 20 questions. Guys are putting paper sickers on the forehead with names written on them. So in this case only person who gets the sticker put on them does not see the writting on it.\nBob: *Puts sticker with 'JACK THE RIPPER' written on it, on Alices forehead* (ooc: @Carl).\nCarl: \"Alright, we're ready.\"\nAlice: \"Good. So, am I a fictional character or a real one?\"",
|
||||||
"role": "Alice",
|
"role": "Alice",
|
||||||
"filepath": "sysprompts/alice_bob_carl.json",
|
"filepath": "sysprompts/alice_bob_carl.json",
|
||||||
"chars": ["Alice", "Bob", "Carl"],
|
"chars": ["Alice", "Bob", "Carl"],
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
{
|
|
||||||
"sys_prompt": "A game of cluedo. Players are {{user}}, {{char}}, {{char2}};\n\nrooms: hall, lounge, dinning room kitchen, ballroom, conservatory, billiard room, library, study;\nweapons: candlestick, dagger, lead pipe, revolver, rope, spanner;\npeople: miss Scarlett, colonel Mustard, mrs. White, reverend Green, mrs. Peacock, professor Plum;\n\nA murder happened in a mansion with 9 rooms. Victim is dr. Black.\nPlayers goal is to find out who commited a murder, in what room and with what weapon.\nWeapons, people and rooms not involved in murder are distributed between players (as cards) by tool agent.\nThe objective of the game is to deduce the details of the murder. There are six characters, six murder weapons, and nine rooms, leaving the players with 324 possibilities. As soon as a player enters a room, they may make a suggestion as to the details, naming a suspect, the room they are in, and the weapon. For example: \"I suspect Professor Plum, in the Dining Room, with the candlestick\".\nOnce a player makes a suggestion, the others are called upon to disprove it.\nBefore the player's move, tool agent will remind that players their cards. There are two types of moves: making a suggestion (suggestion_move) and disproving other player suggestion (evidence_move);\nIn this version player wins when the correct details are named in the suggestion_move.\n\n<example_game>\n{{user}}:\nlet's start a game of cluedo!\ntool: cards of {{char}} are 'LEAD PIPE', 'BALLROOM', 'CONSERVATORY', 'STUDY', 'Mrs. White'; suggestion_move;\n{{char}}:\n(putting miss Scarlet into the Hall with the Revolver) \"I suspect miss Scarlett, in the Hall, with the revolver.\"\ntool: cards of {{char2}} are 'SPANNER', 'DAGGER', 'Professor Plum', 'LIBRARY', 'Mrs. Peacock'; evidence_move;\n{{char2}}:\n\"No objections.\" (no cards matching the suspicion of {{char}})\ntool: cards of {{user}} are 'Colonel Mustard', 'Miss Scarlett', 'DINNING ROOM', 'CANDLESTICK', 'HALL'; evidence_move;\n{{user}}:\n\"I object. 
Miss Scarlett is innocent.\" (shows card with 'Miss Scarlett')\ntool: cards of {{char2}} are 'SPANNER', 'DAGGER', 'Professor Plum', 'LIBRARY', 'Mrs. Peacock'; suggestion_move;\n{{char2}}:\n*So it was not Miss Scarlett, good to know.*\n(moves Mrs. White to the Billiard Room) \"It might have been Mrs. White, in the Billiard Room, with the Revolver.\"\ntool: cards of {{user}} are 'Colonel Mustard', 'Miss Scarlett', 'DINNING ROOM', 'CANDLESTICK', 'HALL'; evidence_move;\n{{user}}:\n(no matching cards for the assumption of {{char2}}) \"Sounds possible to me.\"\ntool: cards of {{char}} are 'LEAD PIPE', 'BALLROOM', 'CONSERVATORY', 'STUDY', 'Mrs. White'; evidence_move;\n{{char}}:\n(shows Mrs. White card) \"No. Was not Mrs. White\"\ntool: cards of {{user}} are 'Colonel Mustard', 'Miss Scarlett', 'DINNING ROOM', 'CANDLESTICK', 'HALL'; suggestion_move;\n{{user}}:\n*So not Mrs. White...* (moves Reverend Green into the Billiard Room) \"I suspect Reverend Green, in the Billiard Room, with the Revolver.\"\ntool: Correct. It was Reverend Green in the Billiard Room, with the revolver. {{user}} wins.\n</example_game>",
|
|
||||||
"role": "CluedoPlayer",
|
|
||||||
"role2": "CluedoEnjoyer",
|
|
||||||
"filepath": "sysprompts/cluedo.json",
|
|
||||||
"first_msg": "Hey guys! Want to play cluedo?"
|
|
||||||
}
|
|
||||||
6
sysprompts/coding_assistant.json
Normal file
6
sysprompts/coding_assistant.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"sys_prompt": "You are an expert software engineering assistant. Your goal is to help users with coding tasks, debugging, refactoring, and software development.\n\n## Core Principles\n1. **Security First**: Never expose secrets, keys, or credentials. Never commit sensitive data.\n2. **No Git Actions**: You can READ git info (status, log, diff) for context, but NEVER perform git actions (commit, add, push, checkout, reset, rm, etc.). Let the user handle all git operations.\n3. **Explore Before Execute**: Always understand the codebase structure before making changes.\n4. **Follow Conventions**: Match existing code style, patterns, and frameworks used in the project.\n5. **Be Concise**: Minimize output tokens while maintaining quality. Avoid unnecessary explanations.\n6. **Ask First**: When uncertain about intent, ask the user. Don't assume.\n\n## Workflow for Complex Tasks\nFor multi-step tasks, ALWAYS use the todo system to track progress:\n\n1. **Create Todo List**: At the start of complex tasks, use `todo_create` to break down work into actionable items.\n2. **Update Progress**: Mark items as `in_progress` when working on them, and `completed` when done.\n3. 
**Check Status**: Use `todo_read` to review your progress.\n\nExample workflow:\n- User: \"Add user authentication to this app\"\n- You: Create todos: [\"Analyze existing auth structure\", \"Check frameworks in use\", \"Implement auth middleware\", \"Add login endpoints\", \"Test implementation\"]\n\n## Task Execution Flow\n\n### Phase 1: Exploration (Always First)\n- Use `file_list` to understand directory structure (path defaults to FilePickerDir if not specified)\n- Use `file_read` to examine relevant files (paths are relative to FilePickerDir unless starting with `/`)\n- Use `execute_command` with `grep`/`find` to search for patterns\n- Check README, Makefile, package.json, or similar for build/test commands\n- Identify: frameworks, conventions, testing approach, lint/typecheck commands\n- **Git reads allowed**: You may use `git status`, `git log`, `git diff` for context, but only to inform your work\n- **Path handling**: Relative paths resolve against FilePickerDir; absolute paths (starting with `/`) bypass it\n\n### Phase 2: Planning\n- For complex tasks: create todo items\n- Identify files that need modification\n- Plan your approach following existing patterns\n\n### Phase 3: Implementation\n- Make changes using appropriate file tools\n- Prefer `file_write` for new files, `file_read` then edit for existing files\n- Follow existing code style exactly\n- Use existing libraries and utilities\n\n### Phase 4: Verification\n- Run tests if available (check for test scripts in README/Makefile)\n- Run linting/type checking commands\n- Verify changes work as expected\n\n### Phase 5: Completion\n- Update todos to `completed`\n- Provide concise summary of changes\n- Reference specific file paths and line numbers when relevant\n- **DO NOT commit changes** - inform user what was done so they can review and commit themselves\n\n## Command Execution\n- Use `execute_command` with a single string containing command and arguments (e.g., `go run main.go`, `ls -la`, `cd 
/tmp`)\n- Use `cd /path` to change the working directory for file operations",
|
||||||
|
"role": "CodingAssistant",
|
||||||
|
"filepath": "sysprompts/coding_assistant.json",
|
||||||
|
"first_msg": "Hello! I'm your coding assistant. Give me a specific task and I'll get started. For complex work, I'll track progress with todos."
|
||||||
|
}
|
||||||
416
tools/chain.go
Normal file
416
tools/chain.go
Normal file
@@ -0,0 +1,416 @@
|
|||||||
|
package tools
|
||||||
|
|
||||||
|
import (
	"errors"
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strconv"
	"strings"
	"time"
)
|
||||||
|
|
||||||
|
// Operator represents a chain operator between commands.
type Operator int

const (
	OpNone Operator = iota // no operator (last segment of a chain)
	OpAnd                  // &&
	OpOr                   // ||
	OpSeq                  // ;
	OpPipe                 // |
)

// Segment is a single command in a chain.
type Segment struct {
	Raw string
	Op  Operator // operator AFTER this segment
}

// ParseChain splits a command string into segments by &&, ||, ;, and |.
// Quoted spans (single or double quotes) are copied verbatim, so operator
// characters inside quotes never split a command.
func ParseChain(input string) []Segment {
	var (
		segs []Segment
		buf  strings.Builder
	)
	// flush appends the accumulated text as a segment followed by op.
	flush := func(op Operator) {
		segs = append(segs, Segment{Raw: strings.TrimSpace(buf.String()), Op: op})
		buf.Reset()
	}
	rs := []rune(input)
	for i := 0; i < len(rs); i++ {
		c := rs[i]
		switch {
		case c == '\'' || c == '"':
			// copy the quoted span verbatim, including both quote chars
			buf.WriteRune(c)
			for i++; i < len(rs) && rs[i] != c; i++ {
				buf.WriteRune(rs[i])
			}
			if i < len(rs) {
				buf.WriteRune(rs[i])
			}
		case c == '&' && i+1 < len(rs) && rs[i+1] == '&':
			flush(OpAnd)
			i++ // skip second &
		case c == ';':
			flush(OpSeq)
		case c == '|' && i+1 < len(rs) && rs[i+1] == '|':
			flush(OpOr)
			i++ // skip second |
		case c == '|':
			flush(OpPipe)
		default:
			buf.WriteRune(c)
		}
	}
	// trailing text becomes the final, operator-less segment
	if tail := strings.TrimSpace(buf.String()); tail != "" {
		segs = append(segs, Segment{Raw: tail, Op: OpNone})
	}
	return segs
}
|
||||||
|
|
||||||
|
// ExecChain executes a command string with pipe/chaining support.
// Returns the combined output of all commands.
//
// Operator semantics (the Op of segment i applies BETWEEN i and i+1):
//   - |  : segment i's output becomes segment i+1's stdin
//   - && : segment i+1 runs only if segment i succeeded
//   - || : segment i+1 runs only if segment i failed
//   - ;  : segment i+1 always runs
//
// Outputs of non-piped segments are collected and joined with newlines.
func ExecChain(command string) string {
	segments := ParseChain(command)
	if len(segments) == 0 {
		return "[error] empty command"
	}
	var collected []string
	var lastOutput string
	var lastErr error
	// NOTE(review): pipeInput is never assigned a non-empty value, so the
	// first segment always runs with empty stdin — confirm this is intended.
	pipeInput := ""
	for i, seg := range segments {
		if i > 0 {
			prevOp := segments[i-1].Op
			// && semantics: skip if previous failed
			// (lastErr/lastOutput keep the pre-skip values, so a later
			// || in the chain still sees the original failure)
			if prevOp == OpAnd && lastErr != nil {
				continue
			}
			// || semantics: skip if previous succeeded
			if prevOp == OpOr && lastErr == nil {
				continue
			}
		}
		// determine stdin for this segment
		segStdin := ""
		if i == 0 {
			segStdin = pipeInput
		} else if segments[i-1].Op == OpPipe {
			segStdin = lastOutput
		}
		lastOutput, lastErr = execSingle(seg.Raw, segStdin)
		// pipe: output flows to next command's stdin
		// && or ;: collect output
		if i < len(segments)-1 && seg.Op == OpPipe {
			continue
		}
		if lastOutput != "" {
			collected = append(collected, lastOutput)
		}
	}
	return strings.Join(collected, "\n")
}
|
||||||
|
|
||||||
|
// execSingle executes a single command (with arguments) and returns output and error.
|
||||||
|
func execSingle(command, stdin string) (string, error) {
|
||||||
|
parts := tokenize(command)
|
||||||
|
if len(parts) == 0 {
|
||||||
|
return "", errors.New("empty command")
|
||||||
|
}
|
||||||
|
name := parts[0]
|
||||||
|
args := parts[1:]
|
||||||
|
// Check if it's a built-in Go command
|
||||||
|
if result, isBuiltin := execBuiltin(name, args, stdin); isBuiltin {
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
// Otherwise execute as system command
|
||||||
|
cmd := exec.Command(name, args...)
|
||||||
|
if stdin != "" {
|
||||||
|
cmd.Stdin = strings.NewReader(stdin)
|
||||||
|
}
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
return string(output), err
|
||||||
|
}
|
||||||
|
return string(output), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// tokenize splits a command string into whitespace-separated tokens.
// Single- and double-quoted spans are kept as one token with the quote
// characters stripped; adjacent quoted/unquoted text fuses into one token.
func tokenize(input string) []string {
	var (
		out  []string
		word strings.Builder
	)
	quote := rune(0) // non-zero while inside a quoted span
	// emit flushes the current word (if any) into the token list
	emit := func() {
		if word.Len() > 0 {
			out = append(out, word.String())
			word.Reset()
		}
	}
	for _, r := range input {
		switch {
		case quote != 0:
			if r == quote {
				quote = 0 // closing quote
			} else {
				word.WriteRune(r)
			}
		case r == '\'' || r == '"':
			quote = r // opening quote
		case r == ' ' || r == '\t':
			emit()
		default:
			word.WriteRune(r)
		}
	}
	emit()
	return out
}
|
||||||
|
|
||||||
|
// execBuiltin executes a built-in command if it exists.
|
||||||
|
// Returns (result, true) if it was a built-in (even if result is empty).
|
||||||
|
// Returns ("", false) if it's not a built-in command.
|
||||||
|
func execBuiltin(name string, args []string, stdin string) (string, bool) {
|
||||||
|
switch name {
|
||||||
|
case "echo":
|
||||||
|
if stdin != "" {
|
||||||
|
return stdin, true
|
||||||
|
}
|
||||||
|
return strings.Join(args, " "), true
|
||||||
|
case "time":
|
||||||
|
return "2006-01-02 15:04:05 MST", true
|
||||||
|
case "cat":
|
||||||
|
if len(args) == 0 {
|
||||||
|
if stdin != "" {
|
||||||
|
return stdin, true
|
||||||
|
}
|
||||||
|
return "", true
|
||||||
|
}
|
||||||
|
path := args[0]
|
||||||
|
abs := path
|
||||||
|
if !filepath.IsAbs(path) {
|
||||||
|
abs = filepath.Join(cfg.FilePickerDir, path)
|
||||||
|
}
|
||||||
|
data, err := os.ReadFile(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cat: %v", err), true
|
||||||
|
}
|
||||||
|
return string(data), true
|
||||||
|
case "pwd":
|
||||||
|
return cfg.FilePickerDir, true
|
||||||
|
case "cd":
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: cd <dir>", true
|
||||||
|
}
|
||||||
|
dir := args[0]
|
||||||
|
// Resolve relative to cfg.FilePickerDir
|
||||||
|
abs := dir
|
||||||
|
if !filepath.IsAbs(dir) {
|
||||||
|
abs = filepath.Join(cfg.FilePickerDir, dir)
|
||||||
|
}
|
||||||
|
abs = filepath.Clean(abs)
|
||||||
|
info, err := os.Stat(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cd: %v", err), true
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
return "[error] cd: not a directory: " + dir, true
|
||||||
|
}
|
||||||
|
cfg.FilePickerDir = abs
|
||||||
|
return "Changed directory to: " + cfg.FilePickerDir, true
|
||||||
|
case "mkdir":
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: mkdir [-p] <dir>", true
|
||||||
|
}
|
||||||
|
createParents := false
|
||||||
|
var dirPath string
|
||||||
|
for _, a := range args {
|
||||||
|
if a == "-p" || a == "--parents" {
|
||||||
|
createParents = true
|
||||||
|
} else if dirPath == "" {
|
||||||
|
dirPath = a
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if dirPath == "" {
|
||||||
|
return "[error] usage: mkdir [-p] <dir>", true
|
||||||
|
}
|
||||||
|
abs := dirPath
|
||||||
|
if !filepath.IsAbs(dirPath) {
|
||||||
|
abs = filepath.Join(cfg.FilePickerDir, dirPath)
|
||||||
|
}
|
||||||
|
abs = filepath.Clean(abs)
|
||||||
|
var mkdirFunc func(string, os.FileMode) error
|
||||||
|
if createParents {
|
||||||
|
mkdirFunc = os.MkdirAll
|
||||||
|
} else {
|
||||||
|
mkdirFunc = os.Mkdir
|
||||||
|
}
|
||||||
|
if err := mkdirFunc(abs, 0o755); err != nil {
|
||||||
|
return fmt.Sprintf("[error] mkdir: %v", err), true
|
||||||
|
}
|
||||||
|
if createParents {
|
||||||
|
return "Created " + dirPath + " (with parents)", true
|
||||||
|
}
|
||||||
|
return "Created " + dirPath, true
|
||||||
|
case "ls":
|
||||||
|
dir := "."
|
||||||
|
for _, a := range args {
|
||||||
|
if !strings.HasPrefix(a, "-") {
|
||||||
|
dir = a
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
abs := dir
|
||||||
|
if !filepath.IsAbs(dir) {
|
||||||
|
abs = filepath.Join(cfg.FilePickerDir, dir)
|
||||||
|
}
|
||||||
|
entries, err := os.ReadDir(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] ls: %v", err), true
|
||||||
|
}
|
||||||
|
var out strings.Builder
|
||||||
|
for _, e := range entries {
|
||||||
|
info, _ := e.Info()
|
||||||
|
switch {
|
||||||
|
case e.IsDir():
|
||||||
|
fmt.Fprintf(&out, "d %-8s %s/\n", "-", e.Name())
|
||||||
|
case info != nil:
|
||||||
|
size := info.Size()
|
||||||
|
sizeStr := strconv.FormatInt(size, 10)
|
||||||
|
if size > 1024 {
|
||||||
|
sizeStr = fmt.Sprintf("%.1fKB", float64(size)/1024)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(&out, "f %-8s %s\n", sizeStr, e.Name())
|
||||||
|
default:
|
||||||
|
fmt.Fprintf(&out, "f %-8s %s\n", "?", e.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if out.Len() == 0 {
|
||||||
|
return "(empty directory)", true
|
||||||
|
}
|
||||||
|
return strings.TrimRight(out.String(), "\n"), true
|
||||||
|
case "go":
|
||||||
|
// Allow all go subcommands
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: go <subcommand> [options]", true
|
||||||
|
}
|
||||||
|
cmd := exec.Command("go", args...)
|
||||||
|
cmd.Dir = cfg.FilePickerDir
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] go %s: %v\n%s", args[0], err, string(output)), true
|
||||||
|
}
|
||||||
|
return string(output), true
|
||||||
|
case "cp":
|
||||||
|
if len(args) < 2 {
|
||||||
|
return "[error] usage: cp <source> <dest>", true
|
||||||
|
}
|
||||||
|
src := args[0]
|
||||||
|
dst := args[1]
|
||||||
|
if !filepath.IsAbs(src) {
|
||||||
|
src = filepath.Join(cfg.FilePickerDir, src)
|
||||||
|
}
|
||||||
|
if !filepath.IsAbs(dst) {
|
||||||
|
dst = filepath.Join(cfg.FilePickerDir, dst)
|
||||||
|
}
|
||||||
|
data, err := os.ReadFile(src)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cp: %v", err), true
|
||||||
|
}
|
||||||
|
err = os.WriteFile(dst, data, 0644)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cp: %v", err), true
|
||||||
|
}
|
||||||
|
return "Copied " + src + " to " + dst, true
|
||||||
|
case "mv":
|
||||||
|
if len(args) < 2 {
|
||||||
|
return "[error] usage: mv <source> <dest>", true
|
||||||
|
}
|
||||||
|
src := args[0]
|
||||||
|
dst := args[1]
|
||||||
|
if !filepath.IsAbs(src) {
|
||||||
|
src = filepath.Join(cfg.FilePickerDir, src)
|
||||||
|
}
|
||||||
|
if !filepath.IsAbs(dst) {
|
||||||
|
dst = filepath.Join(cfg.FilePickerDir, dst)
|
||||||
|
}
|
||||||
|
err := os.Rename(src, dst)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] mv: %v", err), true
|
||||||
|
}
|
||||||
|
return "Moved " + src + " to " + dst, true
|
||||||
|
case "rm":
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: rm [-r] <file>", true
|
||||||
|
}
|
||||||
|
recursive := false
|
||||||
|
var target string
|
||||||
|
for _, a := range args {
|
||||||
|
if a == "-r" || a == "-rf" || a == "-fr" || a == "-recursive" {
|
||||||
|
recursive = true
|
||||||
|
} else if target == "" {
|
||||||
|
target = a
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if target == "" {
|
||||||
|
return "[error] usage: rm [-r] <file>", true
|
||||||
|
}
|
||||||
|
abs := target
|
||||||
|
if !filepath.IsAbs(target) {
|
||||||
|
abs = filepath.Join(cfg.FilePickerDir, target)
|
||||||
|
}
|
||||||
|
info, err := os.Stat(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] rm: %v", err), true
|
||||||
|
}
|
||||||
|
if info.IsDir() {
|
||||||
|
if recursive {
|
||||||
|
err = os.RemoveAll(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] rm: %v", err), true
|
||||||
|
}
|
||||||
|
return "Removed " + abs, true
|
||||||
|
}
|
||||||
|
return "[error] rm: is a directory (use -r)", true
|
||||||
|
}
|
||||||
|
err = os.Remove(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] rm: %v", err), true
|
||||||
|
}
|
||||||
|
return "Removed " + abs, true
|
||||||
|
}
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
755
tools/fs.go
Normal file
755
tools/fs.go
Normal file
@@ -0,0 +1,755 @@
|
|||||||
|
package tools
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"gf-lt/models"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// memoryStore is the package-level store used by memory-related tools;
// nil until SetMemoryStore is called.
var memoryStore MemoryStore

// agentRole is the agent name supplied to SetMemoryStore alongside the store.
var agentRole string

// MemoryStore persists and retrieves per-agent, per-topic data.
type MemoryStore interface {
	// Memorise stores data under (agent, topic) and returns a string result.
	Memorise(agent, topic, data string) (string, error)
	// Recall returns the data stored under (agent, topic).
	Recall(agent, topic string) (string, error)
	// RecallTopics lists the topics stored for the agent.
	RecallTopics(agent string) ([]string, error)
	// Forget removes the data stored under (agent, topic).
	Forget(agent, topic string) error
}
|
||||||
|
|
||||||
|
// SetMemoryStore wires the package to a MemoryStore implementation and
// records the agent role used for subsequent memory calls.
func SetMemoryStore(store MemoryStore, role string) {
	memoryStore = store
	agentRole = role
}
|
||||||
|
|
||||||
|
// SetFSRoot sets the filesystem root (cfg.FilePickerDir) that relative
// file-tool paths resolve against. No-op when cfg is not initialised.
func SetFSRoot(dir string) {
	if cfg == nil {
		return
	}
	cfg.FilePickerDir = dir
}
|
||||||
|
|
||||||
|
// GetFSRoot returns the current filesystem root (cfg.FilePickerDir).
// NOTE(review): unlike SetFSRoot, this does not guard against a nil cfg —
// confirm cfg is always initialised before tools run.
func GetFSRoot() string {
	return cfg.FilePickerDir
}
|
||||||
|
|
||||||
|
func SetFSCwd(dir string) error {
|
||||||
|
abs, err := filepath.Abs(dir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
info, err := os.Stat(abs)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
return fmt.Errorf("not a directory: %s", dir)
|
||||||
|
}
|
||||||
|
cfg.FilePickerDir = abs
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolvePath(rel string) (string, error) {
|
||||||
|
if cfg.FilePickerDir == "" {
|
||||||
|
return "", errors.New("fs root not set")
|
||||||
|
}
|
||||||
|
if filepath.IsAbs(rel) {
|
||||||
|
abs := filepath.Clean(rel)
|
||||||
|
if !strings.HasPrefix(abs, cfg.FilePickerDir+string(os.PathSeparator)) && abs != cfg.FilePickerDir {
|
||||||
|
return "", fmt.Errorf("path escapes fs root: %s", rel)
|
||||||
|
}
|
||||||
|
return abs, nil
|
||||||
|
}
|
||||||
|
abs := filepath.Join(cfg.FilePickerDir, rel)
|
||||||
|
abs = filepath.Clean(abs)
|
||||||
|
if !strings.HasPrefix(abs, cfg.FilePickerDir+string(os.PathSeparator)) && abs != cfg.FilePickerDir {
|
||||||
|
return "", fmt.Errorf("path escapes fs root: %s", rel)
|
||||||
|
}
|
||||||
|
return abs, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// humanSize renders a byte count as B, KB or MB, with one decimal place
// for the scaled units.
func humanSize(n int64) string {
	const (
		kb = 1 << 10
		mb = 1 << 20
	)
	if n >= mb {
		return fmt.Sprintf("%.1fMB", float64(n)/mb)
	}
	if n >= kb {
		return fmt.Sprintf("%.1fKB", float64(n)/kb)
	}
	return fmt.Sprintf("%dB", n)
}
|
||||||
|
|
||||||
|
// IsImageFile reports whether path carries a common raster/vector image
// extension (case-insensitive).
func IsImageFile(path string) bool {
	switch strings.ToLower(filepath.Ext(path)) {
	case ".png", ".jpg", ".jpeg", ".gif", ".webp", ".svg":
		return true
	}
	return false
}
|
||||||
|
|
||||||
|
func FsLs(args []string, stdin string) string {
|
||||||
|
dir := ""
|
||||||
|
if len(args) > 0 {
|
||||||
|
dir = args[0]
|
||||||
|
}
|
||||||
|
abs, err := resolvePath(dir)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
entries, err := os.ReadDir(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] ls: %v", err)
|
||||||
|
}
|
||||||
|
var out strings.Builder
|
||||||
|
for _, e := range entries {
|
||||||
|
info, _ := e.Info()
|
||||||
|
switch {
|
||||||
|
case e.IsDir():
|
||||||
|
fmt.Fprintf(&out, "d %-8s %s/\n", "-", e.Name())
|
||||||
|
case info != nil:
|
||||||
|
fmt.Fprintf(&out, "f %-8s %s\n", humanSize(info.Size()), e.Name())
|
||||||
|
default:
|
||||||
|
fmt.Fprintf(&out, "f %-8s %s\n", "?", e.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if out.Len() == 0 {
|
||||||
|
return "(empty directory)"
|
||||||
|
}
|
||||||
|
return strings.TrimRight(out.String(), "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsCat(args []string, stdin string) string {
|
||||||
|
b64 := false
|
||||||
|
var path string
|
||||||
|
for _, a := range args {
|
||||||
|
if a == "-b" || a == "--base64" {
|
||||||
|
b64 = true
|
||||||
|
} else if path == "" {
|
||||||
|
path = a
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if path == "" {
|
||||||
|
return "[error] usage: cat <path>"
|
||||||
|
}
|
||||||
|
abs, err := resolvePath(path)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
data, err := os.ReadFile(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cat: %v", err)
|
||||||
|
}
|
||||||
|
if b64 {
|
||||||
|
result := base64.StdEncoding.EncodeToString(data)
|
||||||
|
if IsImageFile(path) {
|
||||||
|
result += fmt.Sprintf("\n", abs)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
return string(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsViewImg(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: view_img <image-path>"
|
||||||
|
}
|
||||||
|
path := args[0]
|
||||||
|
var abs string
|
||||||
|
if filepath.IsAbs(path) {
|
||||||
|
abs = path
|
||||||
|
} else {
|
||||||
|
var err error
|
||||||
|
abs, err = resolvePath(path)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _, err := os.Stat(abs); err != nil {
|
||||||
|
return fmt.Sprintf("[error] view_img: %v", err)
|
||||||
|
}
|
||||||
|
if !IsImageFile(path) {
|
||||||
|
return fmt.Sprintf("[error] not an image file: %s (use cat to read text files)", path)
|
||||||
|
}
|
||||||
|
dataURL, err := models.CreateImageURLFromPath(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] view_img: %v", err)
|
||||||
|
}
|
||||||
|
result := models.MultimodalToolResp{
|
||||||
|
Type: "multimodal_content",
|
||||||
|
Parts: []map[string]string{
|
||||||
|
{"type": "text", "text": "Image: " + path},
|
||||||
|
{"type": "image_url", "url": dataURL},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
jsonResult, err := json.Marshal(result)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] view_img: %v", err)
|
||||||
|
}
|
||||||
|
return string(jsonResult)
|
||||||
|
}
|
||||||
|
|
||||||
|
// FsSee is deprecated, use FsViewImg
|
||||||
|
func FsSee(args []string, stdin string) string {
|
||||||
|
return FsViewImg(args, stdin)
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsWrite(args []string, stdin string) string {
|
||||||
|
b64 := false
|
||||||
|
var path string
|
||||||
|
var contentParts []string
|
||||||
|
for _, a := range args {
|
||||||
|
switch a {
|
||||||
|
case "-b", "--base64":
|
||||||
|
b64 = true
|
||||||
|
default:
|
||||||
|
if path == "" {
|
||||||
|
path = a
|
||||||
|
} else {
|
||||||
|
contentParts = append(contentParts, a)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if path == "" {
|
||||||
|
return "[error] usage: write <path> [content] or pipe stdin"
|
||||||
|
}
|
||||||
|
abs, err := resolvePath(path)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(filepath.Dir(abs), 0o755); err != nil {
|
||||||
|
return fmt.Sprintf("[error] mkdir: %v", err)
|
||||||
|
}
|
||||||
|
var data []byte
|
||||||
|
if b64 {
|
||||||
|
src := stdin
|
||||||
|
if src == "" && len(contentParts) > 0 {
|
||||||
|
src = strings.Join(contentParts, " ")
|
||||||
|
}
|
||||||
|
src = strings.TrimSpace(src)
|
||||||
|
var err error
|
||||||
|
data, err = base64.StdEncoding.DecodeString(src)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] base64 decode: %v", err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if len(contentParts) > 0 {
|
||||||
|
data = []byte(strings.Join(contentParts, " "))
|
||||||
|
} else {
|
||||||
|
data = []byte(stdin)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(abs, data, 0o644); err != nil {
|
||||||
|
return fmt.Sprintf("[error] write: %v", err)
|
||||||
|
}
|
||||||
|
size := humanSize(int64(len(data)))
|
||||||
|
result := fmt.Sprintf("Written %s → %s", size, path)
|
||||||
|
if IsImageFile(path) {
|
||||||
|
result += fmt.Sprintf("\n", abs)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsStat(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: stat <path>"
|
||||||
|
}
|
||||||
|
abs, err := resolvePath(args[0])
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
info, err := os.Stat(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] stat: %v", err)
|
||||||
|
}
|
||||||
|
mime := "application/octet-stream"
|
||||||
|
if IsImageFile(args[0]) {
|
||||||
|
ext := strings.ToLower(filepath.Ext(args[0]))
|
||||||
|
switch ext {
|
||||||
|
case ".png":
|
||||||
|
mime = "image/png"
|
||||||
|
case ".jpg", ".jpeg":
|
||||||
|
mime = "image/jpeg"
|
||||||
|
case ".gif":
|
||||||
|
mime = "image/gif"
|
||||||
|
case ".webp":
|
||||||
|
mime = "image/webp"
|
||||||
|
case ".svg":
|
||||||
|
mime = "image/svg+xml"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var out strings.Builder
|
||||||
|
fmt.Fprintf(&out, "File: %s\n", args[0])
|
||||||
|
fmt.Fprintf(&out, "Size: %s (%d bytes)\n", humanSize(info.Size()), info.Size())
|
||||||
|
fmt.Fprintf(&out, "Type: %s\n", mime)
|
||||||
|
fmt.Fprintf(&out, "Modified: %s\n", info.ModTime().Format(time.RFC3339))
|
||||||
|
if info.IsDir() {
|
||||||
|
fmt.Fprintf(&out, "Kind: directory\n")
|
||||||
|
}
|
||||||
|
return strings.TrimRight(out.String(), "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsRm(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: rm <path>"
|
||||||
|
}
|
||||||
|
abs, err := resolvePath(args[0])
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
if err := os.RemoveAll(abs); err != nil {
|
||||||
|
return fmt.Sprintf("[error] rm: %v", err)
|
||||||
|
}
|
||||||
|
return "Removed " + args[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsCp(args []string, stdin string) string {
|
||||||
|
if len(args) < 2 {
|
||||||
|
return "[error] usage: cp <src> <dst>"
|
||||||
|
}
|
||||||
|
srcAbs, err := resolvePath(args[0])
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
dstAbs, err := resolvePath(args[1])
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
data, err := os.ReadFile(srcAbs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cp read: %v", err)
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(filepath.Dir(dstAbs), 0o755); err != nil {
|
||||||
|
return fmt.Sprintf("[error] cp mkdir: %v", err)
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(dstAbs, data, 0o644); err != nil {
|
||||||
|
return fmt.Sprintf("[error] cp write: %v", err)
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("Copied %s → %s (%s)", args[0], args[1], humanSize(int64(len(data))))
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsMv(args []string, stdin string) string {
|
||||||
|
if len(args) < 2 {
|
||||||
|
return "[error] usage: mv <src> <dst>"
|
||||||
|
}
|
||||||
|
srcAbs, err := resolvePath(args[0])
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
dstAbs, err := resolvePath(args[1])
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
if err := os.MkdirAll(filepath.Dir(dstAbs), 0o755); err != nil {
|
||||||
|
return fmt.Sprintf("[error] mv mkdir: %v", err)
|
||||||
|
}
|
||||||
|
if err := os.Rename(srcAbs, dstAbs); err != nil {
|
||||||
|
return fmt.Sprintf("[error] mv: %v", err)
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("Moved %s → %s", args[0], args[1])
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsMkdir(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: mkdir [-p] <dir>"
|
||||||
|
}
|
||||||
|
createParents := false
|
||||||
|
var dirPath string
|
||||||
|
for _, a := range args {
|
||||||
|
if a == "-p" || a == "--parents" {
|
||||||
|
createParents = true
|
||||||
|
} else if dirPath == "" {
|
||||||
|
dirPath = a
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if dirPath == "" {
|
||||||
|
return "[error] usage: mkdir [-p] <dir>"
|
||||||
|
}
|
||||||
|
abs, err := resolvePath(dirPath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] %v", err)
|
||||||
|
}
|
||||||
|
var mkdirFunc func(string, os.FileMode) error
|
||||||
|
if createParents {
|
||||||
|
mkdirFunc = os.MkdirAll
|
||||||
|
} else {
|
||||||
|
mkdirFunc = os.Mkdir
|
||||||
|
}
|
||||||
|
if err := mkdirFunc(abs, 0o755); err != nil {
|
||||||
|
return fmt.Sprintf("[error] mkdir: %v", err)
|
||||||
|
}
|
||||||
|
if createParents {
|
||||||
|
return "Created " + dirPath + " (with parents)"
|
||||||
|
}
|
||||||
|
return "Created " + dirPath
|
||||||
|
}
|
||||||
|
|
||||||
|
// Text processing commands
|
||||||
|
|
||||||
|
// FsEcho returns piped stdin verbatim when present; otherwise it joins
// the arguments with single spaces.
func FsEcho(args []string, stdin string) string {
	if stdin != "" {
		return stdin
	}
	return strings.Join(args, " ")
}
|
||||||
|
|
||||||
|
// FsTime reports the current local time as "2006-01-02 15:04:05 MST".
func FsTime(args []string, stdin string) string {
	return time.Now().Format("2006-01-02 15:04:05 MST")
}
|
||||||
|
|
||||||
|
// FsGrep filters stdin lines by a literal substring pattern.
// Flags: -i case-insensitive, -v invert match, -c count only.
func FsGrep(args []string, stdin string) string {
	if len(args) == 0 {
		return "[error] usage: grep [-i] [-v] [-c] <pattern>"
	}
	var foldCase, invert, countOnly bool
	var pattern string
	for _, a := range args {
		switch a {
		case "-i":
			foldCase = true
		case "-v":
			invert = true
		case "-c":
			countOnly = true
		default:
			pattern = a
		}
	}
	if pattern == "" {
		return "[error] pattern required"
	}
	if foldCase {
		pattern = strings.ToLower(pattern)
	}
	var hits []string
	for _, line := range strings.Split(stdin, "\n") {
		subject := line
		if foldCase {
			subject = strings.ToLower(line)
		}
		ok := strings.Contains(subject, pattern)
		if invert {
			ok = !ok
		}
		if ok {
			hits = append(hits, line)
		}
	}
	if countOnly {
		return strconv.Itoa(len(hits))
	}
	return strings.Join(hits, "\n")
}
|
||||||
|
|
||||||
|
// FsHead returns the first n lines of stdin (default 10). The count may
// be given as "-n K" or a bare numeric argument.
func FsHead(args []string, stdin string) string {
	limit := 10
	for i, a := range args {
		switch {
		case a == "-n" && i+1 < len(args):
			if v, err := strconv.Atoi(args[i+1]); err == nil {
				limit = v
			}
		case strings.HasPrefix(a, "-"):
			// Unknown flag: ignore.
		default:
			if v, err := strconv.Atoi(a); err == nil {
				limit = v
			}
		}
	}
	lines := strings.Split(stdin, "\n")
	if limit > 0 && len(lines) > limit {
		lines = lines[:limit]
	}
	return strings.Join(lines, "\n")
}
|
||||||
|
|
||||||
|
// FsTail returns the last n lines of stdin (default 10). The count may
// be given as "-n K" or a bare numeric argument.
func FsTail(args []string, stdin string) string {
	limit := 10
	for i, a := range args {
		switch {
		case a == "-n" && i+1 < len(args):
			if v, err := strconv.Atoi(args[i+1]); err == nil {
				limit = v
			}
		case strings.HasPrefix(a, "-"):
			// Unknown flag: ignore.
		default:
			if v, err := strconv.Atoi(a); err == nil {
				limit = v
			}
		}
	}
	lines := strings.Split(stdin, "\n")
	if limit > 0 && len(lines) > limit {
		lines = lines[len(lines)-limit:]
	}
	return strings.Join(lines, "\n")
}
|
||||||
|
|
||||||
|
// FsWc counts lines, words and characters (bytes) of stdin; the flags
// -l/-w/-c select a single count. Note a trailing newline adds a line,
// matching the original split-based behavior.
func FsWc(args []string, stdin string) string {
	lineCount := len(strings.Split(stdin, "\n"))
	wordCount := len(strings.Fields(stdin))
	charCount := len(stdin)
	if len(args) > 0 {
		switch args[0] {
		case "-l":
			return strconv.Itoa(lineCount)
		case "-w":
			return strconv.Itoa(wordCount)
		case "-c":
			return strconv.Itoa(charCount)
		}
	}
	return fmt.Sprintf("%d lines, %d words, %d chars", lineCount, wordCount, charCount)
}
|
||||||
|
|
||||||
|
// FsSort sorts stdin lines lexically; -n compares as integers
// (unparsable lines compare as 0), -r reverses the order.
func FsSort(args []string, stdin string) string {
	lines := strings.Split(stdin, "\n")
	var desc, numeric bool
	for _, a := range args {
		switch a {
		case "-r":
			desc = true
		case "-n":
			numeric = true
		}
	}
	less := func(i, j int) bool {
		if numeric {
			a, _ := strconv.Atoi(lines[i])
			b, _ := strconv.Atoi(lines[j])
			if desc {
				return a > b
			}
			return a < b
		}
		if desc {
			return lines[i] > lines[j]
		}
		return lines[i] < lines[j]
	}
	sort.Slice(lines, less)
	return strings.Join(lines, "\n")
}
|
||||||
|
|
||||||
|
// FsUniq collapses consecutive duplicate stdin lines (like uniq);
// -c prefixes each emitted line with its run length.
func FsUniq(args []string, stdin string) string {
	withCount := false
	for _, a := range args {
		if a == "-c" {
			withCount = true
		}
	}
	var out []string
	flush := func(line string, n int) {
		if withCount {
			out = append(out, fmt.Sprintf("%d %s", n, line))
		} else {
			out = append(out, line)
		}
	}
	var current string
	run := 0
	for _, line := range strings.Split(stdin, "\n") {
		if run > 0 && line == current {
			run++
			continue
		}
		if run > 0 {
			flush(current, run)
		}
		current = line
		run = 1
	}
	if run > 0 {
		flush(current, run)
	}
	return strings.Join(out, "\n")
}
|
||||||
|
|
||||||
|
// allowedGitSubcommands whitelists read-only git operations; FsGit
// rejects anything not listed here so the agent cannot mutate the repo.
var allowedGitSubcommands = map[string]bool{
	"status":    true,
	"log":       true,
	"diff":      true,
	"show":      true,
	"branch":    true,
	"reflog":    true,
	"rev-parse": true,
	"shortlog":  true,
	"describe":  true,
	"rev-list":  true,
}
|
||||||
|
|
||||||
|
func FsGit(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: git <subcommand> [options]"
|
||||||
|
}
|
||||||
|
subcmd := args[0]
|
||||||
|
if !allowedGitSubcommands[subcmd] {
|
||||||
|
return fmt.Sprintf("[error] git: '%s' is not an allowed git command. Allowed: status, log, diff, show, branch, reflog, rev-parse, shortlog, describe, rev-list", subcmd)
|
||||||
|
}
|
||||||
|
abs, err := resolvePath(".")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] git: %v", err)
|
||||||
|
}
|
||||||
|
// Pass all args to git (first arg is subcommand, rest are options)
|
||||||
|
cmd := exec.Command("git", args...)
|
||||||
|
cmd.Dir = abs
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] git %s: %v\n%s", subcmd, err, string(output))
|
||||||
|
}
|
||||||
|
return string(output)
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsPwd(args []string, stdin string) string {
|
||||||
|
return cfg.FilePickerDir
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsCd(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: cd <dir>"
|
||||||
|
}
|
||||||
|
dir := args[0]
|
||||||
|
abs, err := resolvePath(dir)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cd: %v", err)
|
||||||
|
}
|
||||||
|
info, err := os.Stat(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] cd: %v", err)
|
||||||
|
}
|
||||||
|
if !info.IsDir() {
|
||||||
|
return "[error] cd: not a directory: " + dir
|
||||||
|
}
|
||||||
|
cfg.FilePickerDir = abs
|
||||||
|
return "Changed directory to: " + cfg.FilePickerDir
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsSed(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: sed 's/old/new/[g]' [file]"
|
||||||
|
}
|
||||||
|
inPlace := false
|
||||||
|
var filePath string
|
||||||
|
var pattern string
|
||||||
|
for _, a := range args {
|
||||||
|
switch a {
|
||||||
|
case "-i", "--in-place":
|
||||||
|
inPlace = true
|
||||||
|
default:
|
||||||
|
if strings.HasPrefix(a, "s") && len(a) > 1 {
|
||||||
|
pattern = a
|
||||||
|
} else if filePath == "" && !strings.HasPrefix(a, "-") {
|
||||||
|
filePath = a
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if pattern == "" {
|
||||||
|
return "[error] usage: sed 's/old/new/[g]' [file]"
|
||||||
|
}
|
||||||
|
// Parse pattern: s/old/new/flags
|
||||||
|
parts := strings.Split(pattern[1:], "/")
|
||||||
|
if len(parts) < 2 {
|
||||||
|
return "[error] invalid sed pattern. Use: s/old/new/[g]"
|
||||||
|
}
|
||||||
|
oldStr := parts[0]
|
||||||
|
newStr := parts[1]
|
||||||
|
global := len(parts) >= 3 && strings.Contains(parts[2], "g")
|
||||||
|
var content string
|
||||||
|
switch {
|
||||||
|
case filePath != "" && stdin == "":
|
||||||
|
abs, err := resolvePath(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] sed: %v", err)
|
||||||
|
}
|
||||||
|
data, err := os.ReadFile(abs)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] sed: %v", err)
|
||||||
|
}
|
||||||
|
content = string(data)
|
||||||
|
case stdin != "":
|
||||||
|
content = stdin
|
||||||
|
default:
|
||||||
|
return "[error] sed: no input (use file path or pipe from stdin)"
|
||||||
|
}
|
||||||
|
// Apply sed replacement
|
||||||
|
if global {
|
||||||
|
content = strings.ReplaceAll(content, oldStr, newStr)
|
||||||
|
} else {
|
||||||
|
content = strings.Replace(content, oldStr, newStr, 1)
|
||||||
|
}
|
||||||
|
if inPlace && filePath != "" {
|
||||||
|
abs, err := resolvePath(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] sed: %v", err)
|
||||||
|
}
|
||||||
|
if err := os.WriteFile(abs, []byte(content), 0644); err != nil {
|
||||||
|
return fmt.Sprintf("[error] sed: %v", err)
|
||||||
|
}
|
||||||
|
return "Modified " + filePath
|
||||||
|
}
|
||||||
|
return content
|
||||||
|
}
|
||||||
|
|
||||||
|
func FsMemory(args []string, stdin string) string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return "[error] usage: memory store <topic> <data> | memory get <topic> | memory list | memory forget <topic>"
|
||||||
|
}
|
||||||
|
if memoryStore == nil {
|
||||||
|
return "[error] memory store not initialized"
|
||||||
|
}
|
||||||
|
switch args[0] {
|
||||||
|
case "store":
|
||||||
|
if len(args) < 3 && stdin == "" {
|
||||||
|
return "[error] usage: memory store <topic> <data>"
|
||||||
|
}
|
||||||
|
topic := args[1]
|
||||||
|
var data string
|
||||||
|
if len(args) >= 3 {
|
||||||
|
data = strings.Join(args[2:], " ")
|
||||||
|
} else {
|
||||||
|
data = stdin
|
||||||
|
}
|
||||||
|
_, err := memoryStore.Memorise(agentRole, topic, data)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] failed to store: %v", err)
|
||||||
|
}
|
||||||
|
return "Stored under topic: " + topic
|
||||||
|
case "get":
|
||||||
|
if len(args) < 2 {
|
||||||
|
return "[error] usage: memory get <topic>"
|
||||||
|
}
|
||||||
|
topic := args[1]
|
||||||
|
data, err := memoryStore.Recall(agentRole, topic)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] failed to recall: %v", err)
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("Topic: %s\n%s", topic, data)
|
||||||
|
case "list", "topics":
|
||||||
|
topics, err := memoryStore.RecallTopics(agentRole)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] failed to list topics: %v", err)
|
||||||
|
}
|
||||||
|
if len(topics) == 0 {
|
||||||
|
return "No topics stored."
|
||||||
|
}
|
||||||
|
return "Topics: " + strings.Join(topics, ", ")
|
||||||
|
case "forget", "delete":
|
||||||
|
if len(args) < 2 {
|
||||||
|
return "[error] usage: memory forget <topic>"
|
||||||
|
}
|
||||||
|
topic := args[1]
|
||||||
|
err := memoryStore.Forget(agentRole, topic)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("[error] failed to forget: %v", err)
|
||||||
|
}
|
||||||
|
return "Deleted topic: " + topic
|
||||||
|
default:
|
||||||
|
return fmt.Sprintf("[error] unknown subcommand: %s. Use: store, get, list, topics, forget, delete", args[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
645
tools/pw.go
Normal file
645
tools/pw.go
Normal file
@@ -0,0 +1,645 @@
|
|||||||
|
package tools
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"gf-lt/models"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/playwright-community/playwright-go"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
pw *playwright.Playwright
|
||||||
|
browser playwright.Browser
|
||||||
|
browserStarted bool
|
||||||
|
browserStartMu sync.Mutex
|
||||||
|
page playwright.Page
|
||||||
|
)
|
||||||
|
|
||||||
|
func PwShutDown() error {
|
||||||
|
if pw == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
pwStop(nil)
|
||||||
|
return pw.Stop()
|
||||||
|
}
|
||||||
|
|
||||||
|
func InstallPW() error {
|
||||||
|
err := playwright.Install(&playwright.RunOptions{Verbose: false})
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("playwright not available", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func CheckPlaywright() error {
|
||||||
|
var err error
|
||||||
|
pw, err = playwright.Run()
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("playwright not available", "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwStart(args map[string]string) []byte {
|
||||||
|
browserStartMu.Lock()
|
||||||
|
defer browserStartMu.Unlock()
|
||||||
|
if browserStarted {
|
||||||
|
return []byte(`{"error": "Browser already started"}`)
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
browser, err = pw.Chromium.Launch(playwright.BrowserTypeLaunchOptions{
|
||||||
|
Headless: playwright.Bool(!cfg.PlaywrightDebug),
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to launch browser: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
page, err = browser.NewPage()
|
||||||
|
if err != nil {
|
||||||
|
browser.Close()
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to create page: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
browserStarted = true
|
||||||
|
return []byte(`{"success": true, "message": "Browser started"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwStop(args map[string]string) []byte {
|
||||||
|
browserStartMu.Lock()
|
||||||
|
defer browserStartMu.Unlock()
|
||||||
|
if !browserStarted {
|
||||||
|
return []byte(`{"success": true, "message": "Browser was not running"}`)
|
||||||
|
}
|
||||||
|
if page != nil {
|
||||||
|
page.Close()
|
||||||
|
page = nil
|
||||||
|
}
|
||||||
|
if browser != nil {
|
||||||
|
browser.Close()
|
||||||
|
browser = nil
|
||||||
|
}
|
||||||
|
browserStarted = false
|
||||||
|
return []byte(`{"success": true, "message": "Browser stopped"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwIsRunning(args map[string]string) []byte {
|
||||||
|
if browserStarted {
|
||||||
|
return []byte(`{"running": true, "message": "Browser is running"}`)
|
||||||
|
}
|
||||||
|
return []byte(`{"running": false, "message": "Browser is not running"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwNavigate(args map[string]string) []byte {
|
||||||
|
url, ok := args["url"]
|
||||||
|
if !ok || url == "" {
|
||||||
|
return []byte(`{"error": "url not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
_, err := page.Goto(url)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to navigate: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
title, _ := page.Title()
|
||||||
|
pageURL := page.URL()
|
||||||
|
return []byte(fmt.Sprintf(`{"success": true, "title": "%s", "url": "%s"}`, title, pageURL))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwClick(args map[string]string) []byte {
|
||||||
|
selector, ok := args["selector"]
|
||||||
|
if !ok || selector == "" {
|
||||||
|
return []byte(`{"error": "selector not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
index := 0
|
||||||
|
if args["index"] != "" {
|
||||||
|
if i, err := strconv.Atoi(args["index"]); err != nil {
|
||||||
|
logger.Warn("failed to parse index", "value", args["index"], "error", err)
|
||||||
|
} else {
|
||||||
|
index = i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if index >= count {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "Element not found at index %d (found %d elements)"}`, index, count))
|
||||||
|
}
|
||||||
|
err = locator.Nth(index).Click()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to click: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(`{"success": true, "message": "Clicked element"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwFill(args map[string]string) []byte {
|
||||||
|
selector, ok := args["selector"]
|
||||||
|
if !ok || selector == "" {
|
||||||
|
return []byte(`{"error": "selector not provided"}`)
|
||||||
|
}
|
||||||
|
text := args["text"]
|
||||||
|
if text == "" {
|
||||||
|
text = ""
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
index := 0
|
||||||
|
if args["index"] != "" {
|
||||||
|
if i, err := strconv.Atoi(args["index"]); err != nil {
|
||||||
|
logger.Warn("failed to parse index", "value", args["index"], "error", err)
|
||||||
|
} else {
|
||||||
|
index = i
|
||||||
|
}
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if index >= count {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "Element not found at index %d"}`, index))
|
||||||
|
}
|
||||||
|
err = locator.Nth(index).Fill(text)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to fill: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(`{"success": true, "message": "Filled input"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwExtractText(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
if selector == "" {
|
||||||
|
selector = "body"
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"error": "No elements found"}`)
|
||||||
|
}
|
||||||
|
if selector == "body" {
|
||||||
|
text, err := page.Locator("body").TextContent()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get text: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"text": "%s"}`, text))
|
||||||
|
}
|
||||||
|
var texts []string
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
text, err := locator.Nth(i).TextContent()
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
texts = append(texts, text)
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"text": "%s"}`, joinLines(texts)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// joinLines concatenates lines with a single "\n" separator.
// The hand-rolled Builder loop was equivalent to strings.Join.
func joinLines(lines []string) string {
	return strings.Join(lines, "\n")
}
|
||||||
|
|
||||||
|
func pwScreenshot(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
fullPage := args["full_page"] == "true"
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
path := fmt.Sprintf("/tmp/pw_screenshot_%d.png", os.Getpid())
|
||||||
|
var err error
|
||||||
|
if selector != "" && selector != "body" {
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
_, err = locator.Screenshot(playwright.LocatorScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
_, err = page.Screenshot(playwright.PageScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
FullPage: playwright.Bool(fullPage),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to take screenshot: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"path": "%s"}`, path))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwScreenshotAndView(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
fullPage := args["full_page"] == "true"
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
path := fmt.Sprintf("/tmp/pw_screenshot_%d.png", os.Getpid())
|
||||||
|
var err error
|
||||||
|
if selector != "" && selector != "body" {
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
_, err = locator.Screenshot(playwright.LocatorScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
_, err = page.Screenshot(playwright.PageScreenshotOptions{
|
||||||
|
Path: playwright.String(path),
|
||||||
|
FullPage: playwright.Bool(fullPage),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to take screenshot: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
dataURL, err := models.CreateImageURLFromPath(path)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to create image URL: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
resp := models.MultimodalToolResp{
|
||||||
|
Type: "multimodal_content",
|
||||||
|
Parts: []map[string]string{
|
||||||
|
{"type": "text", "text": "Screenshot saved: " + path},
|
||||||
|
{"type": "image_url", "url": dataURL},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
jsonResult, err := json.Marshal(resp)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to marshal result: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return jsonResult
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwWaitForSelector(args map[string]string) []byte {
|
||||||
|
selector, ok := args["selector"]
|
||||||
|
if !ok || selector == "" {
|
||||||
|
return []byte(`{"error": "selector not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
timeout := 30000
|
||||||
|
if args["timeout"] != "" {
|
||||||
|
if t, err := strconv.Atoi(args["timeout"]); err != nil {
|
||||||
|
logger.Warn("failed to parse timeout", "value", args["timeout"], "error", err)
|
||||||
|
} else {
|
||||||
|
timeout = t
|
||||||
|
}
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
err := locator.WaitFor(playwright.LocatorWaitForOptions{
|
||||||
|
Timeout: playwright.Float(float64(timeout)),
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "element not found: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(`{"success": true, "message": "Element found"}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwDrag(args map[string]string) []byte {
|
||||||
|
x1, ok := args["x1"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "x1 not provided"}`)
|
||||||
|
}
|
||||||
|
y1, ok := args["y1"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "y1 not provided"}`)
|
||||||
|
}
|
||||||
|
x2, ok := args["x2"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "x2 not provided"}`)
|
||||||
|
}
|
||||||
|
y2, ok := args["y2"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "y2 not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
var fx1, fy1, fx2, fy2 float64
|
||||||
|
if parsedX1, err := strconv.ParseFloat(x1, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse x1", "value", x1, "error", err)
|
||||||
|
} else {
|
||||||
|
fx1 = parsedX1
|
||||||
|
}
|
||||||
|
if parsedY1, err := strconv.ParseFloat(y1, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse y1", "value", y1, "error", err)
|
||||||
|
} else {
|
||||||
|
fy1 = parsedY1
|
||||||
|
}
|
||||||
|
if parsedX2, err := strconv.ParseFloat(x2, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse x2", "value", x2, "error", err)
|
||||||
|
} else {
|
||||||
|
fx2 = parsedX2
|
||||||
|
}
|
||||||
|
if parsedY2, err := strconv.ParseFloat(y2, 64); err != nil {
|
||||||
|
logger.Warn("failed to parse y2", "value", y2, "error", err)
|
||||||
|
} else {
|
||||||
|
fy2 = parsedY2
|
||||||
|
}
|
||||||
|
mouse := page.Mouse()
|
||||||
|
err := mouse.Move(fx1, fy1)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to move mouse: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Down()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to mouse down: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Move(fx2, fy2)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to move mouse: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Up()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to mouse up: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"success": true, "message": "Dragged from (%s,%s) to (%s,%s)"}`, x1, y1, x2, y2))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwDragBySelector(args map[string]string) []byte {
|
||||||
|
fromSelector, ok := args["fromSelector"]
|
||||||
|
if !ok || fromSelector == "" {
|
||||||
|
return []byte(`{"error": "fromSelector not provided"}`)
|
||||||
|
}
|
||||||
|
toSelector, ok := args["toSelector"]
|
||||||
|
if !ok || toSelector == "" {
|
||||||
|
return []byte(`{"error": "toSelector not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
fromJS := fmt.Sprintf(`
|
||||||
|
function getCenter(selector) {
|
||||||
|
const el = document.querySelector(selector);
|
||||||
|
if (!el) return null;
|
||||||
|
const rect = el.getBoundingClientRect();
|
||||||
|
return { x: rect.left + rect.width / 2, y: rect.top + rect.height / 2 };
|
||||||
|
}
|
||||||
|
getCenter(%q)
|
||||||
|
`, fromSelector)
|
||||||
|
toJS := fmt.Sprintf(`
|
||||||
|
function getCenter(selector) {
|
||||||
|
const el = document.querySelector(selector);
|
||||||
|
if (!el) return null;
|
||||||
|
const rect = el.getBoundingClientRect();
|
||||||
|
return { x: rect.left + rect.width / 2, y: rect.top + rect.height / 2 };
|
||||||
|
}
|
||||||
|
getCenter(%q)
|
||||||
|
`, toSelector)
|
||||||
|
fromResult, err := page.Evaluate(fromJS)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get from element: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
fromMap, ok := fromResult.(map[string]interface{})
|
||||||
|
if !ok || fromMap == nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "from selector '%s' not found"}`, fromSelector))
|
||||||
|
}
|
||||||
|
fromX := fromMap["x"].(float64)
|
||||||
|
fromY := fromMap["y"].(float64)
|
||||||
|
toResult, err := page.Evaluate(toJS)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get to element: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
toMap, ok := toResult.(map[string]interface{})
|
||||||
|
if !ok || toMap == nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "to selector '%s' not found"}`, toSelector))
|
||||||
|
}
|
||||||
|
toX := toMap["x"].(float64)
|
||||||
|
toY := toMap["y"].(float64)
|
||||||
|
mouse := page.Mouse()
|
||||||
|
err = mouse.Move(fromX, fromY)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to move mouse: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Down()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to mouse down: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Move(toX, toY)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to move mouse: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
err = mouse.Up()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to mouse up: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
msg := fmt.Sprintf("Dragged from %s (%.0f,%.0f) to %s (%.0f,%.0f)", fromSelector, fromX, fromY, toSelector, toX, toY)
|
||||||
|
return []byte(fmt.Sprintf(`{"success": true, "message": "%s"}`, msg))
|
||||||
|
}
|
||||||
|
|
||||||
|
// nolint:unused
|
||||||
|
func pwClickAt(args map[string]string) []byte {
|
||||||
|
x, ok := args["x"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "x not provided"}`)
|
||||||
|
}
|
||||||
|
y, ok := args["y"]
|
||||||
|
if !ok {
|
||||||
|
return []byte(`{"error": "y not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
fx, err := strconv.ParseFloat(x, 64)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to parse x: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
fy, err := strconv.ParseFloat(y, 64)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to parse y: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
mouse := page.Mouse()
|
||||||
|
err = mouse.Click(fx, fy)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to click: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"success": true, "message": "Clicked at (%s,%s)"}`, x, y))
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwGetHTML(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
if selector == "" {
|
||||||
|
selector = "body"
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"error": "No elements found"}`)
|
||||||
|
}
|
||||||
|
html, err := locator.First().InnerHTML()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get HTML: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"html": %s}`, jsonString(html)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// DOMElement is a JSON-serializable snapshot of a DOM node, as produced by
// elementToDOM/buildDOMTree. Empty fields are omitted from the encoding.
type DOMElement struct {
	// Tag is the lowercased node name (e.g. "div"), per elementToDOM.
	Tag string `json:"tag,omitempty"`
	// Attributes maps attribute names to their string values.
	Attributes map[string]string `json:"attributes,omitempty"`
	// Text is the node's text content, when non-empty.
	Text string `json:"text,omitempty"`
	// Children holds recursively extracted descendant elements.
	Children []DOMElement `json:"children,omitempty"`
	// Selector is not populated by the builders visible in this file —
	// presumably a CSS selector locating this element; confirm with callers.
	Selector string `json:"selector,omitempty"`
	// InnerHTML is the element's inner HTML markup, when non-empty. Note it
	// overlaps with Children, which cover the same subtree structurally.
	InnerHTML string `json:"innerHTML,omitempty"`
}
|
||||||
|
|
||||||
|
func buildDOMTree(locator playwright.Locator) ([]DOMElement, error) {
|
||||||
|
var results []DOMElement
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
el := locator.Nth(i)
|
||||||
|
dom, err := elementToDOM(el)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
results = append(results, dom)
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func elementToDOM(el playwright.Locator) (DOMElement, error) {
|
||||||
|
dom := DOMElement{}
|
||||||
|
tag, err := el.Evaluate(`el => el.nodeName`, nil)
|
||||||
|
if err == nil {
|
||||||
|
dom.Tag = strings.ToLower(fmt.Sprintf("%v", tag))
|
||||||
|
}
|
||||||
|
attributes := make(map[string]string)
|
||||||
|
attrs, err := el.Evaluate(`el => {
|
||||||
|
let attrs = {};
|
||||||
|
for (let i = 0; i < el.attributes.length; i++) {
|
||||||
|
let attr = el.attributes[i];
|
||||||
|
attrs[attr.name] = attr.value;
|
||||||
|
}
|
||||||
|
return attrs;
|
||||||
|
}`, nil)
|
||||||
|
if err == nil {
|
||||||
|
if amap, ok := attrs.(map[string]any); ok {
|
||||||
|
for k, v := range amap {
|
||||||
|
if vs, ok := v.(string); ok {
|
||||||
|
attributes[k] = vs
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(attributes) > 0 {
|
||||||
|
dom.Attributes = attributes
|
||||||
|
}
|
||||||
|
text, err := el.TextContent()
|
||||||
|
if err == nil && text != "" {
|
||||||
|
dom.Text = text
|
||||||
|
}
|
||||||
|
innerHTML, err := el.InnerHTML()
|
||||||
|
if err == nil && innerHTML != "" {
|
||||||
|
dom.InnerHTML = innerHTML
|
||||||
|
}
|
||||||
|
childCount, _ := el.Count()
|
||||||
|
if childCount > 0 {
|
||||||
|
childrenLocator := el.Locator("*")
|
||||||
|
children, err := buildDOMTree(childrenLocator)
|
||||||
|
if err == nil && len(children) > 0 {
|
||||||
|
dom.Children = children
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return dom, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func pwGetDOM(args map[string]string) []byte {
|
||||||
|
selector := args["selector"]
|
||||||
|
if selector == "" {
|
||||||
|
selector = "body"
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
locator := page.Locator(selector)
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to find elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"error": "No elements found"}`)
|
||||||
|
}
|
||||||
|
dom, err := elementToDOM(locator.First())
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to get DOM: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
data, err := json.Marshal(dom)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to marshal DOM: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"dom": %s}`, string(data)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// nolint:unused
|
||||||
|
func pwSearchElements(args map[string]string) []byte {
|
||||||
|
text := args["text"]
|
||||||
|
selector := args["selector"]
|
||||||
|
if text == "" && selector == "" {
|
||||||
|
return []byte(`{"error": "text or selector not provided"}`)
|
||||||
|
}
|
||||||
|
if !browserStarted || page == nil {
|
||||||
|
return []byte(`{"error": "Browser not started. Call pw_start first."}`)
|
||||||
|
}
|
||||||
|
var locator playwright.Locator
|
||||||
|
if text != "" {
|
||||||
|
locator = page.GetByText(text)
|
||||||
|
} else {
|
||||||
|
locator = page.Locator(selector)
|
||||||
|
}
|
||||||
|
count, err := locator.Count()
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to search elements: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
if count == 0 {
|
||||||
|
return []byte(`{"elements": []}`)
|
||||||
|
}
|
||||||
|
var results []map[string]string
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
el := locator.Nth(i)
|
||||||
|
tag, _ := el.Evaluate(`el => el.nodeName`, nil)
|
||||||
|
text, _ := el.TextContent()
|
||||||
|
html, _ := el.InnerHTML()
|
||||||
|
results = append(results, map[string]string{
|
||||||
|
"index": strconv.Itoa(i),
|
||||||
|
"tag": strings.ToLower(fmt.Sprintf("%v", tag)),
|
||||||
|
"text": text,
|
||||||
|
"html": html,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
data, err := json.Marshal(results)
|
||||||
|
if err != nil {
|
||||||
|
return []byte(fmt.Sprintf(`{"error": "failed to marshal results: %s"}`, err.Error()))
|
||||||
|
}
|
||||||
|
return []byte(fmt.Sprintf(`{"elements": %s}`, string(data)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// jsonString returns s encoded as a JSON string literal, including the
// surrounding quotes and any required escaping.
func jsonString(s string) string {
	// Marshaling a plain string cannot fail, so the error is ignored.
	encoded, _ := json.Marshal(s)
	return string(encoded)
}
|
||||||
1914
tools/tools.go
Normal file
1914
tools/tools.go
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user