Compare commits

16 Commits: feat/tab-c ... feat/reaso

| Author | SHA1 | Date |
|---|---|---|
| | 2c694e2b2b | |
| | 66ccb7a732 | |
| | deece322ef | |
| | e7c8fef32d | |
| | eedda0ec4b | |
| | 96ffbd5cf5 | |
| | 85b11fa9ff | |
| | 1675af98d4 | |
| | 61a0ddfdfd | |
| | 26ab5c59e3 | |
| | 35cc8c068f | |
| | 27fdec1361 | |
| | 76827a71cc | |
| | 3a9a7dbe99 | |
| | d3361c13c5 | |
| | 7c1a8b0122 | |
@@ -15,10 +15,10 @@ import (
 var httpClient = &http.Client{}
 
 var defaultProps = map[string]float32{
 	"temperature":    0.8,
 	"dry_multiplier": 0.0,
 	"min_p":          0.05,
 	"n_predict":      -1.0,
 }
 
 func detectAPI(api string) (isCompletion, isChat, isDeepSeek, isOpenRouter bool) {
@@ -110,8 +110,8 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
 		req := models.NewDSChatReq(*chatBody)
 		return json.Marshal(req)
 	case isOpenRouter:
-		// OpenRouter chat
-		req := models.NewOpenRouterChatReq(*chatBody, defaultProps)
+		// OpenRouter chat - agents don't use reasoning by default
+		req := models.NewOpenRouterChatReq(*chatBody, defaultProps, "")
 		return json.Marshal(req)
 	default:
 		// Assume llama.cpp chat (OpenAI format)
bot.go (143)

@@ -403,6 +403,23 @@ func fetchLCPModels() ([]string, error) {
 	return localModels, nil
 }
 
+// fetchLCPModelsWithLoadStatus returns models with "(loaded)" indicator for loaded models
+func fetchLCPModelsWithLoadStatus() ([]string, error) {
+	models, err := fetchLCPModelsWithStatus()
+	if err != nil {
+		return nil, err
+	}
+	result := make([]string, 0, len(models.Data))
+	for _, m := range models.Data {
+		modelName := m.ID
+		if m.Status.Value == "loaded" {
+			modelName = modelName + " (loaded)"
+		}
+		result = append(result, modelName)
+	}
+	return result, nil
+}
+
 // fetchLCPModelsWithStatus returns the full LCPModels struct including status information.
 func fetchLCPModelsWithStatus() (*models.LCPModels, error) {
 	resp, err := http.Get(cfg.FetchModelNameAPI)
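Note: the "(loaded)" suffix added above is display-only; showModelSelectionPopup strips it again with strings.TrimSuffix before storing the model name. A minimal standalone round-trip sketch (the model name below is made up):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Mark a loaded model the same way fetchLCPModelsWithLoadStatus does.
	display := "qwen2.5-coder" + " (loaded)"
	// Recover the raw model ID the way the selection popup does.
	model := strings.TrimSuffix(display, " (loaded)")
	fmt.Println(display, "->", model)
}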
@@ -456,6 +473,7 @@ func monitorModelLoad(modelID string) {
 		if err := notifyUser("model loaded", "Model "+modelID+" is now loaded and ready."); err != nil {
 			logger.Debug("failed to notify user", "error", err)
 		}
+		refreshChatDisplay()
 		return
 	}
 }
@@ -572,6 +590,8 @@ func sendMsgToLLM(body io.Reader) {
 	defer resp.Body.Close()
 	reader := bufio.NewReader(resp.Body)
 	counter := uint32(0)
+	hasReasoning := false
+	reasoningSent := false
 	for {
 		var (
 			answerText string
@@ -644,6 +664,10 @@ func sendMsgToLLM(body io.Reader) {
 		// 	break
 		// }
 		if chunk.Finished {
+			// Close the thinking block if we were streaming reasoning and haven't closed it yet
+			if hasReasoning && !reasoningSent {
+				chunkChan <- "</think>"
+			}
 			if chunk.Chunk != "" {
 				logger.Warn("text inside of finish llmchunk", "chunk", chunk, "counter", counter)
 				answerText = strings.ReplaceAll(chunk.Chunk, "\n\n", "\n")
@@ -655,6 +679,27 @@ func sendMsgToLLM(body io.Reader) {
 		if counter == 0 {
 			chunk.Chunk = strings.TrimPrefix(chunk.Chunk, " ")
 		}
+		// Handle reasoning chunks - stream them immediately as they arrive
+		if chunk.Reasoning != "" && !reasoningSent {
+			if !hasReasoning {
+				// First reasoning chunk - send opening tag
+				chunkChan <- "<think>"
+				hasReasoning = true
+			}
+			// Stream reasoning content immediately
+			answerText = strings.ReplaceAll(chunk.Reasoning, "\n\n", "\n")
+			if answerText != "" {
+				chunkChan <- answerText
+			}
+		}
+
+		// When we get content and have been streaming reasoning, close the thinking block
+		if chunk.Chunk != "" && hasReasoning && !reasoningSent {
+			// Close the thinking block before sending actual content
+			chunkChan <- "</think>"
+			reasoningSent = true
+		}
+
 		// bot sends way too many \n
 		answerText = strings.ReplaceAll(chunk.Chunk, "\n\n", "\n")
 		// Accumulate text to check for stop strings that might span across chunks
@@ -665,7 +710,9 @@ func sendMsgToLLM(body io.Reader) {
 			logger.Debug("stop string detected on client side for completion endpoint", "stop_string", answerText)
 			streamDone <- true
 		}
-		chunkChan <- answerText
+		if answerText != "" {
+			chunkChan <- answerText
+		}
 		openAIToolChan <- chunk.ToolChunk
 		if chunk.FuncName != "" {
 			lastToolCall.Name = chunk.FuncName
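Note: the hasReasoning/reasoningSent flags above form a small state machine around the <think> markers: the first reasoning delta opens the block, and the first real content chunk (or the end of the stream) closes it exactly once. A simplified, runnable sketch of that ordering with stand-in types, not the project's own channel plumbing:

package main

import "fmt"

type streamedChunk struct {
	Reasoning string
	Content   string
	Finished  bool
}

// emit replays the same open/close logic sendMsgToLLM applies while streaming.
func emit(chunks []streamedChunk, out func(string)) {
	hasReasoning, reasoningSent := false, false
	for _, c := range chunks {
		if c.Reasoning != "" && !reasoningSent {
			if !hasReasoning {
				out("<think>") // first reasoning delta opens the block
				hasReasoning = true
			}
			out(c.Reasoning)
		}
		if c.Content != "" && hasReasoning && !reasoningSent {
			out("</think>") // close before the first real content
			reasoningSent = true
		}
		if c.Content != "" {
			out(c.Content)
		}
		if c.Finished && hasReasoning && !reasoningSent {
			out("</think>") // stream ended while still thinking
		}
	}
}

func main() {
	emit([]streamedChunk{
		{Reasoning: "step 1 "},
		{Reasoning: "step 2"},
		{Content: "final answer"},
		{Finished: true},
	}, func(s string) { fmt.Print(s) })
	fmt.Println()
}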
@@ -763,6 +810,7 @@ func chatWatcher(ctx context.Context) {
 
 func chatRound(r *models.ChatRoundReq) error {
 	botRespMode = true
+	updateStatusLine()
 	botPersona := cfg.AssistantRole
 	if cfg.WriteNextMsgAsCompletionAgent != "" {
 		botPersona = cfg.WriteNextMsgAsCompletionAgent
@@ -780,23 +828,78 @@ func chatRound(r *models.ChatRoundReq) error {
 	}
 	go sendMsgToLLM(reader)
 	logger.Debug("looking at vars in chatRound", "msg", r.UserMsg, "regen", r.Regen, "resume", r.Resume)
+	msgIdx := len(chatBody.Messages)
 	if !r.Resume {
-		fmt.Fprintf(textView, "\n[-:-:b](%d) ", len(chatBody.Messages))
+		// Add empty message to chatBody immediately so it persists during Alt+T toggle
+		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{
+			Role: botPersona, Content: "",
+		})
+		fmt.Fprintf(textView, "\n[-:-:b](%d) ", msgIdx)
 		fmt.Fprint(textView, roleToIcon(botPersona))
 		fmt.Fprint(textView, "[-:-:-]\n")
 		if cfg.ThinkUse && !strings.Contains(cfg.CurrentAPI, "v1") {
 			// fmt.Fprint(textView, "<think>")
 			chunkChan <- "<think>"
 		}
+	} else {
+		msgIdx = len(chatBody.Messages) - 1
 	}
 	respText := strings.Builder{}
 	toolResp := strings.Builder{}
+	// Variables for handling thinking blocks during streaming
+	inThinkingBlock := false
+	thinkingBuffer := strings.Builder{}
+	justExitedThinkingCollapsed := false
 out:
 	for {
 		select {
 		case chunk := <-chunkChan:
+			// Handle thinking blocks during streaming
+			if strings.HasPrefix(chunk, "<think>") && !inThinkingBlock {
+				// Start of thinking block
+				inThinkingBlock = true
+				thinkingBuffer.Reset()
+				thinkingBuffer.WriteString(chunk)
+				if thinkingCollapsed {
+					// Show placeholder immediately when thinking starts in collapsed mode
+					fmt.Fprint(textView, "[yellow::i][thinking... (press Alt+T to expand)][-:-:-]")
+					if scrollToEndEnabled {
+						textView.ScrollToEnd()
+					}
+					respText.WriteString(chunk)
+					continue
+				}
+			} else if inThinkingBlock {
+				thinkingBuffer.WriteString(chunk)
+				if strings.Contains(chunk, "</think>") {
+					// End of thinking block
+					inThinkingBlock = false
+					if thinkingCollapsed {
+						// Thinking already displayed as placeholder, just update respText
+						respText.WriteString(chunk)
+						justExitedThinkingCollapsed = true
+						if scrollToEndEnabled {
+							textView.ScrollToEnd()
+						}
+						continue
+					}
+					// If not collapsed, fall through to normal display
+				} else if thinkingCollapsed {
+					// Still in thinking block and collapsed - just buffer, don't display
+					respText.WriteString(chunk)
+					continue
+				}
+				// If not collapsed, fall through to normal display
+			}
+			// Add spacing after collapsed thinking block before real response
+			if justExitedThinkingCollapsed {
+				chunk = "\n\n" + chunk
+				justExitedThinkingCollapsed = false
+			}
 			fmt.Fprint(textView, chunk)
 			respText.WriteString(chunk)
+			// Update the message in chatBody.Messages so it persists during Alt+T
+			chatBody.Messages[msgIdx].Content = respText.String()
 			if scrollToEndEnabled {
 				textView.ScrollToEnd()
 			}
@@ -841,13 +944,11 @@ out:
 		processedMsg := processMessageTag(&updatedMsg)
 		chatBody.Messages[len(chatBody.Messages)-1] = *processedMsg
 	} else {
-		newMsg := models.RoleMsg{
-			Role: botPersona, Content: respText.String(),
-		}
-		// Process the new message to check for known_to tags in LLM response
-		newMsg = *processMessageTag(&newMsg)
-		chatBody.Messages = append(chatBody.Messages, newMsg)
-		stopTTSIfNotForUser(&newMsg)
+		// Message was already added at the start, just process it for known_to tags
+		chatBody.Messages[msgIdx].Content = respText.String()
+		processedMsg := processMessageTag(&chatBody.Messages[msgIdx])
+		chatBody.Messages[msgIdx] = *processedMsg
+		stopTTSIfNotForUser(&chatBody.Messages[msgIdx])
 	}
 	cleanChatBody()
 	refreshChatDisplay()
@@ -1070,7 +1171,7 @@ func findCall(msg, toolCall string) bool {
 	}
 	resp := callToolWithAgent(fc.Name, fc.Args)
 	toolMsg := string(resp) // Remove the "tool response: " prefix and %+v formatting
-	logger.Info("llm used tool call", "tool_resp", toolMsg, "tool_attrs", fc)
+	logger.Info("llm used a tool call", "tool_name", fc.Name, "too_args", fc.Args, "id", fc.ID, "tool_resp", toolMsg)
 	fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
 		"\n\n", len(chatBody.Messages), cfg.ToolRole, toolMsg)
 	// Create tool response message with the proper tool_call_id
@@ -1110,8 +1211,26 @@ func chatToText(messages []models.RoleMsg, showSys bool) string {
 
 	// Collapse thinking blocks if enabled
 	if thinkingCollapsed {
-		placeholder := "[yellow::i][thinking... (press Alt+T to expand)][-:-:-]"
-		text = thinkRE.ReplaceAllString(text, placeholder)
+		text = thinkRE.ReplaceAllStringFunc(text, func(match string) string {
+			// Extract content between <think> and </think>
+			start := len("<think>")
+			end := len(match) - len("</think>")
+			if start < end && start < len(match) {
+				content := match[start:end]
+				return fmt.Sprintf("[yellow::i][thinking... (%d chars) (press Alt+T to expand)][-:-:-]", len(content))
+			}
+			return "[yellow::i][thinking... (press Alt+T to expand)][-:-:-]"
+		})
+		// Handle incomplete thinking blocks (during streaming when </think> hasn't arrived yet)
+		if strings.Contains(text, "<think>") && !strings.Contains(text, "</think>") {
+			// Find the incomplete thinking block and replace it
+			startIdx := strings.Index(text, "<think>")
+			if startIdx != -1 {
+				content := text[startIdx+len("<think>"):]
+				placeholder := fmt.Sprintf("[yellow::i][thinking... (%d chars) (press Alt+T to expand)][-:-:-]", len(content))
+				text = text[:startIdx] + placeholder
+			}
+		}
 	}
 
 	return text
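Note: assuming thinkRE matches something along the lines of (?s)<think>.*?</think> (the actual pattern is defined elsewhere in the repo), the collapsed view now also reports how much text is hidden. A small runnable sketch of the same replacement:

package main

import (
	"fmt"
	"regexp"
)

// Assumed pattern; the project's thinkRE may differ.
var thinkRE = regexp.MustCompile(`(?s)<think>.*?</think>`)

func main() {
	msg := "<think>let me check</think>Sure."
	out := thinkRE.ReplaceAllStringFunc(msg, func(match string) string {
		content := match[len("<think>") : len(match)-len("</think>")]
		return fmt.Sprintf("[yellow::i][thinking... (%d chars) (press Alt+T to expand)][-:-:-]", len(content))
	})
	// Prints the placeholder (12 hidden chars) followed by "Sure."
	fmt.Println(out)
}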
colors.go (new file, 63)

@@ -0,0 +1,63 @@
+package main
+
+import (
+	"github.com/gdamore/tcell/v2"
+	"github.com/rivo/tview"
+)
+
+var (
+	colorschemes = map[string]tview.Theme{
+		"default": tview.Theme{
+			PrimitiveBackgroundColor:    tcell.ColorDefault,
+			ContrastBackgroundColor:     tcell.ColorGray,
+			MoreContrastBackgroundColor: tcell.ColorSteelBlue,
+			BorderColor:                 tcell.ColorGray,
+			TitleColor:                  tcell.ColorRed,
+			GraphicsColor:               tcell.ColorBlue,
+			PrimaryTextColor:            tcell.ColorLightGray,
+			SecondaryTextColor:          tcell.ColorYellow,
+			TertiaryTextColor:           tcell.ColorOrange,
+			InverseTextColor:            tcell.ColorPurple,
+			ContrastSecondaryTextColor:  tcell.ColorLime,
+		},
+		"gruvbox": tview.Theme{
+			PrimitiveBackgroundColor:    tcell.NewHexColor(0x282828), // Background: #282828 (dark gray)
+			ContrastBackgroundColor:     tcell.ColorDarkGoldenrod,    // Selected option: warm yellow (#b57614)
+			MoreContrastBackgroundColor: tcell.ColorDarkSlateGray,    // Non-selected options: dark grayish-blue (#32302f)
+			BorderColor:                 tcell.ColorLightGray,        // Light gray (#a89984)
+			TitleColor:                  tcell.ColorRed,              // Red (#fb4934)
+			GraphicsColor:               tcell.ColorDarkCyan,         // Cyan (#689d6a)
+			PrimaryTextColor:            tcell.ColorLightGray,        // Light gray (#d5c4a1)
+			SecondaryTextColor:          tcell.ColorYellow,           // Yellow (#fabd2f)
+			TertiaryTextColor:           tcell.ColorOrange,           // Orange (#fe8019)
+			InverseTextColor:            tcell.ColorWhite,            // White (#f9f5d7) for selected text
+			ContrastSecondaryTextColor:  tcell.ColorLightGreen,       // Light green (#b8bb26)
+		},
+		"solarized": tview.Theme{
+			PrimitiveBackgroundColor:    tcell.NewHexColor(0x002b36), // Background: #002b36 (base03)
+			ContrastBackgroundColor:     tcell.ColorDarkCyan,         // Selected option: cyan (#2aa198)
+			MoreContrastBackgroundColor: tcell.ColorDarkSlateGray,    // Non-selected options: dark blue (#073642)
+			BorderColor:                 tcell.ColorLightBlue,        // Light blue (#839496)
+			TitleColor:                  tcell.ColorRed,              // Red (#dc322f)
+			GraphicsColor:               tcell.ColorBlue,             // Blue (#268bd2)
+			PrimaryTextColor:            tcell.ColorWhite,            // White (#fdf6e3)
+			SecondaryTextColor:          tcell.ColorYellow,           // Yellow (#b58900)
+			TertiaryTextColor:           tcell.ColorOrange,           // Orange (#cb4b16)
+			InverseTextColor:            tcell.ColorWhite,            // White (#eee8d5) for selected text
+			ContrastSecondaryTextColor:  tcell.ColorLightCyan,        // Light cyan (#93a1a1)
+		},
+		"dracula": tview.Theme{
+			PrimitiveBackgroundColor:    tcell.NewHexColor(0x282a36), // Background: #282a36
+			ContrastBackgroundColor:     tcell.ColorDarkMagenta,      // Selected option: magenta (#bd93f9)
+			MoreContrastBackgroundColor: tcell.ColorDarkGray,         // Non-selected options: dark gray (#44475a)
+			BorderColor:                 tcell.ColorLightGray,        // Light gray (#f8f8f2)
+			TitleColor:                  tcell.ColorRed,              // Red (#ff5555)
+			GraphicsColor:               tcell.ColorDarkCyan,         // Cyan (#8be9fd)
+			PrimaryTextColor:            tcell.ColorWhite,            // White (#f8f8f2)
+			SecondaryTextColor:          tcell.ColorYellow,           // Yellow (#f1fa8c)
+			TertiaryTextColor:           tcell.ColorOrange,           // Orange (#ffb86c)
+			InverseTextColor:            tcell.ColorWhite,            // White (#f8f8f2) for selected text
+			ContrastSecondaryTextColor:  tcell.ColorLightGreen,       // Light green (#50fa7b)
+		},
+	}
+)
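Note: these themes take effect by assigning to tview.Styles (as tui.go's init() does) and, for an already running UI, by repainting existing widgets via the new updateWidgetColors in popups.go. A minimal, self-contained illustration of the assignment half only, with shortened theme values:

package main

import (
	"github.com/gdamore/tcell/v2"
	"github.com/rivo/tview"
)

// Minimal sketch: tview.Styles must be set before primitives are created,
// otherwise already-built widgets keep their old colors (hence updateWidgetColors).
func main() {
	tview.Styles = tview.Theme{
		PrimitiveBackgroundColor: tcell.NewHexColor(0x282828),
		PrimaryTextColor:         tcell.ColorLightGray,
		BorderColor:              tcell.ColorLightGray,
	}
	box := tview.NewBox().SetBorder(true).SetTitle("gruvbox-ish")
	if err := tview.NewApplication().SetRoot(box, true).Run(); err != nil {
		panic(err)
	}
}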
@@ -50,3 +50,7 @@ CharSpecificContextEnabled = true
 CharSpecificContextTag = "@"
 AutoTurn = true
 StripThinkingFromAPI = true # Strip <think> blocks from messages before sending to LLM (keeps them in chat history)
+# OpenRouter reasoning configuration (only applies to OpenRouter chat API)
+# Valid values: xhigh, high, medium, low, minimal, none (empty or none = disabled)
+# Models that support reasoning will include thinking content wrapped in <think> tags
+ReasoningEffort = "medium"
@@ -20,6 +20,7 @@ type Config struct {
 	ToolUse              bool   `toml:"ToolUse"`
 	ThinkUse             bool   `toml:"ThinkUse"`
 	StripThinkingFromAPI bool   `toml:"StripThinkingFromAPI"`
+	ReasoningEffort      string `toml:"ReasoningEffort"`
 	AssistantRole        string `toml:"AssistantRole"`
 	SysDir               string `toml:"SysDir"`
 	ChunkLimit           uint32 `toml:"ChunkLimit"`
helpfuncs.go (118)

@@ -9,8 +9,10 @@ import (
 	"os"
 	"os/exec"
 	"path"
+	"path/filepath"
 	"slices"
 	"strings"
+	"time"
 	"unicode"
 
 	"math/rand/v2"
@@ -18,6 +20,46 @@ import (
 	"github.com/rivo/tview"
 )
 
+// Cached model color - updated by background goroutine
+var cachedModelColor string = "orange"
+
+// startModelColorUpdater starts a background goroutine that periodically updates
+// the cached model color. Only runs HTTP requests for local llama.cpp APIs.
+func startModelColorUpdater() {
+	go func() {
+		ticker := time.NewTicker(5 * time.Second)
+		defer ticker.Stop()
+
+		// Initial check
+		updateCachedModelColor()
+
+		for range ticker.C {
+			updateCachedModelColor()
+		}
+	}()
+}
+
+// updateCachedModelColor updates the global cachedModelColor variable
+func updateCachedModelColor() {
+	if !isLocalLlamacpp() {
+		cachedModelColor = "orange"
+		return
+	}
+
+	// Check if model is loaded
+	loaded, err := isModelLoaded(chatBody.Model)
+	if err != nil {
+		// On error, assume not loaded (red)
+		cachedModelColor = "red"
+		return
+	}
+	if loaded {
+		cachedModelColor = "green"
+	} else {
+		cachedModelColor = "red"
+	}
+}
+
 func isASCII(s string) bool {
 	for i := 0; i < len(s); i++ {
 		if s[i] > unicode.MaxASCII {
@@ -60,6 +102,7 @@ func refreshChatDisplay() {
 	displayText := chatToText(filteredMessages, cfg.ShowSys)
 	textView.SetText(displayText)
 	colorText()
+	updateStatusLine()
 	if scrollToEndEnabled {
 		textView.ScrollToEnd()
 	}
@@ -130,8 +173,8 @@ func colorText() {
 }
 
 func updateStatusLine() {
-	statusLineWidget.SetText(makeStatusLine())
-	helpView.SetText(fmt.Sprintf(helpText, makeStatusLine()))
+	status := makeStatusLine()
+	statusLineWidget.SetText(status)
 }
 
 func initSysCards() ([]string, error) {
@@ -273,22 +316,11 @@ func isLocalLlamacpp() bool {
 	return host == "localhost" || host == "127.0.0.1" || host == "::1"
 }
 
-// getModelColor returns the color tag for the model name based on its load status.
+// getModelColor returns the cached color tag for the model name.
+// The cached value is updated by a background goroutine every 5 seconds.
 // For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
 func getModelColor() string {
-	if !isLocalLlamacpp() {
-		return "orange"
-	}
-	// Check if model is loaded
-	loaded, err := isModelLoaded(chatBody.Model)
-	if err != nil {
-		// On error, assume not loaded (red)
-		return "red"
-	}
-	if loaded {
-		return "green"
-	}
-	return "red"
+	return cachedModelColor
 }
 
 func makeStatusLine() string {
@@ -322,10 +354,15 @@ func makeStatusLine() string {
 	}
 	// Get model color based on load status for local llama.cpp models
 	modelColor := getModelColor()
-	statusLine := fmt.Sprintf(indexLineCompletion, boolColors[botRespMode], botRespMode, activeChatName,
+	statusLine := fmt.Sprintf(statusLineTempl, boolColors[botRespMode], botRespMode, activeChatName,
 		boolColors[cfg.ToolUse], cfg.ToolUse, modelColor, chatBody.Model, boolColors[cfg.SkipLLMResp],
 		cfg.SkipLLMResp, cfg.CurrentAPI, boolColors[isRecording], isRecording, persona,
-		botPersona, boolColors[injectRole], injectRole)
+		botPersona)
+	// completion endpoint
+	if !strings.Contains(cfg.CurrentAPI, "chat") {
+		roleInject := fmt.Sprintf(" | role injection [%s:-:b]%v[-:-:-] (alt+7)", boolColors[injectRole], injectRole)
+		statusLine += roleInject
+	}
 	return statusLine + imageInfo + shellModeInfo
 }
@@ -706,23 +743,44 @@ func searchPrev() {
 // == tab completion ==
 
 func scanFiles(dir, filter string) []string {
+	const maxDepth = 3
+	const maxFiles = 50
 	var files []string
-	entries, err := os.ReadDir(dir)
-	if err != nil {
-		return files
-	}
-	for _, entry := range entries {
-		name := entry.Name()
-		if strings.HasPrefix(name, ".") {
-			continue
-		}
-		if filter == "" || strings.HasPrefix(strings.ToLower(name), strings.ToLower(filter)) {
-			if entry.IsDir() {
-				files = append(files, name+"/")
-			} else {
-				files = append(files, name)
-			}
-		}
-	}
+	var scanRecursive func(currentDir string, currentDepth int, relPath string)
+	scanRecursive = func(currentDir string, currentDepth int, relPath string) {
+		if len(files) >= maxFiles {
+			return
+		}
+		if currentDepth > maxDepth {
+			return
+		}
+		entries, err := os.ReadDir(currentDir)
+		if err != nil {
+			return
+		}
+		for _, entry := range entries {
+			if len(files) >= maxFiles {
+				return
+			}
+			name := entry.Name()
+			if strings.HasPrefix(name, ".") {
+				continue
+			}
+			fullPath := name
+			if relPath != "" {
+				fullPath = relPath + "/" + name
+			}
+			if entry.IsDir() {
+				// Recursively scan subdirectories
+				scanRecursive(filepath.Join(currentDir, name), currentDepth+1, fullPath)
+				continue
+			}
+			// Check if file matches filter
+			if filter == "" || strings.HasPrefix(strings.ToLower(fullPath), strings.ToLower(filter)) {
+				files = append(files, fullPath)
+			}
+		}
+	}
+	scanRecursive(dir, 0, "")
 	return files
 }
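Note: in the rewritten scanFiles the filter is a case-insensitive prefix of the whole relative path, not just the base name, and only files (not directories) are returned, capped at depth 3 and 50 entries. A tiny runnable sketch of just that matching rule, with made-up paths:

package main

import (
	"fmt"
	"strings"
)

func main() {
	paths := []string{"Dockerfile", "docs/usage.md", "docs/api/openai.md", "internal/reader.go"}
	filter := "doc"
	for _, p := range paths {
		// Same prefix test the new scanFiles applies to each relative path.
		if filter == "" || strings.HasPrefix(strings.ToLower(p), strings.ToLower(filter)) {
			fmt.Println(p) // Dockerfile, docs/usage.md, docs/api/openai.md
		}
	}
}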
llm.go (18)

@@ -237,8 +237,10 @@ func (op LCPChat) ParseChunk(data []byte) (*models.TextChunk, error) {
 		return &models.TextChunk{Finished: true}, nil
 	}
 
+	lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
 	resp := &models.TextChunk{
-		Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
+		Chunk:     lastChoice.Delta.Content,
+		Reasoning: lastChoice.Delta.ReasoningContent,
 	}
 
 	// Check for tool calls in all choices, not just the last one
@@ -256,7 +258,7 @@ func (op LCPChat) ParseChunk(data []byte) (*models.TextChunk, error) {
 		}
 	}
 
-	if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
+	if lastChoice.FinishReason == "stop" {
 		if resp.Chunk != "" {
 			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
 		}
@@ -614,12 +616,14 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
 		logger.Error("failed to decode", "error", err, "line", string(data))
 		return nil, err
 	}
+	lastChoice := llmchunk.Choices[len(llmchunk.Choices)-1]
 	resp := &models.TextChunk{
-		Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
+		Chunk:     lastChoice.Delta.Content,
+		Reasoning: lastChoice.Delta.Reasoning,
 	}
 	// Handle tool calls similar to LCPChat
-	if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
-		toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
+	if len(lastChoice.Delta.ToolCalls) > 0 {
+		toolCall := lastChoice.Delta.ToolCalls[0]
 		resp.ToolChunk = toolCall.Function.Arguments
 		fname := toolCall.Function.Name
 		if fname != "" {
@@ -631,7 +635,7 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
 	if resp.ToolChunk != "" {
 		resp.ToolResp = true
 	}
-	if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
+	if lastChoice.FinishReason == "stop" {
 		if resp.Chunk != "" {
 			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
 		}
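Note: the new Reasoning field picks up the "reasoning" key OpenRouter streams inside each choice delta (per the struct tag added in this diff). A decode sketch with stand-in mirror types covering only the fields this diff touches; the sample payload is hypothetical:

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in mirrors of the delta fields read here; the project's full
// OpenRouterChatResp struct carries more fields.
type delta struct {
	Content   string `json:"content"`
	Reasoning string `json:"reasoning"`
}

type choice struct {
	Delta        delta  `json:"delta"`
	FinishReason string `json:"finish_reason"`
}

type streamChunk struct {
	Choices []choice `json:"choices"`
}

func main() {
	raw := `{"choices":[{"delta":{"content":"","reasoning":"Let me think..."},"finish_reason":""}]}`
	var c streamChunk
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	last := c.Choices[len(c.Choices)-1]
	fmt.Printf("chunk=%q reasoning=%q\n", last.Delta.Content, last.Delta.Reasoning)
}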
@@ -710,7 +714,7 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
 	}
 	// Clean null/empty messages to prevent API issues
 	bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages)
-	orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps)
+	orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps, cfg.ReasoningEffort)
 	if cfg.ToolUse && !resume && role != cfg.ToolRole {
 		orBody.Tools = baseTools // set tools to use
 	}
main.go (20)

@@ -5,16 +5,16 @@ import (
 )
 
 var (
 	boolColors        = map[bool]string{true: "green", false: "red"}
 	botRespMode       = false
 	editMode          = false
 	roleEditMode      = false
 	injectRole        = true
 	selectedIndex     = int(-1)
 	shellMode         = false
 	thinkingCollapsed = false
-	indexLineCompletion = "F12 to show keys help | llm turn: [%s:-:b]%v[-:-:-] (F6) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [%s:-:b]%v[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [%s:-:b]%v[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | recording: [%s:-:b]%v[-:-:-] (ctrl+r) | writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role injection (alt+7) [%s:-:b]%v[-:-:-]"
+	statusLineTempl   = "help (F12) | llm turn: [%s:-:b]%v[-:-:-] (F6) | chat: [orange:-:b]%s[-:-:-] (F1) |tool-use: [%s:-:b]%v[-:-:-] (ctrl+k) | model: [%s:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [%s:-:b]%v[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | voice recording: [%s:-:b]%v[-:-:-] (ctrl+r) | writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | bot will write as [orange:-:b]%s[-:-:-] (ctrl+x)"
 	focusSwitcher     = map[tview.Primitive]tview.Primitive{}
 )
 
 func main() {
@@ -64,8 +64,9 @@ type LLMRespChunk struct {
 		FinishReason string `json:"finish_reason"`
 		Index        int    `json:"index"`
 		Delta        struct {
 			Content          string          `json:"content"`
+			ReasoningContent string          `json:"reasoning_content"`
 			ToolCalls        []ToolDeltaResp `json:"tool_calls"`
 		} `json:"delta"`
 	} `json:"choices"`
 	Created int `json:"created"`
@@ -86,6 +87,7 @@ type TextChunk struct {
 	ToolResp  bool
 	FuncName  string
 	ToolID    string
+	Reasoning string // For models that send reasoning separately (OpenRouter, etc.)
 }
 
 type TextContentPart struct {
@@ -25,17 +25,23 @@ func NewOpenRouterCompletionReq(model, prompt string, props map[string]float32,
 }
 
 type OpenRouterChatReq struct {
 	Messages    []RoleMsg `json:"messages"`
 	Model       string    `json:"model"`
 	Stream      bool      `json:"stream"`
 	Temperature float32   `json:"temperature"`
 	MinP        float32   `json:"min_p"`
 	NPredict    int32     `json:"max_tokens"`
 	Tools       []Tool    `json:"tools"`
+	Reasoning   *ReasoningConfig `json:"reasoning,omitempty"`
 }
 
-func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
-	return OpenRouterChatReq{
+type ReasoningConfig struct {
+	Effort  string `json:"effort,omitempty"`  // xhigh, high, medium, low, minimal, none
+	Summary string `json:"summary,omitempty"` // auto, concise, detailed
+}
+
+func NewOpenRouterChatReq(cb ChatBody, props map[string]float32, reasoningEffort string) OpenRouterChatReq {
+	req := OpenRouterChatReq{
 		Messages:    cb.Messages,
 		Model:       cb.Model,
 		Stream:      cb.Stream,
@@ -43,6 +49,13 @@ func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
 		MinP:     props["min_p"],
 		NPredict: int32(props["n_predict"]),
 	}
+	// Only include reasoning config if effort is specified and not "none"
+	if reasoningEffort != "" && reasoningEffort != "none" {
+		req.Reasoning = &ReasoningConfig{
+			Effort: reasoningEffort,
+		}
+	}
+	return req
 }
 
 type OpenRouterChatRespNonStream struct {
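Note: because Reasoning is a pointer with omitempty, the "reasoning" object is omitted from the request body whenever the effort is empty or "none". A small sketch with mirror types showing both serializations (the model name is made up):

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors of the new ReasoningConfig and the one field that references it.
type reasoningConfig struct {
	Effort  string `json:"effort,omitempty"`
	Summary string `json:"summary,omitempty"`
}

type chatReq struct {
	Model     string           `json:"model"`
	Reasoning *reasoningConfig `json:"reasoning,omitempty"`
}

func main() {
	withEffort, _ := json.Marshal(chatReq{Model: "some/model", Reasoning: &reasoningConfig{Effort: "medium"}})
	withoutEffort, _ := json.Marshal(chatReq{Model: "some/model"})
	fmt.Println(string(withEffort))    // {"model":"some/model","reasoning":{"effort":"medium"}}
	fmt.Println(string(withoutEffort)) // {"model":"some/model"}
}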
@@ -82,6 +95,7 @@ type OpenRouterChatResp struct {
 		Delta struct {
 			Role      string          `json:"role"`
 			Content   string          `json:"content"`
+			Reasoning string          `json:"reasoning"`
 			ToolCalls []ToolDeltaResp `json:"tool_calls"`
 		} `json:"delta"`
 		FinishReason string `json:"finish_reason"`
popups.go (134)

@@ -17,9 +17,13 @@ func showModelSelectionPopup() {
 		} else if strings.Contains(api, "openrouter.ai") {
 			return ORFreeModels
 		}
-		// Assume local llama.cpp
-		updateModelLists()
-		return LocalModels
+		// Assume local llama.cpp - fetch with load status
+		models, err := fetchLCPModelsWithLoadStatus()
+		if err != nil {
+			logger.Error("failed to fetch models with load status", "error", err)
+			return LocalModels
+		}
+		return models
 	}
 	// Get the current model list based on the API
 	modelList := getModelListForAPI(cfg.CurrentAPI)
@@ -57,8 +61,10 @@ func showModelSelectionPopup() {
 		modelListWidget.SetCurrentItem(currentModelIndex)
 	}
 	modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
+		// Strip "(loaded)" suffix if present for local llama.cpp models
+		modelName := strings.TrimSuffix(mainText, " (loaded)")
 		// Update the model in both chatBody and config
-		chatBody.Model = mainText
+		chatBody.Model = modelName
 		cfg.CurrentModel = chatBody.Model
 		// Remove the popup page
 		pages.RemovePage("modelSelectionPopup")
@@ -387,3 +393,123 @@ func showFileCompletionPopup(filter string) {
 	pages.AddPage("fileCompletionPopup", modal(widget, 80, 20), true, true)
 	app.SetFocus(widget)
 }
+
+func updateWidgetColors(theme *tview.Theme) {
+	bgColor := theme.PrimitiveBackgroundColor
+	fgColor := theme.PrimaryTextColor
+	borderColor := theme.BorderColor
+	titleColor := theme.TitleColor
+
+	textView.SetBackgroundColor(bgColor)
+	textView.SetTextColor(fgColor)
+	textView.SetBorderColor(borderColor)
+	textView.SetTitleColor(titleColor)
+
+	textArea.SetBackgroundColor(bgColor)
+	textArea.SetBorderColor(borderColor)
+	textArea.SetTitleColor(titleColor)
+	textArea.SetTextStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
+	textArea.SetPlaceholderStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
+	// Force textarea refresh by restoring text (SetTextStyle doesn't trigger redraw)
+	textArea.SetText(textArea.GetText(), true)
+
+	editArea.SetBackgroundColor(bgColor)
+	editArea.SetBorderColor(borderColor)
+	editArea.SetTitleColor(titleColor)
+	editArea.SetTextStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
+	editArea.SetPlaceholderStyle(tcell.StyleDefault.Background(bgColor).Foreground(fgColor))
+	// Force textarea refresh by restoring text (SetTextStyle doesn't trigger redraw)
+	editArea.SetText(editArea.GetText(), true)
+
+	statusLineWidget.SetBackgroundColor(bgColor)
+	statusLineWidget.SetTextColor(fgColor)
+	statusLineWidget.SetBorderColor(borderColor)
+	statusLineWidget.SetTitleColor(titleColor)
+
+	helpView.SetBackgroundColor(bgColor)
+	helpView.SetTextColor(fgColor)
+	helpView.SetBorderColor(borderColor)
+	helpView.SetTitleColor(titleColor)
+
+	searchField.SetBackgroundColor(bgColor)
+	searchField.SetBorderColor(borderColor)
+	searchField.SetTitleColor(titleColor)
+}
+
+// showColorschemeSelectionPopup creates a modal popup to select a colorscheme
+func showColorschemeSelectionPopup() {
+	// Get the list of available colorschemes
+	schemeNames := make([]string, 0, len(colorschemes))
+	for name := range colorschemes {
+		schemeNames = append(schemeNames, name)
+	}
+	slices.Sort(schemeNames)
+	// Check for empty options list
+	if len(schemeNames) == 0 {
+		logger.Warn("no colorschemes available for selection")
+		message := "No colorschemes available."
+		if err := notifyUser("Empty list", message); err != nil {
+			logger.Error("failed to send notification", "error", err)
+		}
+		return
+	}
+	// Create a list primitive
+	schemeListWidget := tview.NewList().ShowSecondaryText(false).
+		SetSelectedBackgroundColor(tcell.ColorGray)
+	schemeListWidget.SetTitle("Select Colorscheme").SetBorder(true)
+
+	currentScheme := "default"
+	for name := range colorschemes {
+		if tview.Styles == colorschemes[name] {
+			currentScheme = name
+			break
+		}
+	}
+	currentSchemeIndex := -1
+	for i, scheme := range schemeNames {
+		if scheme == currentScheme {
+			currentSchemeIndex = i
+		}
+		schemeListWidget.AddItem(scheme, "", 0, nil)
+	}
+	// Set the current selection if found
+	if currentSchemeIndex != -1 {
+		schemeListWidget.SetCurrentItem(currentSchemeIndex)
+	}
+	schemeListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) {
+		// Update the colorscheme
+		if theme, ok := colorschemes[mainText]; ok {
+			tview.Styles = theme
+			go func() {
+				app.QueueUpdateDraw(func() {
+					updateWidgetColors(&theme)
+				})
+			}()
+		}
+		// Remove the popup page
+		pages.RemovePage("colorschemeSelectionPopup")
+	})
+	schemeListWidget.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
+		if event.Key() == tcell.KeyEscape {
+			pages.RemovePage("colorschemeSelectionPopup")
+			return nil
+		}
+		if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
+			pages.RemovePage("colorschemeSelectionPopup")
+			return nil
+		}
+		return event
+	})
+	modal := func(p tview.Primitive, width, height int) tview.Primitive {
+		return tview.NewFlex().
+			AddItem(nil, 0, 1, false).
+			AddItem(tview.NewFlex().SetDirection(tview.FlexRow).
+				AddItem(nil, 0, 1, false).
+				AddItem(p, height, 1, true).
+				AddItem(nil, 0, 1, false), width, 1, true).
+			AddItem(nil, 0, 1, false)
+	}
+	// Add modal page and make it visible
+	pages.AddPage("colorschemeSelectionPopup", modal(schemeListWidget, 40, len(schemeNames)+2), true, true)
+	app.SetFocus(schemeListWidget)
+}
@@ -149,6 +149,11 @@ func makePropsTable(props map[string]float32) *tview.Table {
 	addListPopupRow("Set log level", logLevels, GetLogLevel(), func(option string) {
 		setLogLevel(option)
 	})
+	// Add reasoning effort dropdown (for OpenRouter and supported APIs)
+	reasoningEfforts := []string{"", "none", "minimal", "low", "medium", "high", "xhigh"}
+	addListPopupRow("Reasoning effort (OR)", reasoningEfforts, cfg.ReasoningEffort, func(option string) {
+		cfg.ReasoningEffort = option
+	})
 	// Helper function to get model list for a given API
 	getModelListForAPI := func(api string) []string {
 		if strings.Contains(api, "api.deepseek.com/") {
@@ -1046,6 +1046,7 @@ func makeFilePicker() *tview.Flex {
 		if bracketPos := strings.Index(itemText, " ["); bracketPos != -1 {
 			actualItemName = itemText[:bracketPos]
 		}
+		// nolint: gocritic
 		if strings.HasPrefix(actualItemName, "../") {
 			targetDir = path.Dir(currentDisplayDir)
 		} else if strings.HasSuffix(actualItemName, "/") {
tui.go (87)

@@ -10,14 +10,11 @@ import (
 	"path"
 	"strconv"
 	"strings"
-	"sync"
 
 	"github.com/gdamore/tcell/v2"
 	"github.com/rivo/tview"
 )
 
-var _ = sync.RWMutex{}
-
 var (
 	app   *tview.Application
 	pages *tview.Pages
@@ -96,6 +93,7 @@
 [yellow]Alt+8[white]: show char img or last picked img
 [yellow]Alt+9[white]: warm up (load) selected llama.cpp model
 [yellow]Alt+t[white]: toggle thinking blocks visibility (collapse/expand <think> blocks)
+[yellow]Alt+i[white]: show colorscheme selection popup
 
 === scrolling chat window (some keys similar to vim) ===
 [yellow]arrows up/down and j/k[white]: scroll up and down
@@ -112,70 +110,18 @@
 [yellow]x[white]: to exit
 
 === shell mode ===
-[yellow]@match->Tab[white]: file completion (type @ in input to get file suggestions)
+[yellow]@match->Tab[white]: file completion with relative paths (recursive, depth 3, max 50 files)
 
 === status line ===
 %s
 
 Press <Enter> or 'x' to return
 `
-	colorschemes = map[string]tview.Theme{
-		"default": tview.Theme{
-			PrimitiveBackgroundColor:    tcell.ColorDefault,
-			ContrastBackgroundColor:     tcell.ColorGray,
-			MoreContrastBackgroundColor: tcell.ColorSteelBlue,
-			BorderColor:                 tcell.ColorGray,
-			TitleColor:                  tcell.ColorRed,
-			GraphicsColor:               tcell.ColorBlue,
-			PrimaryTextColor:            tcell.ColorLightGray,
-			SecondaryTextColor:          tcell.ColorYellow,
-			TertiaryTextColor:           tcell.ColorOrange,
-			InverseTextColor:            tcell.ColorPurple,
-			ContrastSecondaryTextColor:  tcell.ColorLime,
-		},
-		"gruvbox": tview.Theme{
-			PrimitiveBackgroundColor:    tcell.ColorBlack,         // Matches #1e1e2e
-			ContrastBackgroundColor:     tcell.ColorDarkGoldenrod, // Selected option: warm yellow (#b57614)
-			MoreContrastBackgroundColor: tcell.ColorDarkSlateGray, // Non-selected options: dark grayish-blue (#32302f)
-			BorderColor:                 tcell.ColorLightGray,     // Light gray (#a89984)
-			TitleColor:                  tcell.ColorRed,           // Red (#fb4934)
-			GraphicsColor:               tcell.ColorDarkCyan,      // Cyan (#689d6a)
-			PrimaryTextColor:            tcell.ColorLightGray,     // Light gray (#d5c4a1)
-			SecondaryTextColor:          tcell.ColorYellow,        // Yellow (#fabd2f)
-			TertiaryTextColor:           tcell.ColorOrange,        // Orange (#fe8019)
-			InverseTextColor:            tcell.ColorWhite,         // White (#f9f5d7) for selected text
-			ContrastSecondaryTextColor:  tcell.ColorLightGreen,    // Light green (#b8bb26)
-		},
-		"solarized": tview.Theme{
-			PrimitiveBackgroundColor:    tcell.NewHexColor(0x1e1e2e), // #1e1e2e for main dropdown box
-			ContrastBackgroundColor:     tcell.ColorDarkCyan,         // Selected option: cyan (#2aa198)
-			MoreContrastBackgroundColor: tcell.ColorDarkSlateGray,    // Non-selected options: dark blue (#073642)
-			BorderColor:                 tcell.ColorLightBlue,        // Light blue (#839496)
-			TitleColor:                  tcell.ColorRed,              // Red (#dc322f)
-			GraphicsColor:               tcell.ColorBlue,             // Blue (#268bd2)
-			PrimaryTextColor:            tcell.ColorWhite,            // White (#fdf6e3)
-			SecondaryTextColor:          tcell.ColorYellow,           // Yellow (#b58900)
-			TertiaryTextColor:           tcell.ColorOrange,           // Orange (#cb4b16)
-			InverseTextColor:            tcell.ColorWhite,            // White (#eee8d5) for selected text
-			ContrastSecondaryTextColor:  tcell.ColorLightCyan,        // Light cyan (#93a1a1)
-		},
-		"dracula": tview.Theme{
-			PrimitiveBackgroundColor:    tcell.NewHexColor(0x1e1e2e), // #1e1e2e for main dropdown box
-			ContrastBackgroundColor:     tcell.ColorDarkMagenta,      // Selected option: magenta (#bd93f9)
-			MoreContrastBackgroundColor: tcell.ColorDarkGray,         // Non-selected options: dark gray (#44475a)
-			BorderColor:                 tcell.ColorLightGray,        // Light gray (#f8f8f2)
-			TitleColor:                  tcell.ColorRed,              // Red (#ff5555)
-			GraphicsColor:               tcell.ColorDarkCyan,         // Cyan (#8be9fd)
-			PrimaryTextColor:            tcell.ColorWhite,            // White (#f8f8f2)
-			SecondaryTextColor:          tcell.ColorYellow,           // Yellow (#f1fa8c)
-			TertiaryTextColor:           tcell.ColorOrange,           // Orange (#ffb86c)
-			InverseTextColor:            tcell.ColorWhite,            // White (#f8f8f2) for selected text
-			ContrastSecondaryTextColor:  tcell.ColorLightGreen,       // Light green (#50fa7b)
-		},
-	}
 )
 
 func init() {
+	// Start background goroutine to update model color cache
+	startModelColorUpdater()
 	tview.Styles = colorschemes["default"]
 	app = tview.NewApplication()
 	pages = tview.NewPages()
@@ -286,6 +232,11 @@ func init() {
 	statusLineWidget = tview.NewTextView().
 		SetDynamicColors(true).
 		SetTextAlign(tview.AlignCenter)
+	// // vertical text center alignment
+	// statusLineWidget.SetDrawFunc(func(screen tcell.Screen, x, y, w, h int) (int, int, int, int) {
+	// 	y += h / 2
+	// 	return x, y, w, h
+	// })
 	// Initially set up flex without search bar
 	flex = tview.NewFlex().SetDirection(tview.FlexRow).
 		AddItem(textView, 0, 40, false).
@@ -482,6 +433,19 @@ func init() {
 			pages.RemovePage(helpPage)
 			return nil
 		}
+		// Allow scrolling keys to pass through to the TextView
+		switch event.Key() {
+		case tcell.KeyUp, tcell.KeyDown,
+			tcell.KeyPgUp, tcell.KeyPgDn,
+			tcell.KeyHome, tcell.KeyEnd:
+			return event
+		}
+		if event.Key() == tcell.KeyRune {
+			switch event.Rune() {
+			case 'j', 'k', 'g', 'G':
+				return event
+			}
+		}
 		return nil
 	})
 	//
@@ -560,6 +524,10 @@ func init() {
 			}
 			return nil
 		}
+		if event.Key() == tcell.KeyRune && event.Rune() == 'i' && event.Modifiers()&tcell.ModAlt != 0 {
+			showColorschemeSelectionPopup()
+			return nil
+		}
 		if event.Key() == tcell.KeyF1 {
 			// chatList, err := loadHistoryChats()
 			chatList, err := store.GetChatByChar(cfg.AssistantRole)
@@ -731,6 +699,8 @@ func init() {
 		}
 		if event.Key() == tcell.KeyF12 {
 			// help window cheatsheet
+			// Update help text with current status before showing
+			helpView.SetText(fmt.Sprintf(helpText, makeStatusLine()))
 			pages.AddPage(helpPage, helpView, true, true)
 			return nil
 		}
@@ -865,6 +835,7 @@ func init() {
 		lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
 		cleanedText := models.CleanText(lastMsg.Content)
 		if cleanedText != "" {
+			// nolint: errcheck
 			go orator.Speak(cleanedText)
 		}
 	}