Enhance: detailed error

This commit is contained in:
Grail Finder
2026-02-03 16:56:31 +03:00
parent 0f5bbaa943
commit 76f14ce4a3
4 changed files with 148 additions and 25 deletions

106
bot.go
View File

@@ -498,6 +498,58 @@ func monitorModelLoad(modelID string) {
}() }()
} }
// extractDetailedErrorFromBytes extracts a human-readable error description
// from an HTTP error response body.
//
// It attempts to decode the body as a JSON object of the shape
//
//	{"error": {"message": ..., "code": ..., "metadata": {"raw": ...}}}
//
// When metadata carries a nested raw JSON error (as some API gateways do),
// the inner message takes precedence. If the body is not a structured error
// response — or the error object yields nothing useful — the raw body is
// returned together with the HTTP status code.
func extractDetailedErrorFromBytes(body []byte, statusCode int) string {
	var errorResponse map[string]interface{}
	if err := json.Unmarshal(body, &errorResponse); err == nil {
		if errorData, ok := errorResponse["error"]; ok {
			if errorMap, ok := errorData.(map[string]interface{}); ok {
				if msg := formatStructuredError(errorMap); msg != "" {
					return msg
				}
			}
		}
	}
	// Not a structured error response (or nothing extractable from it):
	// fall back to status + raw body so the user still sees something.
	return fmt.Sprintf("HTTP Status: %d, Response Body: %s", statusCode, string(body))
}

// formatStructuredError renders the "error" object of a JSON error response.
// Returns "" when nothing useful could be extracted, so the caller can fall
// back to the raw body instead of emitting an empty "API Error: ".
func formatStructuredError(errorMap map[string]interface{}) string {
	var errorMsg string
	if msg, ok := errorMap["message"]; ok {
		errorMsg = fmt.Sprintf("%v", msg)
	}
	var details []string
	if code, ok := errorMap["code"]; ok {
		details = append(details, fmt.Sprintf("Code: %v", code))
	}
	if metadata, ok := errorMap["metadata"]; ok {
		// Some gateways wrap the upstream provider's error as a raw JSON
		// string inside metadata; prefer its inner message when present.
		if inner := rawMetadataMessage(metadata); inner != "" {
			return fmt.Sprintf("API Error: %s", inner)
		}
		details = append(details, fmt.Sprintf("Metadata: %v", metadata))
	}
	if len(details) > 0 {
		return fmt.Sprintf("API Error: %s (%s)", errorMsg, strings.Join(details, ", "))
	}
	if errorMsg == "" {
		return "" // nothing extracted; caller falls back to raw body
	}
	return "API Error: " + errorMsg
}

// rawMetadataMessage digs into metadata.raw, which may itself be a
// JSON-encoded {"error": {"message": ...}} document, and returns the inner
// message, or "" when it is absent or unparsable.
func rawMetadataMessage(metadata interface{}) string {
	metadataMap, ok := metadata.(map[string]interface{})
	if !ok {
		return ""
	}
	raw, ok := metadataMap["raw"]
	if !ok {
		return ""
	}
	rawStr, ok := raw.(string)
	if !ok {
		return ""
	}
	var rawError map[string]interface{}
	if json.Unmarshal([]byte(rawStr), &rawError) != nil {
		return ""
	}
	errData, ok := rawError["error"].(map[string]interface{})
	if !ok {
		return ""
	}
	msg, ok := errData["message"]
	if !ok {
		return ""
	}
	return fmt.Sprintf("%v", msg)
}
// sendMsgToLLM expects streaming resp // sendMsgToLLM expects streaming resp
func sendMsgToLLM(body io.Reader) { func sendMsgToLLM(body io.Reader) {
choseChunkParser() choseChunkParser()
@@ -524,6 +576,33 @@ func sendMsgToLLM(body io.Reader) {
streamDone <- true streamDone <- true
return return
} }
// Check if the initial response is an error before starting to stream
if resp.StatusCode >= 400 {
// Read the response body to get detailed error information
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
logger.Error("failed to read error response body", "error", err, "status_code", resp.StatusCode)
detailedError := fmt.Sprintf("HTTP Status: %d, Failed to read response body: %v", resp.StatusCode, err)
if err := notifyUser("API Error", detailedError); err != nil {
logger.Error("failed to notify", "error", err)
}
resp.Body.Close()
streamDone <- true
return
}
// Parse the error response for detailed information
detailedError := extractDetailedErrorFromBytes(bodyBytes, resp.StatusCode)
logger.Error("API returned error status", "status_code", resp.StatusCode, "detailed_error", detailedError)
if err := notifyUser("API Error", detailedError); err != nil {
logger.Error("failed to notify", "error", err)
}
resp.Body.Close()
streamDone <- true
return
}
defer resp.Body.Close() defer resp.Body.Close()
reader := bufio.NewReader(resp.Body) reader := bufio.NewReader(resp.Body)
counter := uint32(0) counter := uint32(0)
@@ -541,12 +620,24 @@ func sendMsgToLLM(body io.Reader) {
} }
line, err := reader.ReadBytes('\n') line, err := reader.ReadBytes('\n')
if err != nil { if err != nil {
// Check if this is an EOF error and if the response contains detailed error information
if err == io.EOF {
// For streaming responses, we may have already consumed the error body
// So we'll use the original status code to provide context
detailedError := fmt.Sprintf("Streaming connection closed unexpectedly (Status: %d). This may indicate an API error. Check your API provider and model settings.", resp.StatusCode)
logger.Error("error reading response body", "error", err, "detailed_error", detailedError,
"status_code", resp.StatusCode, "user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
if err := notifyUser("API Error", detailedError); err != nil {
logger.Error("failed to notify", "error", err)
}
} else {
logger.Error("error reading response body", "error", err, "line", string(line), logger.Error("error reading response body", "error", err, "line", string(line),
"user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI) "user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
// if err.Error() != "EOF" { // if err.Error() != "EOF" {
if err := notifyUser("API error", err.Error()); err != nil { if err := notifyUser("API error", err.Error()); err != nil {
logger.Error("failed to notify", "error", err) logger.Error("failed to notify", "error", err)
} }
}
streamDone <- true streamDone <- true
break break
// } // }
@@ -798,7 +889,7 @@ out:
for i, msg := range chatBody.Messages { for i, msg := range chatBody.Messages {
logger.Debug("chatRound: after cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID) logger.Debug("chatRound: after cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
} }
colorText() refreshChatDisplay()
updateStatusLine() updateStatusLine()
// bot msg is done; // bot msg is done;
// now check it for func call // now check it for func call
@@ -1255,16 +1346,15 @@ func triggerPrivateMessageResponses(msg models.RoleMsg) {
// weird cases, skip // weird cases, skip
continue continue
} }
// Skip if this is the user character or the sender of the message // Skip if this is the user character (user handles their own turn)
// If user is in KnownTo, stop processing - it's the user's turn
if recipient == cfg.UserRole || recipient == userCharacter { if recipient == cfg.UserRole || recipient == userCharacter {
return // user in known_to => users turn return // user in known_to => user's turn
} }
// Trigger the recipient character to respond by simulating a prompt // Trigger the recipient character to respond
// that indicates it's their turn // Send empty message so LLM continues naturally from the conversation
triggerMsg := recipient + ":\n"
// Call chatRound with the trigger message to make the recipient respond
crr := &models.ChatRoundReq{ crr := &models.ChatRoundReq{
UserMsg: triggerMsg, UserMsg: "", // Empty message - LLM will continue the conversation
Role: recipient, Role: recipient,
} }
chatRoundChan <- crr chatRoundChan <- crr

View File

@@ -22,6 +22,28 @@ func isASCII(s string) bool {
return true return true
} }
// refreshChatDisplay re-renders the chat textView for the character the
// user is currently "writing as".
//
// Messages are filtered down to what that character is allowed to see,
// converted to display text, and pushed to the textView from the UI event
// loop. QueueUpdateDraw is used instead of QueueUpdate because QueueUpdate
// alone does not trigger a redraw — the new text would stay invisible
// until some unrelated draw happened.
func refreshChatDisplay() {
	// Default to the user's own role; WriteNextMsgAs overrides it.
	viewingAs := cfg.UserRole
	if cfg.WriteNextMsgAs != "" {
		viewingAs = cfg.WriteNextMsgAs
	}
	// Filter the conversation to this character's point of view.
	filteredMessages := filterMessagesForCharacter(chatBody.Messages, viewingAs)
	displayText := chatToText(filteredMessages, cfg.ShowSys)
	// Serialize the mutation with the UI goroutine AND force a redraw.
	// NOTE(review): QueueUpdateDraw must not be called from the main event
	// loop goroutine (it would deadlock) — confirm all callers run in
	// background goroutines.
	app.QueueUpdateDraw(func() {
		textView.SetText(displayText)
		colorText()
		if scrollToEndEnabled {
			textView.ScrollToEnd()
		}
	})
}
func colorText() { func colorText() {
text := textView.GetText(false) text := textView.GetText(false)
quoteReplacer := strings.NewReplacer( quoteReplacer := strings.NewReplacer(

11
llm.go
View File

@@ -204,7 +204,8 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
} }
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData)) "msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData, defaultLCPProps, chatBody.MakeStopSlice()) payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
defaultLCPProps, chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()))
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)
@@ -436,7 +437,8 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt) "msg", msg, "resume", resume, "prompt", prompt)
payload := models.NewDSCompletionReq(prompt, chatBody.Model, payload := models.NewDSCompletionReq(prompt, chatBody.Model,
defaultLCPProps["temp"], chatBody.MakeStopSlice()) defaultLCPProps["temp"],
chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()))
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)
@@ -594,10 +596,11 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
if cfg.ThinkUse && !cfg.ToolUse { if cfg.ThinkUse && !cfg.ToolUse {
prompt += "<think>" prompt += "<think>"
} }
ss := chatBody.MakeStopSlice() ss := chatBody.MakeStopSliceExcluding(botPersona, listChatRoles())
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", ss) "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", ss)
payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, defaultLCPProps, ss) payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt,
defaultLCPProps, ss)
data, err := json.Marshal(payload) data, err := json.Marshal(payload)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)

View File

@@ -369,14 +369,22 @@ func (cb *ChatBody) ListRoles() []string {
} }
func (cb *ChatBody) MakeStopSlice() []string { func (cb *ChatBody) MakeStopSlice() []string {
namesMap := make(map[string]struct{}) return cb.MakeStopSliceExcluding("", cb.ListRoles())
for _, m := range cb.Messages {
namesMap[m.Role] = struct{}{}
} }
ss := make([]string, 0, 1+len(namesMap))
ss = append(ss, "<|im_end|>") func (cb *ChatBody) MakeStopSliceExcluding(
for k := range namesMap { excludeRole string, roleList []string,
ss = append(ss, k+":\n") ) []string {
ss := []string{}
for _, role := range roleList {
// Skip the excluded role (typically the current speaker)
if role == excludeRole {
continue
}
// Add multiple variations to catch different formatting
ss = append(ss, role+":\n") // Most common: role with newline
ss = append(ss, role+":") // Role with colon but no newline
ss = append(ss, role+": ") // Role with colon and space
} }
return ss return ss
} }