Feat: add external tool web search

This commit is contained in:
Grail Finder
2025-10-09 10:36:55 +03:00
parent dc183e3692
commit 314c07835e
7 changed files with 373 additions and 306 deletions

8
bot.go
View File

@@ -151,13 +151,8 @@ func fetchORModels(free bool) ([]string, error) {
func sendMsgToLLM(body io.Reader) { func sendMsgToLLM(body io.Reader) {
choseChunkParser() choseChunkParser()
bodyBytes, _ := io.ReadAll(body)
ok := json.Valid(bodyBytes)
if !ok {
panic("invalid json")
}
// nolint // nolint
req, err := http.NewRequest("POST", cfg.CurrentAPI, bytes.NewReader(bodyBytes)) req, err := http.NewRequest("POST", cfg.CurrentAPI, body)
if err != nil { if err != nil {
logger.Error("newreq error", "error", err) logger.Error("newreq error", "error", err)
if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil { if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil {
@@ -172,7 +167,6 @@ func sendMsgToLLM(body io.Reader) {
// req.Header.Set("Content-Length", strconv.Itoa(len(bodyBytes))) // req.Header.Set("Content-Length", strconv.Itoa(len(bodyBytes)))
req.Header.Set("Accept-Encoding", "gzip") req.Header.Set("Accept-Encoding", "gzip")
// nolint // nolint
// resp, err := httpClient.Post(cfg.CurrentAPI, "application/json", body)
resp, err := httpClient.Do(req) resp, err := httpClient.Do(req)
if err != nil { if err != nil {
logger.Error("llamacpp api", "error", err) logger.Error("llamacpp api", "error", err)

View File

@@ -15,6 +15,9 @@ type Config struct {
CurrentProvider string CurrentProvider string
APIMap map[string]string APIMap map[string]string
FetchModelNameAPI string `toml:"FetchModelNameAPI"` FetchModelNameAPI string `toml:"FetchModelNameAPI"`
// ToolsAPI list?
SearchAPI string `toml:"SearchAPI"`
SearchDescribe string `toml:"SearchDescribe"`
// //
ShowSys bool `toml:"ShowSys"` ShowSys bool `toml:"ShowSys"`
LogFile string `toml:"LogFile"` LogFile string `toml:"LogFile"`

6
llm.go
View File

@@ -320,7 +320,7 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
bodyCopy.Messages[i] = msg bodyCopy.Messages[i] = msg
} }
} }
dsBody := models.NewDSCharReq(*bodyCopy) dsBody := models.NewDSChatReq(*bodyCopy)
data, err := json.Marshal(dsBody) data, err := json.Marshal(dsBody)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)
@@ -462,8 +462,8 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
bodyCopy.Messages[i] = msg bodyCopy.Messages[i] = msg
} }
} }
dsBody := models.NewDSCharReq(*bodyCopy) orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps)
data, err := json.Marshal(dsBody) data, err := json.Marshal(orBody)
if err != nil { if err != nil {
logger.Error("failed to form a msg", "error", err) logger.Error("failed to form a msg", "error", err)
return nil, err return nil, err

144
models/deepseek.go Normal file
View File

@@ -0,0 +1,144 @@
package models
// DSChatReq is the request payload for the DeepSeek chat-completions
// endpoint. Only the fields this client actually sends are live; the
// commented-out fields are additional API options kept for reference.
type DSChatReq struct {
Messages []RoleMsg `json:"messages"`
Model string `json:"model"`
Stream bool `json:"stream"`
FrequencyPenalty int `json:"frequency_penalty"`
MaxTokens int `json:"max_tokens"`
PresencePenalty int `json:"presence_penalty"`
Temperature float32 `json:"temperature"`
TopP float32 `json:"top_p"`
// API options not sent by this client yet:
// ResponseFormat struct {
// Type string `json:"type"`
// } `json:"response_format"`
// Stop any `json:"stop"`
// StreamOptions any `json:"stream_options"`
// Tools any `json:"tools"`
// ToolChoice string `json:"tool_choice"`
// Logprobs bool `json:"logprobs"`
// TopLogprobs any `json:"top_logprobs"`
}
// NewDSChatReq builds a DeepSeek chat request from the generic ChatBody.
// The conversation fields (messages, model, stream flag) are copied from
// cb; the sampling knobs are filled with this client's fixed defaults.
func NewDSChatReq(cb ChatBody) DSChatReq {
req := DSChatReq{
Messages: cb.Messages,
Model: cb.Model,
Stream: cb.Stream,
}
// Fixed defaults used by this client.
req.MaxTokens = 2048
req.PresencePenalty = 0
req.FrequencyPenalty = 0
req.Temperature = 1.0
req.TopP = 1.0
return req
}
// DSCompletionReq is the request payload for the DeepSeek (legacy)
// text-completions endpoint.
type DSCompletionReq struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Echo bool `json:"echo"`
FrequencyPenalty int `json:"frequency_penalty"`
// Logprobs int `json:"logprobs"`
MaxTokens int `json:"max_tokens"`
PresencePenalty int `json:"presence_penalty"`
Stop any `json:"stop"`
Stream bool `json:"stream"`
StreamOptions any `json:"stream_options"`
Suffix any `json:"suffix"`
Temperature float32 `json:"temperature"`
TopP float32 `json:"top_p"`
}

// NewDSCompletionReq builds a streaming completion request for the given
// prompt and model. temp sets the sampling temperature and stopSlice is
// forwarded as the stop sequences; every other knob uses a fixed default.
func NewDSCompletionReq(prompt, model string, temp float32, stopSlice []string) DSCompletionReq {
req := DSCompletionReq{
Model: model,
Prompt: prompt,
Temperature: temp,
Stop: stopSlice,
}
// This client always streams and never echoes the prompt back.
req.Stream = true
req.Echo = false
req.MaxTokens = 2048
req.PresencePenalty = 0
req.FrequencyPenalty = 0
req.TopP = 1.0
return req
}
// DSCompletionResp is the response payload of the DeepSeek completions
// endpoint. The nested anonymous structs mirror the API's JSON so the
// whole payload decodes cleanly; generated text is in Choices[i].Text.
type DSCompletionResp struct {
ID string `json:"id"`
Choices []struct {
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
Logprobs struct {
TextOffset []int `json:"text_offset"`
TokenLogprobs []int `json:"token_logprobs"`
Tokens []string `json:"tokens"`
TopLogprobs []struct {
} `json:"top_logprobs"`
} `json:"logprobs"`
Text string `json:"text"`
} `json:"choices"`
Created int `json:"created"`
Model string `json:"model"`
SystemFingerprint string `json:"system_fingerprint"`
Object string `json:"object"`
Usage struct {
CompletionTokens int `json:"completion_tokens"`
PromptTokens int `json:"prompt_tokens"`
PromptCacheHitTokens int `json:"prompt_cache_hit_tokens"`
PromptCacheMissTokens int `json:"prompt_cache_miss_tokens"`
TotalTokens int `json:"total_tokens"`
CompletionTokensDetails struct {
ReasoningTokens int `json:"reasoning_tokens"`
} `json:"completion_tokens_details"`
} `json:"usage"`
}
// DSChatResp is a DeepSeek chat-completion response; incremental message
// text arrives in Choices[i].Delta.Content.
type DSChatResp struct {
Choices []struct {
Delta struct {
Content string `json:"content"`
Role any `json:"role"`
} `json:"delta"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
Logprobs any `json:"logprobs"`
} `json:"choices"`
Created int `json:"created"`
ID string `json:"id"`
Model string `json:"model"`
Object string `json:"object"`
SystemFingerprint string `json:"system_fingerprint"`
Usage struct {
CompletionTokens int `json:"completion_tokens"`
PromptTokens int `json:"prompt_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage"`
}
// DSChatStreamResp is a single chunk of a streaming DeepSeek chat
// response. Delta.Content carries the incremental answer text and
// Delta.ReasoningContent the reasoning stream when the model emits one.
type DSChatStreamResp struct {
ID string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
Model string `json:"model"`
SystemFingerprint string `json:"system_fingerprint"`
Choices []struct {
Index int `json:"index"`
Delta struct {
Content string `json:"content"`
ReasoningContent string `json:"reasoning_content"`
} `json:"delta"`
Logprobs any `json:"logprobs"`
FinishReason string `json:"finish_reason"`
} `json:"choices"`
}
// DSBalance is the response of DeepSeek's account-balance query.
// Balances are returned as strings per currency.
type DSBalance struct {
IsAvailable bool `json:"is_available"`
BalanceInfos []struct {
Currency string `json:"currency"`
TotalBalance string `json:"total_balance"`
GrantedBalance string `json:"granted_balance"`
ToppedUpBalance string `json:"topped_up_balance"`
} `json:"balance_infos"`
}

View File

@@ -127,139 +127,6 @@ func (cb *ChatBody) MakeStopSlice() []string {
return ss return ss
} }
type DSChatReq struct {
Messages []RoleMsg `json:"messages"`
Model string `json:"model"`
Stream bool `json:"stream"`
FrequencyPenalty int `json:"frequency_penalty"`
MaxTokens int `json:"max_tokens"`
PresencePenalty int `json:"presence_penalty"`
Temperature float32 `json:"temperature"`
TopP float32 `json:"top_p"`
// ResponseFormat struct {
// Type string `json:"type"`
// } `json:"response_format"`
// Stop any `json:"stop"`
// StreamOptions any `json:"stream_options"`
// Tools any `json:"tools"`
// ToolChoice string `json:"tool_choice"`
// Logprobs bool `json:"logprobs"`
// TopLogprobs any `json:"top_logprobs"`
}
func NewDSCharReq(cb ChatBody) DSChatReq {
return DSChatReq{
Messages: cb.Messages,
Model: cb.Model,
Stream: cb.Stream,
MaxTokens: 2048,
PresencePenalty: 0,
FrequencyPenalty: 0,
Temperature: 1.0,
TopP: 1.0,
}
}
type DSCompletionReq struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Echo bool `json:"echo"`
FrequencyPenalty int `json:"frequency_penalty"`
// Logprobs int `json:"logprobs"`
MaxTokens int `json:"max_tokens"`
PresencePenalty int `json:"presence_penalty"`
Stop any `json:"stop"`
Stream bool `json:"stream"`
StreamOptions any `json:"stream_options"`
Suffix any `json:"suffix"`
Temperature float32 `json:"temperature"`
TopP float32 `json:"top_p"`
}
func NewDSCompletionReq(prompt, model string, temp float32, stopSlice []string) DSCompletionReq {
return DSCompletionReq{
Model: model,
Prompt: prompt,
Temperature: temp,
Stream: true,
Echo: false,
MaxTokens: 2048,
PresencePenalty: 0,
FrequencyPenalty: 0,
TopP: 1.0,
Stop: stopSlice,
}
}
type DSCompletionResp struct {
ID string `json:"id"`
Choices []struct {
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
Logprobs struct {
TextOffset []int `json:"text_offset"`
TokenLogprobs []int `json:"token_logprobs"`
Tokens []string `json:"tokens"`
TopLogprobs []struct {
} `json:"top_logprobs"`
} `json:"logprobs"`
Text string `json:"text"`
} `json:"choices"`
Created int `json:"created"`
Model string `json:"model"`
SystemFingerprint string `json:"system_fingerprint"`
Object string `json:"object"`
Usage struct {
CompletionTokens int `json:"completion_tokens"`
PromptTokens int `json:"prompt_tokens"`
PromptCacheHitTokens int `json:"prompt_cache_hit_tokens"`
PromptCacheMissTokens int `json:"prompt_cache_miss_tokens"`
TotalTokens int `json:"total_tokens"`
CompletionTokensDetails struct {
ReasoningTokens int `json:"reasoning_tokens"`
} `json:"completion_tokens_details"`
} `json:"usage"`
}
type DSChatResp struct {
Choices []struct {
Delta struct {
Content string `json:"content"`
Role any `json:"role"`
} `json:"delta"`
FinishReason string `json:"finish_reason"`
Index int `json:"index"`
Logprobs any `json:"logprobs"`
} `json:"choices"`
Created int `json:"created"`
ID string `json:"id"`
Model string `json:"model"`
Object string `json:"object"`
SystemFingerprint string `json:"system_fingerprint"`
Usage struct {
CompletionTokens int `json:"completion_tokens"`
PromptTokens int `json:"prompt_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage"`
}
type DSChatStreamResp struct {
ID string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
Model string `json:"model"`
SystemFingerprint string `json:"system_fingerprint"`
Choices []struct {
Index int `json:"index"`
Delta struct {
Content string `json:"content"`
ReasoningContent string `json:"reasoning_content"`
} `json:"delta"`
Logprobs any `json:"logprobs"`
FinishReason string `json:"finish_reason"`
} `json:"choices"`
}
type EmbeddingResp struct { type EmbeddingResp struct {
Embedding []float32 `json:"embedding"` Embedding []float32 `json:"embedding"`
Index uint32 `json:"index"` Index uint32 `json:"index"`
@@ -374,166 +241,3 @@ type LlamaCPPResp struct {
Content string `json:"content"` Content string `json:"content"`
Stop bool `json:"stop"` Stop bool `json:"stop"`
} }
type DSBalance struct {
IsAvailable bool `json:"is_available"`
BalanceInfos []struct {
Currency string `json:"currency"`
TotalBalance string `json:"total_balance"`
GrantedBalance string `json:"granted_balance"`
ToppedUpBalance string `json:"topped_up_balance"`
} `json:"balance_infos"`
}
// openrouter
// https://openrouter.ai/docs/api-reference/completion
type OpenRouterCompletionReq struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Stream bool `json:"stream"`
Temperature float32 `json:"temperature"`
Stop []string `json:"stop"` // not present in docs
MinP float32 `json:"min_p"`
NPredict int32 `json:"max_tokens"`
}
func NewOpenRouterCompletionReq(model, prompt string, props map[string]float32, stopStrings []string) OpenRouterCompletionReq {
return OpenRouterCompletionReq{
Stream: true,
Prompt: prompt,
Temperature: props["temperature"],
MinP: props["min_p"],
NPredict: int32(props["n_predict"]),
Stop: stopStrings,
Model: model,
}
}
type OpenRouterChatReq struct {
Messages []RoleMsg `json:"messages"`
Model string `json:"model"`
Stream bool `json:"stream"`
Temperature float32 `json:"temperature"`
MinP float32 `json:"min_p"`
NPredict int32 `json:"max_tokens"`
}
func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
return OpenRouterChatReq{
Messages: cb.Messages,
Model: cb.Model,
Stream: cb.Stream,
Temperature: props["temperature"],
MinP: props["min_p"],
NPredict: int32(props["n_predict"]),
}
}
type OpenRouterChatRespNonStream struct {
ID string `json:"id"`
Provider string `json:"provider"`
Model string `json:"model"`
Object string `json:"object"`
Created int `json:"created"`
Choices []struct {
Logprobs any `json:"logprobs"`
FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"`
Index int `json:"index"`
Message struct {
Role string `json:"role"`
Content string `json:"content"`
Refusal any `json:"refusal"`
Reasoning any `json:"reasoning"`
} `json:"message"`
} `json:"choices"`
Usage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage"`
}
type OpenRouterChatResp struct {
ID string `json:"id"`
Provider string `json:"provider"`
Model string `json:"model"`
Object string `json:"object"`
Created int `json:"created"`
Choices []struct {
Index int `json:"index"`
Delta struct {
Role string `json:"role"`
Content string `json:"content"`
} `json:"delta"`
FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"`
Logprobs any `json:"logprobs"`
} `json:"choices"`
}
type OpenRouterCompletionResp struct {
ID string `json:"id"`
Provider string `json:"provider"`
Model string `json:"model"`
Object string `json:"object"`
Created int `json:"created"`
Choices []struct {
Text string `json:"text"`
FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"`
Logprobs any `json:"logprobs"`
} `json:"choices"`
}
type ORModel struct {
ID string `json:"id"`
CanonicalSlug string `json:"canonical_slug"`
HuggingFaceID string `json:"hugging_face_id"`
Name string `json:"name"`
Created int `json:"created"`
Description string `json:"description"`
ContextLength int `json:"context_length"`
Architecture struct {
Modality string `json:"modality"`
InputModalities []string `json:"input_modalities"`
OutputModalities []string `json:"output_modalities"`
Tokenizer string `json:"tokenizer"`
InstructType any `json:"instruct_type"`
} `json:"architecture"`
Pricing struct {
Prompt string `json:"prompt"`
Completion string `json:"completion"`
Request string `json:"request"`
Image string `json:"image"`
Audio string `json:"audio"`
WebSearch string `json:"web_search"`
InternalReasoning string `json:"internal_reasoning"`
} `json:"pricing,omitempty"`
TopProvider struct {
ContextLength int `json:"context_length"`
MaxCompletionTokens int `json:"max_completion_tokens"`
IsModerated bool `json:"is_moderated"`
} `json:"top_provider"`
PerRequestLimits any `json:"per_request_limits"`
SupportedParameters []string `json:"supported_parameters"`
}
type ORModels struct {
Data []ORModel `json:"data"`
}
func (orm *ORModels) ListModels(free bool) []string {
resp := []string{}
for _, model := range orm.Data {
if free {
if model.Pricing.Prompt == "0" && model.Pricing.Request == "0" &&
model.Pricing.Completion == "0" {
resp = append(resp, model.ID)
}
} else {
resp = append(resp, model.ID)
}
}
return resp
}

154
models/openrouter.go Normal file
View File

@@ -0,0 +1,154 @@
package models
// openrouter
// https://openrouter.ai/docs/api-reference/completion
// OpenRouterCompletionReq is the request payload for the OpenRouter
// text-completion endpoint.
// https://openrouter.ai/docs/api-reference/completion
type OpenRouterCompletionReq struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Stream bool `json:"stream"`
Temperature float32 `json:"temperature"`
Stop []string `json:"stop"` // not present in docs
MinP float32 `json:"min_p"`
NPredict int32 `json:"max_tokens"`
}

// NewOpenRouterCompletionReq builds a streaming completion request.
// props carries sampling parameters under the llama.cpp-style keys
// "temperature", "min_p" and "n_predict"; absent keys default to 0.
func NewOpenRouterCompletionReq(model, prompt string, props map[string]float32, stopStrings []string) OpenRouterCompletionReq {
req := OpenRouterCompletionReq{
Model: model,
Prompt: prompt,
Stop: stopStrings,
}
req.Stream = true
req.Temperature = props["temperature"]
req.MinP = props["min_p"]
req.NPredict = int32(props["n_predict"])
return req
}
// OpenRouterChatReq is the request payload for the OpenRouter chat
// completions endpoint.
type OpenRouterChatReq struct {
Messages []RoleMsg `json:"messages"`
Model string `json:"model"`
Stream bool `json:"stream"`
Temperature float32 `json:"temperature"`
MinP float32 `json:"min_p"`
NPredict int32 `json:"max_tokens"`
}

// NewOpenRouterChatReq builds a chat request from the generic ChatBody,
// taking sampling parameters from the llama.cpp-style props map
// ("temperature", "min_p", "n_predict"); absent keys default to 0.
func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
req := OpenRouterChatReq{
Messages: cb.Messages,
Model: cb.Model,
Stream: cb.Stream,
}
req.Temperature = props["temperature"]
req.MinP = props["min_p"]
req.NPredict = int32(props["n_predict"])
return req
}
// OpenRouterChatRespNonStream is a complete (non-streaming) chat
// response from OpenRouter; the full answer is in
// Choices[i].Message.Content.
type OpenRouterChatRespNonStream struct {
ID string `json:"id"`
Provider string `json:"provider"`
Model string `json:"model"`
Object string `json:"object"`
Created int `json:"created"`
Choices []struct {
Logprobs any `json:"logprobs"`
FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"`
Index int `json:"index"`
Message struct {
Role string `json:"role"`
Content string `json:"content"`
Refusal any `json:"refusal"`
Reasoning any `json:"reasoning"`
} `json:"message"`
} `json:"choices"`
Usage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage"`
}
// OpenRouterChatResp is one chunk of a streaming OpenRouter chat
// response; incremental text arrives in Choices[i].Delta.Content.
type OpenRouterChatResp struct {
ID string `json:"id"`
Provider string `json:"provider"`
Model string `json:"model"`
Object string `json:"object"`
Created int `json:"created"`
Choices []struct {
Index int `json:"index"`
Delta struct {
Role string `json:"role"`
Content string `json:"content"`
} `json:"delta"`
FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"`
Logprobs any `json:"logprobs"`
} `json:"choices"`
}
// OpenRouterCompletionResp is a chunk of an OpenRouter text-completion
// response; generated text arrives in Choices[i].Text.
type OpenRouterCompletionResp struct {
ID string `json:"id"`
Provider string `json:"provider"`
Model string `json:"model"`
Object string `json:"object"`
Created int `json:"created"`
Choices []struct {
Text string `json:"text"`
FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"`
Logprobs any `json:"logprobs"`
} `json:"choices"`
}
// ORModel describes one model entry returned by OpenRouter's model
// listing API; pricing values arrive as decimal strings.
type ORModel struct {
ID string `json:"id"`
CanonicalSlug string `json:"canonical_slug"`
HuggingFaceID string `json:"hugging_face_id"`
Name string `json:"name"`
Created int `json:"created"`
Description string `json:"description"`
ContextLength int `json:"context_length"`
Architecture struct {
Modality string `json:"modality"`
InputModalities []string `json:"input_modalities"`
OutputModalities []string `json:"output_modalities"`
Tokenizer string `json:"tokenizer"`
InstructType any `json:"instruct_type"`
} `json:"architecture"`
Pricing struct {
Prompt string `json:"prompt"`
Completion string `json:"completion"`
Request string `json:"request"`
Image string `json:"image"`
Audio string `json:"audio"`
WebSearch string `json:"web_search"`
InternalReasoning string `json:"internal_reasoning"`
} `json:"pricing,omitempty"`
TopProvider struct {
ContextLength int `json:"context_length"`
MaxCompletionTokens int `json:"max_completion_tokens"`
IsModerated bool `json:"is_moderated"`
} `json:"top_provider"`
PerRequestLimits any `json:"per_request_limits"`
SupportedParameters []string `json:"supported_parameters"`
}

// ORModels is the envelope of OpenRouter's model listing response.
type ORModels struct {
Data []ORModel `json:"data"`
}

// ListModels returns the IDs of the listed models. With free set, only
// models whose prompt, request and completion prices are all "0" are
// included; otherwise every model ID is returned.
func (orm *ORModels) ListModels(free bool) []string {
ids := []string{}
for _, m := range orm.Data {
// Skip paid models when only free ones were requested.
if free && (m.Pricing.Prompt != "0" || m.Pricing.Request != "0" || m.Pricing.Completion != "0") {
continue
}
ids = append(ids, m.ID)
}
return ids
}

View File

@@ -1,8 +1,13 @@
package main package main
import ( import (
"bytes"
"encoding/json"
"fmt" "fmt"
"gf-lt/config"
"gf-lt/models" "gf-lt/models"
"io"
"net/http"
"regexp" "regexp"
"strings" "strings"
"time" "time"
@@ -76,6 +81,69 @@ After that you are free to respond to the user.
sysLabels = []string{"basic_sys", "tool_sys"} sysLabels = []string{"basic_sys", "tool_sys"}
) )
// populateTools registers external tools when the config points at a
// tool server. Currently only web search is supported: the tool's JSON
// description is fetched from cfg.SearchDescribe and, on success, the
// websearch handler is wired into fnMap and the description appended to
// baseTools. Any failure is logged and the tool is simply not added.
func populateTools(cfg config.Config) {
// Both the search endpoint and its description URL must be configured.
if cfg.SearchAPI == "" || cfg.SearchDescribe == "" {
return
}
resp, err := httpClient.Get(cfg.SearchDescribe)
if err != nil {
logger.Error("failed to get websearch tool description",
"link", cfg.SearchDescribe, "error", err)
return
}
// Was leaked before: close the body so the connection can be reused.
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
logger.Error("unexpected status for websearch tool description",
"link", cfg.SearchDescribe, "status", resp.StatusCode)
return
}
descResp := models.Tool{}
if err := json.NewDecoder(resp.Body).Decode(&descResp); err != nil {
logger.Error("failed to unmarshal websearch tool description",
"link", cfg.SearchDescribe, "error", err)
return
}
fnMap["web_search"] = websearch
baseTools = append(baseTools, descResp)
logger.Info("added web_search tool", "tool", descResp)
}
// {"type":"function","function":{"name":"web_search","description":"Perform a web search to find information on varioust topics","parameters":{"type":"object","properties":{"num_results":{"type":"integer","description":"Maximum number of results to return (default: 10)"},"query":{"type":"string","description":"The search query to find information about"},"search_type":{"type":"string","description":"Type of search to perform: 'api' for SearXNG API search or 'scraper' for web scraping (default: 'scraper')"}},"required":["query"]}}}
// web search (depends on extra server)
// websearch implements the web_search tool: it forwards the model's
// arguments as a JSON POST to the external search server (cfg.SearchAPI)
// and returns the raw response body. On any failure a human-readable
// error message is returned as the tool result so the model can react.
func websearch(args map[string]string) []byte {
// A non-empty "query" argument is the only required parameter.
query, ok := args["query"]
if !ok || query == "" {
msg := "query not provided to web_search tool"
logger.Error(msg)
return []byte(msg)
}
payload, err := json.Marshal(args)
if err != nil {
logger.Error("failed to marshal web_search arguments", "error", err)
msg := fmt.Sprintf("failed to marshal web_search arguments; error: %s\n", err)
return []byte(msg)
}
req, err := http.NewRequest("POST", cfg.SearchAPI, bytes.NewReader(payload))
if err != nil {
logger.Error("failed to build an http request", "error", err)
msg := fmt.Sprintf("failed to build an http request; error: %s\n", err)
return []byte(msg)
}
// The body is JSON; without this header servers may reject or
// misparse the request.
req.Header.Set("Content-Type", "application/json")
resp, err := httpClient.Do(req)
if err != nil {
logger.Error("failed to execute http request", "error", err)
msg := fmt.Sprintf("failed to execute http request; error: %s\n", err)
return []byte(msg)
}
defer resp.Body.Close()
// Still return the body on non-200 (it may carry error details for
// the model), but leave a trace in the log.
if resp.StatusCode != http.StatusOK {
logger.Error("web_search returned non-ok status", "status", resp.StatusCode)
}
data, err := io.ReadAll(resp.Body)
if err != nil {
logger.Error("failed to read response body", "error", err)
msg := fmt.Sprintf("failed to read response body; error: %s\n", err)
return []byte(msg)
}
return data
}
/* /*
consider cases: consider cases:
- append mode (treat it like a journal appendix) - append mode (treat it like a journal appendix)