Files
gf-lt/llm.go
2025-11-24 13:06:42 +03:00

672 lines
23 KiB
Go

package main
import (
"bytes"
"encoding/json"
"fmt"
"gf-lt/models"
"io"
"os"
"strings"
)
// imageAttachmentPath holds the path of an image queued to be attached to the
// next message sent to the LLM; consumers clear it after a successful attach.
var imageAttachmentPath string // Global variable to track image attachment for next message
// lastImg keeps the most recently set image path (used by the ctrl+j binding).
var lastImg string // for ctrl+j
// RAGMsg prefixes retrieved RAG context when it is injected as a system message.
var RAGMsg = "Retrieved context for user's query:\n"
// SetImageAttachment sets an image to be attached to the next message sent to the LLM
func SetImageAttachment(imagePath string) {
imageAttachmentPath = imagePath
lastImg = imagePath
}
// ClearImageAttachment drops any image queued for the next message.
// (lastImg is intentionally preserved so ctrl+j can restore it.)
func ClearImageAttachment() {
	imageAttachmentPath = ""
}
// ChunkParser abstracts one LLM backend: it builds the request payload for a
// message and decodes a single streamed response chunk.
type ChunkParser interface {
	// ParseChunk decodes one raw streamed chunk into a TextChunk.
	ParseChunk([]byte) (*models.TextChunk, error)
	// FormMsg appends msg (under role) to the chat and returns the serialized
	// request body; cont=true asks the backend to continue the previous reply
	// (implementations skip opening a fresh assistant turn / tool setup).
	FormMsg(msg, role string, cont bool) (io.Reader, error)
	// GetToken returns the API token for the backend ("" when none is needed).
	GetToken() string
}
func choseChunkParser() {
chunkParser = LlamaCPPeer{}
switch cfg.CurrentAPI {
case "http://localhost:8080/completion":
chunkParser = LlamaCPPeer{}
logger.Debug("chosen llamacppeer", "link", cfg.CurrentAPI)
return
case "http://localhost:8080/v1/chat/completions":
chunkParser = OpenAIer{}
logger.Debug("chosen openair", "link", cfg.CurrentAPI)
return
case "https://api.deepseek.com/beta/completions":
chunkParser = DeepSeekerCompletion{}
logger.Debug("chosen deepseekercompletio", "link", cfg.CurrentAPI)
return
case "https://api.deepseek.com/chat/completions":
chunkParser = DeepSeekerChat{}
logger.Debug("chosen deepseekerchat", "link", cfg.CurrentAPI)
return
case "https://openrouter.ai/api/v1/completions":
chunkParser = OpenRouterCompletion{}
logger.Debug("chosen openroutercompletion", "link", cfg.CurrentAPI)
return
case "https://openrouter.ai/api/v1/chat/completions":
chunkParser = OpenRouterChat{}
logger.Debug("chosen openrouterchat", "link", cfg.CurrentAPI)
return
default:
chunkParser = LlamaCPPeer{}
}
}
// LlamaCPPeer streams from a local llama.cpp server's raw /completion endpoint.
type LlamaCPPeer struct {
}

// OpenAIer streams from an OpenAI-compatible /v1/chat/completions endpoint.
type OpenAIer struct {
}

// DeepSeekerCompletion streams from DeepSeek's beta /completions endpoint.
type DeepSeekerCompletion struct {
}

// DeepSeekerChat streams from DeepSeek's /chat/completions endpoint.
type DeepSeekerChat struct {
}

// OpenRouterCompletion streams from OpenRouter's /completions endpoint.
type OpenRouterCompletion struct {
	// Model appears unused in this file; the request model comes from
	// chatBody.Model — confirm before removing.
	Model string
}

// OpenRouterChat streams from OpenRouter's /chat/completions endpoint.
type OpenRouterChat struct {
	// Model appears unused in this file; the request model comes from
	// chatBody.Model — confirm before removing.
	Model string
}
// min returns the smaller of two ints.
// NOTE(review): duplicates the Go 1.21+ builtin min; kept for compatibility
// with older toolchains.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
// GetToken implements ChunkParser; the local llama.cpp server requires no
// authentication token.
func (lcp LlamaCPPeer) GetToken() string {
	return ""
}
// FormMsg builds the llama.cpp /completion request body. A non-empty msg is
// appended to the shared chat history under role (plus a RAG system message
// when enabled); resume=true leaves the prompt open so the model continues
// the last message instead of starting a fresh assistant turn.
func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
	logger.Debug("formmsg llamacppeer", "link", cfg.CurrentAPI)
	if msg != "" { // otherwise let the bot to continue
		newMsg := models.RoleMsg{Role: role, Content: msg}
		chatBody.Messages = append(chatBody.Messages, newMsg)
		// if rag - add as system message to avoid conflicts with tool usage
		if cfg.RAGEnabled {
			ragResp, err := chatRagUse(newMsg.Content)
			if err != nil {
				logger.Error("failed to form a rag msg", "error", err)
				return nil, err
			}
			// Use system role for RAG context to avoid conflicts with tool usage
			ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
			chatBody.Messages = append(chatBody.Messages, ragMsg)
		}
	}
	if cfg.ToolUse && !resume {
		// inject the tool-use system message into the history
		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
	}
	// flatten the whole history into a single plain-text prompt
	messages := make([]string, len(chatBody.Messages))
	for i, m := range chatBody.Messages {
		messages[i] = m.ToPrompt()
	}
	prompt := strings.Join(messages, "\n")
	// strings builder?
	if !resume {
		// open a new assistant turn ("<persona>:\n") for the model to fill in
		botPersona := cfg.AssistantRole
		if cfg.WriteNextMsgAsCompletionAgent != "" {
			botPersona = cfg.WriteNextMsgAsCompletionAgent
		}
		botMsgStart := "\n" + botPersona + ":\n"
		prompt += botMsgStart
	}
	if cfg.ThinkUse && !cfg.ToolUse {
		// nudge the model into its reasoning block
		prompt += "<think>"
	}
	logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
		"msg", msg, "resume", resume, "prompt", prompt)
	payload := models.NewLCPReq(prompt, defaultLCPProps, chatBody.MakeStopSlice())
	data, err := json.Marshal(payload)
	if err != nil {
		logger.Error("failed to form a msg", "error", err)
		return nil, err
	}
	return bytes.NewReader(data), nil
}
// ParseChunk decodes one llama.cpp streaming chunk. The returned TextChunk
// carries the content delta; Finished is set on the terminal (stop) chunk.
func (lcp LlamaCPPeer) ParseChunk(data []byte) (*models.TextChunk, error) {
	var chunk models.LlamaCPPResp
	if err := json.Unmarshal(data, &chunk); err != nil {
		logger.Error("failed to decode", "error", err, "line", string(data))
		return nil, err
	}
	out := &models.TextChunk{Chunk: chunk.Content}
	if !chunk.Stop {
		return out, nil
	}
	// terminal chunk: not expected to carry text
	if chunk.Content != "" {
		logger.Error("text inside of finish llmchunk", "chunk", chunk)
	}
	out.Finished = true
	return out, nil
}
// GetToken implements ChunkParser; the local OpenAI-compatible endpoint
// requires no authentication token.
func (op OpenAIer) GetToken() string {
	return ""
}
// ParseChunk decodes one OpenAI-style streaming chunk. It surfaces the text
// delta, any tool-call fragment (arguments stream in pieces across chunks),
// and the finish signal. Returns an error for malformed JSON or a chunk
// carrying no choices.
//
// Fix: the previous version indexed llmchunk.Choices[len-1] unconditionally
// and panicked on a chunk with an empty choices array (e.g. usage-only or
// keep-alive chunks); the last choice is now guarded and hoisted.
func (op OpenAIer) ParseChunk(data []byte) (*models.TextChunk, error) {
	llmchunk := models.LLMRespChunk{}
	if err := json.Unmarshal(data, &llmchunk); err != nil {
		logger.Error("failed to decode", "error", err, "line", string(data))
		return nil, err
	}
	if len(llmchunk.Choices) == 0 {
		return nil, fmt.Errorf("llm chunk has no choices; data: %s", string(data))
	}
	choice := llmchunk.Choices[len(llmchunk.Choices)-1]
	resp := &models.TextChunk{
		Chunk: choice.Delta.Content,
	}
	if len(choice.Delta.ToolCalls) > 0 {
		resp.ToolChunk = choice.Delta.ToolCalls[0].Function.Arguments
		// the function name usually arrives only in the first tool-call chunk
		if fname := choice.Delta.ToolCalls[0].Function.Name; fname != "" {
			resp.FuncName = fname
		}
	}
	if choice.FinishReason == "stop" {
		if resp.Chunk != "" {
			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
		}
		resp.Finished = true
	}
	if resp.ToolChunk != "" {
		resp.ToolResp = true
	}
	return resp, nil
}
// FormMsg builds the OpenAI-style chat-completions request body. A non-empty
// msg is appended to the shared chat history under role — as a multimodal
// message when an image attachment is pending — plus a RAG system message
// when enabled. Tool definitions are attached unless resuming or the message
// itself comes from the tool role.
func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
	logger.Debug("formmsg openaier", "link", cfg.CurrentAPI)
	// Capture the image attachment path at the beginning to avoid race conditions
	// with API rotation that might clear the global variable
	localImageAttachmentPath := imageAttachmentPath
	if msg != "" { // otherwise let the bot continue
		// Create the message with support for multimodal content
		var newMsg models.RoleMsg
		// Check if we have an image to add to this message
		if localImageAttachmentPath != "" {
			// Create a multimodal message with both text and image
			newMsg = models.NewMultimodalMsg(role, []interface{}{})
			// Add the text content
			newMsg.AddTextPart(msg)
			// Add the image content
			imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
			if err != nil {
				logger.Error("failed to create image URL from path", "error", err, "path", localImageAttachmentPath)
				// If image processing fails, fall back to simple text message
				newMsg = models.NewRoleMsg(role, msg)
			} else {
				newMsg.AddImagePart(imageURL)
				// Only clear the global image attachment after successfully processing it in this API call
				imageAttachmentPath = "" // Clear the attachment after use
			}
		} else {
			// Create a simple text message
			newMsg = models.NewRoleMsg(role, msg)
		}
		chatBody.Messages = append(chatBody.Messages, newMsg)
		// if rag - add as system message to avoid conflicts with tool usage
		if cfg.RAGEnabled {
			ragResp, err := chatRagUse(newMsg.Content)
			if err != nil {
				logger.Error("failed to form a rag msg", "error", err)
				return nil, err
			}
			// Use system role for RAG context to avoid conflicts with tool usage
			ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
			chatBody.Messages = append(chatBody.Messages, ragMsg)
		}
	}
	req := models.OpenAIReq{
		ChatBody: chatBody,
		Tools:    nil,
	}
	// expose tools unless resuming or the message itself is a tool response
	if cfg.ToolUse && !resume && role != cfg.ToolRole {
		req.Tools = baseTools // set tools to use
	}
	data, err := json.Marshal(req)
	if err != nil {
		logger.Error("failed to form a msg", "error", err)
		return nil, err
	}
	return bytes.NewReader(data), nil
}
// deepseek
// ParseChunk decodes one DeepSeek /completions streaming chunk, returning the
// text delta and the finish signal. Returns an error for malformed JSON or a
// chunk carrying no choices.
//
// Fix: the previous version indexed llmchunk.Choices[0] unconditionally and
// panicked on a chunk with an empty choices array.
func (ds DeepSeekerCompletion) ParseChunk(data []byte) (*models.TextChunk, error) {
	llmchunk := models.DSCompletionResp{}
	if err := json.Unmarshal(data, &llmchunk); err != nil {
		logger.Error("failed to decode", "error", err, "line", string(data))
		return nil, err
	}
	if len(llmchunk.Choices) == 0 {
		return nil, fmt.Errorf("llm chunk has no choices; data: %s", string(data))
	}
	resp := &models.TextChunk{
		Chunk: llmchunk.Choices[0].Text,
	}
	if llmchunk.Choices[0].FinishReason != "" {
		if resp.Chunk != "" {
			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
		}
		resp.Finished = true
	}
	return resp, nil
}
// GetToken returns the configured DeepSeek API token.
func (ds DeepSeekerCompletion) GetToken() string {
	return cfg.DeepSeekToken
}
// FormMsg builds the DeepSeek beta /completions request body. A non-empty msg
// is appended to the shared chat history under role (plus a RAG system
// message when enabled); resume=true leaves the prompt open so the model
// continues the last message instead of starting a fresh assistant turn.
func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
	logger.Debug("formmsg deepseekercompletion", "link", cfg.CurrentAPI)
	if msg != "" { // otherwise let the bot to continue
		newMsg := models.RoleMsg{Role: role, Content: msg}
		chatBody.Messages = append(chatBody.Messages, newMsg)
		// if rag - add as system message to avoid conflicts with tool usage
		if cfg.RAGEnabled {
			ragResp, err := chatRagUse(newMsg.Content)
			if err != nil {
				logger.Error("failed to form a rag msg", "error", err)
				return nil, err
			}
			// Use system role for RAG context to avoid conflicts with tool usage
			ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
			chatBody.Messages = append(chatBody.Messages, ragMsg)
		}
	}
	if cfg.ToolUse && !resume {
		// inject the tool-use system message into the history
		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
	}
	// flatten the whole history into a single plain-text prompt
	messages := make([]string, len(chatBody.Messages))
	for i, m := range chatBody.Messages {
		messages[i] = m.ToPrompt()
	}
	prompt := strings.Join(messages, "\n")
	// strings builder?
	if !resume {
		// open a new assistant turn ("<persona>:\n") for the model to fill in
		botPersona := cfg.AssistantRole
		if cfg.WriteNextMsgAsCompletionAgent != "" {
			botPersona = cfg.WriteNextMsgAsCompletionAgent
		}
		botMsgStart := "\n" + botPersona + ":\n"
		prompt += botMsgStart
	}
	if cfg.ThinkUse && !cfg.ToolUse {
		// nudge the model into its reasoning block
		prompt += "<think>"
	}
	logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
		"msg", msg, "resume", resume, "prompt", prompt)
	payload := models.NewDSCompletionReq(prompt, chatBody.Model,
		defaultLCPProps["temp"], chatBody.MakeStopSlice())
	data, err := json.Marshal(payload)
	if err != nil {
		logger.Error("failed to form a msg", "error", err)
		return nil, err
	}
	return bytes.NewReader(data), nil
}
// ParseChunk decodes one DeepSeek /chat/completions streaming chunk. While
// the model is "thinking" the reasoning delta is surfaced as the chunk text;
// otherwise the regular content delta is used. Returns an error for malformed
// JSON or a chunk carrying no choices.
//
// Fix: the previous version indexed llmchunk.Choices[0] unconditionally and
// panicked on a chunk with an empty choices array.
func (ds DeepSeekerChat) ParseChunk(data []byte) (*models.TextChunk, error) {
	llmchunk := models.DSChatStreamResp{}
	if err := json.Unmarshal(data, &llmchunk); err != nil {
		logger.Error("failed to decode", "error", err, "line", string(data))
		return nil, err
	}
	if len(llmchunk.Choices) == 0 {
		return nil, fmt.Errorf("llm chunk has no choices; data: %s", string(data))
	}
	resp := &models.TextChunk{}
	delta := llmchunk.Choices[0].Delta
	if llmchunk.Choices[0].FinishReason != "" {
		// terminal chunk: not expected to carry text
		if delta.Content != "" {
			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
		}
		resp.Chunk = delta.Content
		resp.Finished = true
		return resp, nil
	}
	// stream reasoning ("thinking") tokens when present, else regular content
	if delta.ReasoningContent != "" {
		resp.Chunk = delta.ReasoningContent
	} else {
		resp.Chunk = delta.Content
	}
	return resp, nil
}
// GetToken returns the configured DeepSeek API token.
func (ds DeepSeekerChat) GetToken() string {
	return cfg.DeepSeekToken
}
// FormMsg builds the DeepSeek /chat/completions request body. A non-empty msg
// is appended to the shared chat history under role (plus a RAG system
// message when enabled); resume=true skips injecting the tool-use system
// message. The history is copied with user-facing roles standardized to
// "user" before serialization.
//
// Fix: the previous version, for user-role messages (and index 1), assigned
// only Role into the zero-valued copy slot — silently dropping the message
// Content from the request. The message is now copied first, then its role is
// standardized (matching the OpenRouterChat implementation).
func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
	logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI)
	if cfg.ToolUse && !resume {
		// inject the tool-use system message into the history
		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
	}
	if msg != "" { // otherwise let the bot continue
		newMsg := models.RoleMsg{Role: role, Content: msg}
		chatBody.Messages = append(chatBody.Messages, newMsg)
		// if rag - add as system message to avoid conflicts with tool usage
		if cfg.RAGEnabled {
			ragResp, err := chatRagUse(newMsg.Content)
			if err != nil {
				logger.Error("failed to form a rag msg", "error", err)
				return nil, err
			}
			// Use system role for RAG context to avoid conflicts with tool usage
			ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
			chatBody.Messages = append(chatBody.Messages, ragMsg)
		}
	}
	// Create copy of chat body with standardized user role
	bodyCopy := &models.ChatBody{
		Messages: make([]models.RoleMsg, len(chatBody.Messages)),
		Model:    chatBody.Model,
		Stream:   chatBody.Stream,
	}
	for i, m := range chatBody.Messages {
		logger.Debug("checking roles", "#", i, "role", m.Role)
		// copy the whole message first, THEN standardize the role; assigning
		// only Role would drop the Content (the old bug)
		bodyCopy.Messages[i] = m
		if m.Role == cfg.UserRole || i == 1 {
			bodyCopy.Messages[i].Role = "user"
			logger.Debug("replaced role in body", "#", i)
		}
	}
	dsBody := models.NewDSChatReq(*bodyCopy)
	data, err := json.Marshal(dsBody)
	if err != nil {
		logger.Error("failed to form a msg", "error", err)
		return nil, err
	}
	return bytes.NewReader(data), nil
}
// openrouter
// ParseChunk decodes one OpenRouter /completions streaming chunk, returning
// the text delta and the finish signal. Returns an error for malformed JSON
// or a chunk carrying no choices.
//
// Fix: the previous version indexed llmchunk.Choices[len-1] unconditionally
// and panicked on a chunk with an empty choices array.
func (or OpenRouterCompletion) ParseChunk(data []byte) (*models.TextChunk, error) {
	llmchunk := models.OpenRouterCompletionResp{}
	if err := json.Unmarshal(data, &llmchunk); err != nil {
		logger.Error("failed to decode", "error", err, "line", string(data))
		return nil, err
	}
	if len(llmchunk.Choices) == 0 {
		return nil, fmt.Errorf("llm chunk has no choices; data: %s", string(data))
	}
	last := llmchunk.Choices[len(llmchunk.Choices)-1]
	resp := &models.TextChunk{
		Chunk: last.Text,
	}
	if last.FinishReason == "stop" {
		if resp.Chunk != "" {
			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
		}
		resp.Finished = true
	}
	return resp, nil
}
// GetToken returns the configured OpenRouter API token.
func (or OpenRouterCompletion) GetToken() string {
	return cfg.OpenRouterToken
}
// FormMsg builds the OpenRouter /completions request body. A non-empty msg is
// appended to the shared chat history under role (plus a RAG system message
// when enabled); resume=true leaves the prompt open so the model continues
// the last message instead of starting a fresh assistant turn.
func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
	logger.Debug("formmsg openroutercompletion", "link", cfg.CurrentAPI)
	if msg != "" { // otherwise let the bot to continue
		newMsg := models.RoleMsg{Role: role, Content: msg}
		chatBody.Messages = append(chatBody.Messages, newMsg)
		// if rag - add as system message to avoid conflicts with tool usage
		if cfg.RAGEnabled {
			ragResp, err := chatRagUse(newMsg.Content)
			if err != nil {
				logger.Error("failed to form a rag msg", "error", err)
				return nil, err
			}
			// Use system role for RAG context to avoid conflicts with tool usage
			ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
			chatBody.Messages = append(chatBody.Messages, ragMsg)
		}
	}
	if cfg.ToolUse && !resume {
		// inject the tool-use system message into the history
		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
	}
	// flatten the whole history into a single plain-text prompt
	messages := make([]string, len(chatBody.Messages))
	for i, m := range chatBody.Messages {
		messages[i] = m.ToPrompt()
	}
	prompt := strings.Join(messages, "\n")
	// strings builder?
	if !resume {
		// open a new assistant turn ("<persona>:\n") for the model to fill in
		botPersona := cfg.AssistantRole
		if cfg.WriteNextMsgAsCompletionAgent != "" {
			botPersona = cfg.WriteNextMsgAsCompletionAgent
		}
		botMsgStart := "\n" + botPersona + ":\n"
		prompt += botMsgStart
	}
	if cfg.ThinkUse && !cfg.ToolUse {
		// nudge the model into its reasoning block
		prompt += "<think>"
	}
	ss := chatBody.MakeStopSlice()
	logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
		"msg", msg, "resume", resume, "prompt", prompt, "stop_strings", ss)
	payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, defaultLCPProps, ss)
	data, err := json.Marshal(payload)
	if err != nil {
		logger.Error("failed to form a msg", "error", err)
		return nil, err
	}
	return bytes.NewReader(data), nil
}
// chat
// ParseChunk decodes one OpenRouter /chat/completions streaming chunk,
// returning the content delta and the finish signal. Returns an error for
// malformed JSON or a chunk carrying no choices.
//
// Fix: the previous version indexed llmchunk.Choices[len-1] unconditionally
// and panicked on a chunk with an empty choices array.
func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
	llmchunk := models.OpenRouterChatResp{}
	if err := json.Unmarshal(data, &llmchunk); err != nil {
		logger.Error("failed to decode", "error", err, "line", string(data))
		return nil, err
	}
	if len(llmchunk.Choices) == 0 {
		return nil, fmt.Errorf("llm chunk has no choices; data: %s", string(data))
	}
	last := llmchunk.Choices[len(llmchunk.Choices)-1]
	resp := &models.TextChunk{
		Chunk: last.Delta.Content,
	}
	if last.FinishReason == "stop" {
		if resp.Chunk != "" {
			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
		}
		resp.Finished = true
	}
	return resp, nil
}
// GetToken returns the configured OpenRouter API token.
func (or OpenRouterChat) GetToken() string {
	return cfg.OpenRouterToken
}
// FormMsg builds the OpenRouter /chat/completions request body. A non-empty
// msg is appended to the shared chat history under role — as a multimodal
// (text + image) message when an image attachment is pending and readable —
// plus a RAG system message when enabled. The history is then deep-copied
// (via JSON round-trip, to preserve content parts) with user-facing roles
// standardized to "user" before serialization.
// NOTE(review): the debug text below says "completion" but this is the chat
// endpoint.
func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
	logger.Debug("formmsg open router completion", "link", cfg.CurrentAPI)
	// Capture the image attachment path at the beginning to avoid race conditions
	// with API rotation that might clear the global variable
	localImageAttachmentPath := imageAttachmentPath
	if cfg.ToolUse && !resume {
		// prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
		// add the tool-use system message to the chat body
		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
	}
	if msg != "" { // otherwise let the bot continue
		var newMsg models.RoleMsg
		// Check if we have an image to add to this message
		logger.Debug("checking for image attachment", "imageAttachmentPath", localImageAttachmentPath, "msg", msg, "role", role)
		if localImageAttachmentPath != "" {
			logger.Info("processing image attachment for OpenRouter", "path", localImageAttachmentPath, "msg", msg)
			// Check if file exists before attempting to create image URL
			if _, err := os.Stat(localImageAttachmentPath); os.IsNotExist(err) {
				logger.Error("image file does not exist", "path", localImageAttachmentPath)
				// Fallback to simple text message
				newMsg = models.NewRoleMsg(role, msg)
			} else if err != nil {
				logger.Error("error checking image file", "path", localImageAttachmentPath, "error", err)
				// Fallback to simple text message
				newMsg = models.NewRoleMsg(role, msg)
			} else {
				logger.Debug("image file exists, proceeding to create URL", "path", localImageAttachmentPath)
				// Create a multimodal message with both text and image
				newMsg = models.NewMultimodalMsg(role, []interface{}{})
				// Add the text content
				newMsg.AddTextPart(msg)
				// Add the image content
				imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
				if err != nil {
					logger.Error("failed to create image URL from path", "error", err, "path", localImageAttachmentPath)
					// If image processing fails, fall back to simple text message
					newMsg = models.NewRoleMsg(role, msg)
				} else {
					logger.Info("image URL created successfully for OpenRouter", "imageURL", imageURL[:min(len(imageURL), 50)]+"...")
					newMsg.AddImagePart(imageURL)
					// Only clear the global image attachment after successfully processing it in this API call
					imageAttachmentPath = "" // Clear the attachment after use
				}
			}
		} else {
			// Create a simple text message
			newMsg = models.NewRoleMsg(role, msg)
		}
		chatBody.Messages = append(chatBody.Messages, newMsg)
		// if rag - add as system message to avoid conflicts with tool usage
		if cfg.RAGEnabled {
			ragResp, err := chatRagUse(newMsg.Content)
			if err != nil {
				logger.Error("failed to form a rag msg", "error", err)
				return nil, err
			}
			// Use system role for RAG context to avoid conflicts with tool usage
			ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
			chatBody.Messages = append(chatBody.Messages, ragMsg)
		}
	}
	// Create copy of chat body with standardized user role
	bodyCopy := &models.ChatBody{
		Messages: make([]models.RoleMsg, len(chatBody.Messages)),
		Model:    chatBody.Model,
		Stream:   chatBody.Stream,
	}
	for i, msg := range chatBody.Messages {
		logger.Debug("checking roles", "#", i, "role", msg.Role)
		// Debug-only inspection: marshal each message and peek at its content
		// field to log whether it is multimodal.
		// NOTE(review): this JSON round-trip runs for every message on every
		// request; consider gating it behind a debug flag.
		msgBytes, err := json.Marshal(msg)
		if err != nil {
			logger.Error("failed to serialize message for inspection", "error", err)
			// Fallback to direct assignment
			bodyCopy.Messages[i] = msg
		} else {
			// Try to deserialize to check if it has content parts
			var tempMsg map[string]interface{}
			if err := json.Unmarshal(msgBytes, &tempMsg); err != nil {
				logger.Error("failed to inspect message structure", "error", err)
				bodyCopy.Messages[i] = msg
			} else {
				// Check if content is an array (indicating content parts) or string (simple content)
				if content, ok := tempMsg["content"]; ok {
					if _, isArray := content.([]interface{}); isArray {
						logger.Info("multimodal message detected", "#", i, "role", msg.Role)
						// Deserialize to RoleMsg to access ContentParts
						var detailedMsg models.RoleMsg
						if err := json.Unmarshal(msgBytes, &detailedMsg); err == nil {
							if len(detailedMsg.ContentParts) > 0 {
								for j, part := range detailedMsg.ContentParts {
									if textPart, ok := part.(models.TextContentPart); ok {
										logger.Debug("text content part", "msg#", i, "part#", j, "text", textPart.Text)
									} else if imgPart, ok := part.(models.ImageContentPart); ok {
										logger.Info("image content part", "msg#", i, "part#", j, "url", imgPart.ImageURL.URL[:min(len(imgPart.ImageURL.URL), 50)]+"...")
									} else {
										logger.Debug("other content part", "msg#", i, "part#", j, "type", fmt.Sprintf("%T", part))
									}
								}
							}
						}
					}
				}
			}
		}
		// Create a proper copy of the message that preserves all internal state
		// First, serialize and deserialize to ensure content parts are preserved
		copyMsgBytes, err := json.Marshal(msg)
		if err != nil {
			logger.Error("failed to serialize message", "error", err)
			// Fallback to direct assignment
			bodyCopy.Messages[i] = msg
		} else {
			// Deserialize back to preserve all internal state
			var copiedMsg models.RoleMsg
			err := json.Unmarshal(copyMsgBytes, &copiedMsg)
			if err != nil {
				logger.Error("failed to deserialize message", "error", err)
				// Fallback to direct assignment
				bodyCopy.Messages[i] = msg
			} else {
				bodyCopy.Messages[i] = copiedMsg
			}
		}
		// Standardize role if it's a user role or first message
		// (index 1 is presumably the first user turn after the system prompt —
		// confirm against how the chat history is constructed)
		if bodyCopy.Messages[i].Role == cfg.UserRole || i == 1 {
			bodyCopy.Messages[i].Role = "user"
			logger.Debug("replaced role in body", "#", i)
		}
	}
	// Log the final request body before sending to OpenRouter
	orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps)
	data, err := json.Marshal(orBody)
	if err != nil {
		logger.Error("failed to form a msg", "error", err)
		return nil, err
	}
	logger.Info("OpenRouter request prepared", "messages_count", len(orBody.Messages))
	// Debug-only walk over the final messages to log multimodal structure;
	// does not alter the already-marshaled request data.
	for i, msg := range orBody.Messages {
		// Check if this final message has content parts (multimodal)
		msgBytes, err := json.Marshal(msg)
		if err == nil {
			var tempMsg map[string]interface{}
			if err := json.Unmarshal(msgBytes, &tempMsg); err == nil {
				if content, ok := tempMsg["content"]; ok {
					if _, isArray := content.([]interface{}); isArray {
						logger.Debug("final message", "#", i, "role", msg.Role, "hasContentParts", true)
						// Deserialize to access content parts
						var detailedMsg models.RoleMsg
						if err := json.Unmarshal(msgBytes, &detailedMsg); err == nil {
							if len(detailedMsg.ContentParts) > 0 {
								for j, part := range detailedMsg.ContentParts {
									if textPart, ok := part.(models.TextContentPart); ok {
										logger.Debug("final text part", "msg#", i, "part#", j, "text", textPart.Text)
									} else if imgPart, ok := part.(models.ImageContentPart); ok {
										logger.Info("final image part sent to OpenRouter", "msg#", i, "part#", j, "url", imgPart.ImageURL.URL[:min(len(imgPart.ImageURL.URL), 50)]+"...")
									} else {
										logger.Debug("final other part", "msg#", i, "part#", j, "type", fmt.Sprintf("%T", part))
									}
								}
							}
						}
					} else {
						logger.Debug("final message", "#", i, "role", msg.Role, "hasContentParts", false)
					}
				}
			}
		}
	}
	return bytes.NewReader(data), nil
}