gralias/llmapi/parser.go

package llmapi

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log/slog"
	"strings"
	"sync/atomic"
)
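
// RespParser abstracts a provider-specific backend: MakePayload builds the
// request body for a prompt and ParseBytes pulls the JSON object out of the
// model's reply. A minimal usage sketch (the logger and the HTTP round trip
// are assumed to exist elsewhere; body stands for the raw response bytes):
//
//	parser := NewDeepSeekParser(logger)
//	payload := parser.MakePayload("list three colors as a json object")
//	// ... POST payload to the provider and read the response into body ...
//	respMap, err := parser.ParseBytes(body)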
type RespParser interface {
	ParseBytes(body []byte) (map[string]any, error)
	MakePayload(prompt string) io.Reader
}

// deepSeekParser: DeepSeek implementation of RespParser.
type deepSeekParser struct {
	log *slog.Logger
}

func NewDeepSeekParser(log *slog.Logger) *deepSeekParser {
	return &deepSeekParser{log: log}
}

func (p *deepSeekParser) ParseBytes(body []byte) (map[string]any, error) {
	dsResp := DSResp{}
	if err := json.Unmarshal(body, &dsResp); err != nil {
		p.log.Error("failed to unmarshal", "error", err)
		return nil, err
	}
	if len(dsResp.Choices) == 0 {
		p.log.Error("empty choices", "dsResp", dsResp)
		return nil, errors.New("empty choices in dsResp")
	}
	// The model may wrap the JSON in extra prose; keep only the outermost
	// {...} span before unmarshalling it into a map.
	text := dsResp.Choices[0].Text
	li := strings.Index(text, "{")
	ri := strings.LastIndex(text, "}")
	if li < 0 || ri <= li {
		p.log.Error("not a json", "msg", text)
		return nil, fmt.Errorf("fn: ParseBytes, not a json; data: %s", text)
	}
	sj := text[li : ri+1]
	respMap := make(map[string]any)
	if err := json.Unmarshal([]byte(sj), &respMap); err != nil {
		p.log.Error("failed to unmarshal response", "error", err, "string-json", sj)
		return nil, err
	}
	return respMap, nil
}

func (p *deepSeekParser) MakePayload(prompt string) io.Reader {
	return strings.NewReader(fmt.Sprintf(`{
		"model": "deepseek-chat",
		"prompt": "%s",
		"echo": false,
		"frequency_penalty": 0,
		"logprobs": 0,
		"max_tokens": 1024,
		"presence_penalty": 0,
		"stop": null,
		"stream": false,
		"stream_options": null,
		"suffix": null,
		"temperature": 1,
		"top_p": 1
	}`, prompt))
}
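
// The payloads in this file splice the prompt into a raw JSON template with
// fmt.Sprintf, which assumes the prompt contains no quotes, newlines, or other
// characters that would need JSON escaping. An alternative sketch
// (illustrative only, not wired into the parsers here): let json.Marshal do
// the escaping. The field set loosely mirrors the completion payload above.
func makeJSONPayload(model, prompt string) (io.Reader, error) {
	body := map[string]any{
		"model":      model,
		"prompt":     prompt,
		"max_tokens": 1024,
		"stream":     false,
	}
	b, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}
	// strings.NewReader keeps the same io.Reader return shape as MakePayload.
	return strings.NewReader(string(b)), nil
}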

// lcpRespParser: llama.cpp implementation of RespParser.
type lcpRespParser struct {
	log *slog.Logger
}

func NewLCPRespParser(log *slog.Logger) *lcpRespParser {
	return &lcpRespParser{log: log}
}

func (p *lcpRespParser) ParseBytes(body []byte) (map[string]any, error) {
	resp := LLMResp{}
	if err := json.Unmarshal(body, &resp); err != nil {
		p.log.Error("failed to unmarshal", "error", err)
		return nil, err
	}
	// Keep only the outermost {...} span of the completion text.
	text := resp.Content
	li := strings.Index(text, "{")
	ri := strings.LastIndex(text, "}")
	if li < 0 || ri <= li {
		p.log.Error("not a json", "msg", text)
		return nil, fmt.Errorf("fn: ParseBytes, not a json; data: %s", text)
	}
	sj := text[li : ri+1]
	respMap := make(map[string]any)
	if err := json.Unmarshal([]byte(sj), &respMap); err != nil {
		p.log.Error("failed to unmarshal response", "error", err, "string-json", sj)
		return nil, err
	}
	return respMap, nil
}

func (p *lcpRespParser) MakePayload(prompt string) io.Reader {
	return strings.NewReader(fmt.Sprintf(`{
		"model": "local-model",
		"prompt": "%s",
		"frequency_penalty": 0,
		"max_tokens": 1024,
		"stop": null,
		"stream": false,
		"temperature": 0.4,
		"top_p": 1
	}`, prompt))
}

// openRouterParser: OpenRouter implementation of RespParser; rotates through
// a list of free models on every payload.
type openRouterParser struct {
	log        *slog.Logger
	modelIndex uint32
}

func NewOpenRouterParser(log *slog.Logger) *openRouterParser {
	return &openRouterParser{
		log:        log,
		modelIndex: 0,
	}
}

func (p *openRouterParser) ParseBytes(body []byte) (map[string]any, error) {
	resp := OpenRouterResp{}
	if err := json.Unmarshal(body, &resp); err != nil {
		p.log.Error("failed to unmarshal", "error", err)
		return nil, err
	}
	if len(resp.Choices) == 0 {
		p.log.Error("empty choices", "resp", resp)
		return nil, errors.New("empty choices in resp")
	}
	// Keep only the outermost {...} span of the message content.
	text := resp.Choices[0].Message.Content
	li := strings.Index(text, "{")
	ri := strings.LastIndex(text, "}")
	if li < 0 || ri <= li {
		p.log.Error("not a json", "msg", text)
		return nil, fmt.Errorf("fn: ParseBytes, not a json; data: %s", text)
	}
	sj := text[li : ri+1]
	respMap := make(map[string]any)
	if err := json.Unmarshal([]byte(sj), &respMap); err != nil {
		p.log.Error("failed to unmarshal response", "error", err, "string-json", sj)
		return nil, err
	}
	return respMap, nil
}

func (p *openRouterParser) MakePayload(prompt string) io.Reader {
	// Models to rotate through.
	models := []string{
		"google/gemini-2.0-flash-exp:free",
		"deepseek/deepseek-chat-v3-0324:free",
		"mistralai/mistral-small-3.2-24b-instruct:free",
		"qwen/qwen3-14b:free",
		"deepseek/deepseek-r1:free",
		"google/gemma-3-27b-it:free",
		"meta-llama/llama-3.3-70b-instruct:free",
	}
	// Get the next model index using atomic addition so concurrent callers
	// rotate safely.
	idx := atomic.AddUint32(&p.modelIndex, 1)
	model := models[int(idx)%len(models)]
	strPayload := fmt.Sprintf(`{
		"model": "%s",
		"messages": [
			{
				"role": "user",
				"content": "%s"
			}
		]
	}`, model, prompt)
	p.log.Debug("made openrouter payload", "model", model, "payload", strPayload)
	return strings.NewReader(strPayload)
}
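
// Illustrative only: each MakePayload call advances the shared counter, so
// successive calls (from any goroutine) walk through the models list in order
// and wrap around after the last entry. With a fresh parser, assuming a
// logger is available:
//
//	p := NewOpenRouterParser(logger)
//	_ = p.MakePayload("first prompt")  // uses deepseek/deepseek-chat-v3-0324:free
//	_ = p.MakePayload("second prompt") // uses mistralai/mistral-small-3.2-24b-instruct:free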