init
parser.go (new file, 146 lines)

@@ -0,0 +1,146 @@
package main

import (
    "encoding/json"
    "errors"
    "fmt"
    "io"
    "log/slog"
    "strings"
    "sync/atomic"
)

type RespParser interface {
    ParseBytes(body []byte) (string, error)
    MakePayload(prompt string) io.Reader
}
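
// Hypothetical usage sketch (not part of this commit): a caller builds the
// request body with MakePayload, sends it to the provider's HTTP endpoint,
// and hands the raw response bytes to ParseBytes. The doRequest callback is
// an assumed stand-in for whatever HTTP client code the project actually uses.
func askLLM(p RespParser, prompt string, doRequest func(io.Reader) ([]byte, error)) (string, error) {
    body, err := doRequest(p.MakePayload(prompt))
    if err != nil {
        return "", err
    }
    return p.ParseBytes(body)
}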

// deepSeekParser: DeepSeek implementation of RespParser.
type deepSeekParser struct {
    log *slog.Logger
}

func NewDeepSeekParser(log *slog.Logger) *deepSeekParser {
    return &deepSeekParser{log: log}
}

func (p *deepSeekParser) ParseBytes(body []byte) (string, error) {
    dsResp := DSResp{}
    if err := json.Unmarshal(body, &dsResp); err != nil {
        p.log.Error("failed to unmarshal", "error", err)
        return "", err
    }
    if len(dsResp.Choices) == 0 {
        p.log.Error("empty choices", "dsResp", dsResp)
        err := errors.New("empty choices in dsResp")
        return "", err
    }
    text := dsResp.Choices[0].Text
    return text, nil
}

func (p *deepSeekParser) MakePayload(prompt string) io.Reader {
    return strings.NewReader(fmt.Sprintf(`{
        "model": "deepseek-chat",
        "prompt": "%s",
        "echo": false,
        "frequency_penalty": 0,
        "logprobs": 0,
        "max_tokens": 1024,
        "presence_penalty": 0,
        "stop": null,
        "stream": false,
        "stream_options": null,
        "suffix": null,
        "temperature": 1,
        "top_p": 1
    }`, prompt))
}
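
// The fmt.Sprintf template above splices the prompt straight into a JSON
// string, so a prompt containing quotes or newlines yields invalid JSON.
// A hypothetical alternative (not part of this commit) is to build the body
// with json.Marshal, which escapes the prompt correctly; the field values
// below simply mirror the deepseek-chat template as an example.
func marshalDeepSeekPayload(prompt string) (io.Reader, error) {
    payload, err := json.Marshal(map[string]any{
        "model":      "deepseek-chat",
        "prompt":     prompt,
        "max_tokens": 1024,
        "stream":     false,
    })
    if err != nil {
        return nil, err
    }
    return strings.NewReader(string(payload)), nil
}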

// lcpRespParser: llama.cpp implementation of RespParser.
type lcpRespParser struct {
    log *slog.Logger
}

func NewLCPRespParser(log *slog.Logger) *lcpRespParser {
    return &lcpRespParser{log: log}
}

func (p *lcpRespParser) ParseBytes(body []byte) (string, error) {
    resp := LLMResp{}
    if err := json.Unmarshal(body, &resp); err != nil {
        p.log.Error("failed to unmarshal", "error", err)
        return "", err
    }
    return resp.Content, nil
}

func (p *lcpRespParser) MakePayload(prompt string) io.Reader {
    return strings.NewReader(fmt.Sprintf(`{
        "model": "local-model",
        "prompt": "%s",
        "frequency_penalty": 0,
        "max_tokens": 1024,
        "stop": ["Q:\n", "A:\n"],
        "stream": false,
        "temperature": 0.4,
        "top_p": 1
    }`, prompt))
}

// openRouterParser: OpenRouter implementation of RespParser. It rotates
// through a list of free models, advancing modelIndex on every payload.
type openRouterParser struct {
    log        *slog.Logger
    modelIndex uint32
}

func NewOpenRouterParser(log *slog.Logger) *openRouterParser {
    return &openRouterParser{
        log:        log,
        modelIndex: 0,
    }
}

func (p *openRouterParser) ParseBytes(body []byte) (string, error) {
    resp := OpenRouterResp{}
    if err := json.Unmarshal(body, &resp); err != nil {
        p.log.Error("failed to unmarshal", "error", err)
        return "", err
    }
    if len(resp.Choices) == 0 {
        p.log.Error("empty choices", "resp", resp)
        err := errors.New("empty choices in resp")
        return "", err
    }
    text := resp.Choices[0].Message.Content
    return text, nil
}

func (p *openRouterParser) MakePayload(prompt string) io.Reader {
    // Models to rotate through.
    models := []string{
        "google/gemini-2.0-flash-exp:free",
        "deepseek/deepseek-chat-v3-0324:free",
        "mistralai/mistral-small-3.2-24b-instruct:free",
        "qwen/qwen3-14b:free",
        "deepseek/deepseek-r1:free",
        "google/gemma-3-27b-it:free",
        "meta-llama/llama-3.3-70b-instruct:free",
    }
    // Get the next model index with an atomic add so concurrent callers are safe.
    idx := atomic.AddUint32(&p.modelIndex, 1)
    model := models[idx%uint32(len(models))]
    strPayload := fmt.Sprintf(`{
        "model": "%s",
        "max_tokens": 300,
        "messages": [
            {
                "role": "user",
                "content": "%s"
            }
        ]
    }`, model, prompt)
    p.log.Debug("made openrouter payload", "model", model, "payload", strPayload)
    return strings.NewReader(strPayload)
}
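
// Hypothetical wiring helper (not part of this commit): pick a parser by
// provider name. The provider strings here are assumptions for illustration;
// the constructors are the ones defined above.
func parserFor(provider string, log *slog.Logger) (RespParser, error) {
    switch provider {
    case "deepseek":
        return NewDeepSeekParser(log), nil
    case "llamacpp":
        return NewLCPRespParser(log), nil
    case "openrouter":
        return NewOpenRouterParser(log), nil
    default:
        return nil, fmt.Errorf("unknown provider %q", provider)
    }
}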