Fix: parser

Author: Grail Finder
Date: 2025-08-09 14:08:50 +03:00
parent e499a1ae37
commit 975183d684

parser.go (124 changed lines)
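The fix itself: each MakePayload previously spliced the prompt into a JSON template via fmt.Sprintf, which yields invalid JSON as soon as the prompt contains a double quote, backslash, or newline. The commit replaces those templates with structs marshaled through encoding/json, which escapes such characters correctly. A minimal standalone sketch of the failure mode (illustrative code, not from the commit):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	prompt := "say \"hi\"\nthen stop" // a quote and a newline, as user prompts often have

	// Old approach: splice the prompt into a JSON template.
	raw := fmt.Sprintf(`{"prompt": "%s"}`, prompt)
	fmt.Println(json.Valid([]byte(raw))) // false: the quote and newline are unescaped

	// New approach: let encoding/json do the escaping.
	b, _ := json.Marshal(map[string]string{"prompt": prompt})
	fmt.Println(json.Valid(b)) // true
}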

@@ -1,13 +1,12 @@
package main
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"grailbench/models"
"io"
"log/slog"
"strings"
)
type RespParser interface {
@@ -41,22 +40,43 @@ func (p *deepSeekParser) ParseBytes(body []byte) (string, error) {
}
func (p *deepSeekParser) MakePayload(prompt string) io.Reader {
return strings.NewReader(fmt.Sprintf(`{
"model": "deepseek-chat",
"prompt": "%s",
"echo": false,
"frequency_penalty": 0,
"logprobs": 0,
"max_tokens": 1024,
"presence_penalty": 0,
"stop": null,
"stream": false,
"stream_options": null,
"suffix": null,
"temperature": 1,
"n_probs": 10,
"top_p": 1
}`, prompt))
payload := struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Echo bool `json:"echo"`
FrequencyPenalty float64 `json:"frequency_penalty"`
Logprobs int `json:"logprobs"`
MaxTokens int `json:"max_tokens"`
PresencePenalty float64 `json:"presence_penalty"`
Stop interface{} `json:"stop"`
Stream bool `json:"stream"`
StreamOptions interface{} `json:"stream_options"`
Suffix interface{} `json:"suffix"`
Temperature float64 `json:"temperature"`
NProbs int `json:"n_probs"`
TopP float64 `json:"top_p"`
}{
Model: "deepseek-chat",
Prompt: prompt,
Echo: false,
FrequencyPenalty: 0,
Logprobs: 0,
MaxTokens: 1024,
PresencePenalty: 0,
Stop: nil,
Stream: false,
StreamOptions: nil,
Suffix: nil,
Temperature: 1,
NProbs: 10,
TopP: 1,
}
b, err := json.Marshal(payload)
if err != nil {
p.log.Error("failed to marshal deepseek payload", "error", err)
return nil
}
return bytes.NewReader(b)
}
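A design note on the error path above: when json.Marshal fails, MakePayload logs and returns a nil io.Reader, so every caller has to remember to check for nil. An alternative is to surface the error; a sketch with a hypothetical shared helper (not part of the commit, it would require RespParser to return an error, and it assumes the bytes, encoding/json, fmt, and io imports parser.go already has):

// makeJSONPayload is a hypothetical helper: marshal the payload
// and return the error instead of a nil reader.
func makeJSONPayload(v any) (io.Reader, error) {
	b, err := json.Marshal(v)
	if err != nil {
		return nil, fmt.Errorf("marshal payload: %w", err)
	}
	return bytes.NewReader(b), nil
}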
// llama.cpp implementation of RespParser
@@ -79,16 +99,33 @@ func (p *lcpRespParser) ParseBytes(body []byte) (string, error) {
}
func (p *lcpRespParser) MakePayload(prompt string) io.Reader {
return strings.NewReader(fmt.Sprintf(`{
"model": "local-model",
"prompt": "%s",
"frequency_penalty": 0,
"max_tokens": 1024,
"stop": ["Q:\n", "A:\n"],
"stream": false,
"temperature": 0.4,
"top_p": 1
}`, prompt))
payload := struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
FrequencyPenalty float64 `json:"frequency_penalty"`
MaxTokens int `json:"max_tokens"`
Stop []string `json:"stop"`
Stream bool `json:"stream"`
Temperature float64 `json:"temperature"`
TopP float64 `json:"top_p"`
}{
Model: "local-model",
Prompt: prompt,
FrequencyPenalty: 0,
MaxTokens: 1024,
Stop: []string{"Q:\n", "A:\n"},
Stream: false,
Temperature: 0.4,
TopP: 1,
}
b, err := json.Marshal(payload)
if err != nil {
// Marshaling this fixed struct should not fail, but handle the error anyway.
p.log.Error("failed to marshal lcp payload", "error", err)
return nil
}
return bytes.NewReader(b)
}
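One subtlety in this hunk: in the old raw-string template, the stop entries contained a literal backslash-n (already a valid JSON escape), while the new Go literals "Q:\n" hold a real newline character that json.Marshal re-escapes on output, so the bytes on the wire are unchanged. A quick standalone check:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	b, _ := json.Marshal([]string{"Q:\n", "A:\n"})
	fmt.Println(string(b)) // ["Q:\n","A:\n"], the same bytes the old template produced
}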
type openRouterParser struct {
@@ -121,20 +158,23 @@ func (p *openRouterParser) ParseBytes(body []byte) (string, error) {
func (p *openRouterParser) MakePayload(prompt string) io.Reader {
// Models to rotate through
models := []string{
"google/gemini-flash-1.5",
"deepseek/deepseek-coder",
"mistralai/mistral-7b-instruct",
"qwen/qwen-72b-chat",
"meta-llama/llama-3-8b-instruct",
}
// TODO: move the model name to config
model := "deepseek/deepseek-r1:free"
// Get next model index (note: this plain increment is not atomic; see the sketch after this hunk)
p.modelIndex++
model := models[int(p.modelIndex)%len(models)]
strPayload := fmt.Sprintf(`{
"model": "%s",
"prompt": "%s"
}`, model, prompt)
p.log.Debug("made openrouter payload", "model", model, "payload", strPayload)
return strings.NewReader(strPayload)
payload := struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
}{
Model: model,
Prompt: prompt,
}
b, err := json.Marshal(payload)
if err != nil {
p.log.Error("failed to marshal openrouter payload", "error", err)
return nil
}
p.log.Debug("made openrouter payload", "model", model, "payload", string(b))
return bytes.NewReader(b)
}
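Last, the rotation above is not actually thread-safe despite the original comment: p.modelIndex++ is a plain read-modify-write, so concurrent MakePayload calls can race. If the rotation survives the TODO (the hunk also hardcodes "deepseek/deepseek-r1:free"), a counter from sync/atomic fixes it; a sketch with illustrative names (atomic.Uint64 needs Go 1.19+):

package main

import (
	"fmt"
	"sync/atomic"
)

// modelRotator is a hypothetical stand-in for the parser's rotation state.
type modelRotator struct {
	idx    atomic.Uint64
	models []string
}

// next returns models in round-robin order, safe across goroutines.
func (r *modelRotator) next() string {
	n := r.idx.Add(1) - 1 // Add returns the new value, so subtract 1 to start at 0
	return r.models[n%uint64(len(r.models))]
}

func main() {
	r := &modelRotator{models: []string{"model/a", "model/b"}}
	fmt.Println(r.next(), r.next(), r.next()) // model/a model/b model/a
}

Alternatively, p.modelIndex could stay a plain int guarded by a sync.Mutex, or the rotation state can simply be deleted once the model name moves to config.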