Fix: parser

Replace the fmt.Sprintf JSON templates in the MakePayload
implementations with structs marshalled via encoding/json, so prompts
containing quotes or newlines are escaped correctly. Also drop the
OpenRouter model rotation list in favour of a single hard-coded model
(moving it to config is a TODO).
 parser.go | 124 ++++++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 82 insertions(+), 42 deletions(-)

--- a/parser.go
+++ b/parser.go
@@ -1,13 +1,12 @@
 package main
 
 import (
+	"bytes"
 	"encoding/json"
 	"errors"
-	"fmt"
 	"grailbench/models"
 	"io"
 	"log/slog"
-	"strings"
 )
 
 type RespParser interface {
@@ -41,22 +40,43 @@ func (p *deepSeekParser) ParseBytes(body []byte) (string, error) {
 }
 
 func (p *deepSeekParser) MakePayload(prompt string) io.Reader {
-	return strings.NewReader(fmt.Sprintf(`{
-	"model": "deepseek-chat",
-	"prompt": "%s",
-	"echo": false,
-	"frequency_penalty": 0,
-	"logprobs": 0,
-	"max_tokens": 1024,
-	"presence_penalty": 0,
-	"stop": null,
-	"stream": false,
-	"stream_options": null,
-	"suffix": null,
-	"temperature": 1,
-	"n_probs": 10,
-	"top_p": 1
-	}`, prompt))
+	payload := struct {
+		Model            string      `json:"model"`
+		Prompt           string      `json:"prompt"`
+		Echo             bool        `json:"echo"`
+		FrequencyPenalty float64     `json:"frequency_penalty"`
+		Logprobs         int         `json:"logprobs"`
+		MaxTokens        int         `json:"max_tokens"`
+		PresencePenalty  float64     `json:"presence_penalty"`
+		Stop             interface{} `json:"stop"`
+		Stream           bool        `json:"stream"`
+		StreamOptions    interface{} `json:"stream_options"`
+		Suffix           interface{} `json:"suffix"`
+		Temperature      float64     `json:"temperature"`
+		NProbs           int         `json:"n_probs"`
+		TopP             float64     `json:"top_p"`
+	}{
+		Model:            "deepseek-chat",
+		Prompt:           prompt,
+		Echo:             false,
+		FrequencyPenalty: 0,
+		Logprobs:         0,
+		MaxTokens:        1024,
+		PresencePenalty:  0,
+		Stop:             nil,
+		Stream:           false,
+		StreamOptions:    nil,
+		Suffix:           nil,
+		Temperature:      1,
+		NProbs:           10,
+		TopP:             1,
+	}
+	b, err := json.Marshal(payload)
+	if err != nil {
+		p.log.Error("failed to marshal deepseek payload", "error", err)
+		return nil
+	}
+	return bytes.NewReader(b)
 }
 
 // llama.cpp implementation of RespParser
@@ -79,16 +99,33 @@ func (p *lcpRespParser) ParseBytes(body []byte) (string, error) {
 }
 
 func (p *lcpRespParser) MakePayload(prompt string) io.Reader {
-	return strings.NewReader(fmt.Sprintf(`{
-	"model": "local-model",
-	"prompt": "%s",
-	"frequency_penalty": 0,
-	"max_tokens": 1024,
-	"stop": ["Q:\n", "A:\n"],
-	"stream": false,
-	"temperature": 0.4,
-	"top_p": 1
-	}`, prompt))
+	payload := struct {
+		Model            string   `json:"model"`
+		Prompt           string   `json:"prompt"`
+		FrequencyPenalty float64  `json:"frequency_penalty"`
+		MaxTokens        int      `json:"max_tokens"`
+		Stop             []string `json:"stop"`
+		Stream           bool     `json:"stream"`
+		Temperature      float64  `json:"temperature"`
+		TopP             float64  `json:"top_p"`
+	}{
+		Model:            "local-model",
+		Prompt:           prompt,
+		FrequencyPenalty: 0,
+		MaxTokens:        1024,
+		Stop:             []string{"Q:\n", "A:\n"},
+		Stream:           false,
+		Temperature:      0.4,
+		TopP:             1,
+	}
+
+	b, err := json.Marshal(payload)
+	if err != nil {
+		// This should not happen for this struct, but good practice to handle.
+		p.log.Error("failed to marshal lcp payload", "error", err)
+		return nil
+	}
+	return bytes.NewReader(b)
 }
 
 type openRouterParser struct {
@@ -121,20 +158,23 @@ func (p *openRouterParser) ParseBytes(body []byte) (string, error) {
 
 func (p *openRouterParser) MakePayload(prompt string) io.Reader {
 	// Models to rotate through
-	models := []string{
-		"google/gemini-flash-1.5",
-		"deepseek/deepseek-coder",
-		"mistralai/mistral-7b-instruct",
-		"qwen/qwen-72b-chat",
-		"meta-llama/llama-3-8b-instruct",
-	}
+	// TODO: to config
+	model := "deepseek/deepseek-r1:free"
 	// Get next model index using atomic addition for thread safety
 	p.modelIndex++
-	model := models[int(p.modelIndex)%len(models)]
-	strPayload := fmt.Sprintf(`{
-	"model": "%s",
-	"prompt": "%s"
-	}`, model, prompt)
-	p.log.Debug("made openrouter payload", "model", model, "payload", strPayload)
-	return strings.NewReader(strPayload)
+	payload := struct {
+		Model  string `json:"model"`
+		Prompt string `json:"prompt"`
+	}{
+		Model:  model,
+		Prompt: prompt,
+	}
+
+	b, err := json.Marshal(payload)
+	if err != nil {
+		p.log.Error("failed to marshal openrouter payload", "error", err)
+		return nil
+	}
+	p.log.Debug("made openrouter payload", "model", model, "payload", string(b))
+	return bytes.NewReader(b)
 }
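
Why the change matters: the old fmt.Sprintf templates interpolated the prompt straight into a JSON string literal, so any prompt containing a double quote, backslash, or newline produced an invalid request body, while json.Marshal escapes those characters. A minimal standalone sketch (not part of this commit, using only the standard library) showing the difference:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	prompt := "line one\nsay \"hi\""

	// Old approach: interpolate the prompt into a JSON template.
	// The raw quote and newline are copied in unescaped, so the
	// result is not valid JSON.
	old := fmt.Sprintf(`{"prompt": "%s"}`, prompt)
	fmt.Println(json.Valid([]byte(old))) // false

	// New approach: let the encoder escape the prompt.
	b, err := json.Marshal(struct {
		Prompt string `json:"prompt"`
	}{Prompt: prompt})
	if err != nil {
		panic(err)
	}
	fmt.Println(json.Valid(b)) // true
	fmt.Println(string(b))     // {"prompt":"line one\nsay \"hi\""}
}

One caveat in the new code: on a marshal failure each MakePayload returns a nil io.Reader, so callers must check for nil before using it as a request body.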