Enhance: OpenRouter free models

This commit is contained in:
Grail Finder
2025-09-05 15:10:40 +03:00
parent 53dc5a5e8d
commit 0276000bfa
4 changed files with 195 additions and 35 deletions

View File

@@ -152,68 +152,99 @@ func (p *lcpRespParser) MakePayload(prompt string) io.Reader {
// openRouterParser builds request payloads for the OpenRouter API and
// parses its responses. It supports both the chat-completions API
// (messages format, tool calls) and the legacy text-completions API.
type openRouterParser struct {
	log        *slog.Logger
	modelIndex uint32 // index for rotating through models; incremented per payload — NOTE(review): plain increment, not atomic; confirm single-goroutine use
	useChatAPI bool   // when true, use the chat-completions endpoint format; otherwise plain completions
	supportsTools bool // when true, tool definitions may be attached to requests
}
// NewOpenRouterParser returns a parser wired to the given logger.
// The zero values of the remaining fields are the intended defaults:
// model rotation starts at index 0, the completion API is used (it is
// more widely supported than chat), and tool support is not assumed.
func NewOpenRouterParser(log *slog.Logger) *openRouterParser {
	p := &openRouterParser{log: log}
	return p
}
func (p *openRouterParser) ParseBytes(body []byte) (string, error) {
// parsing logic here
resp := models.DSResp{}
if err := json.Unmarshal(body, &resp); err != nil {
p.log.Error("failed to unmarshal", "error", err)
return "", err
}
if len(resp.Choices) == 0 {
p.log.Error("empty choices", "resp", resp)
err := errors.New("empty choices in resp")
return "", err
}
// Check if the response contains tool calls
choice := resp.Choices[0]
// Handle response with message field (OpenAI format)
if choice.Message.Role != "" {
// If using chat API, parse as chat completion response (supports tool calls)
if p.useChatAPI {
resp := models.ORChatResp{}
if err := json.Unmarshal(body, &resp); err != nil {
p.log.Error("failed to unmarshal openrouter chat response", "error", err)
return "", err
}
if len(resp.Choices) == 0 {
p.log.Error("empty choices in openrouter chat response", "resp", resp)
err := errors.New("empty choices in openrouter chat response")
return "", err
}
choice := resp.Choices[0]
// Check if the response contains tool calls
if len(choice.Message.ToolCalls) > 0 {
// Handle tool call response
toolCall := choice.Message.ToolCalls[0]
// Return a special marker indicating tool usage
return fmt.Sprintf("[TOOL_CALL:%s]", toolCall.Function.Name), nil
}
// Regular text response
return choice.Message.Content, nil
}
// Handle response with text field (legacy format)
return choice.Text, nil
// If using completion API, parse as text completion response (no tool calls)
resp := models.ORCompletionResp{}
if err := json.Unmarshal(body, &resp); err != nil {
p.log.Error("failed to unmarshal openrouter completion response", "error", err)
return "", err
}
if len(resp.Choices) == 0 {
p.log.Error("empty choices in openrouter completion response", "resp", resp)
err := errors.New("empty choices in openrouter completion response")
return "", err
}
// Return the text content
return resp.Choices[0].Text, nil
}
// MakePayload serializes prompt into an OpenRouter request body.
//
// When useChatAPI is set it produces a chat-completions payload
// (messages array); otherwise a plain completions payload (prompt
// string). On marshal failure it logs the error and returns nil.
//
// NOTE(review): the original text was diff residue — the anonymous
// payload struct declared Model/Prompt twice and kept a removed Tools
// field referencing baseTools; this is the reconstructed post-diff
// version. The model is hard-coded here — TODO: move to config.
func (p *openRouterParser) MakePayload(prompt string) io.Reader {
	if p.useChatAPI {
		// Chat-completions payload with messages format (supports tool calls).
		payload := struct {
			Model    string           `json:"model"`
			Messages []models.RoleMsg `json:"messages"`
		}{
			Model: "openai/gpt-4o-mini",
			Messages: []models.RoleMsg{
				{Role: "user", Content: prompt},
			},
		}
		b, err := json.Marshal(payload)
		if err != nil {
			p.log.Error("failed to marshal openrouter chat payload", "error", err)
			return nil
		}
		p.log.Debug("made openrouter chat payload", "payload", string(b))
		return bytes.NewReader(b)
	}
	// Completions payload with prompt format (no tool calls).
	payload := struct {
		Model  string `json:"model"`
		Prompt string `json:"prompt"`
	}{
		Model:  "openai/gpt-4o-mini",
		Prompt: prompt,
	}
	b, err := json.Marshal(payload)
	if err != nil {
		p.log.Error("failed to marshal openrouter completion payload", "error", err)
		return nil
	}
	p.log.Debug("made openrouter completion payload", "payload", string(b))
	return bytes.NewReader(b)
}