Feat: OpenRouter API implementation

This commit is contained in:
Grail Finder
2025-08-07 12:18:01 +03:00
parent 9b2558ffe8
commit 813cb49d36
8 changed files with 385 additions and 28 deletions

View File

@@ -2,7 +2,6 @@ package models
import (
"fmt"
"gf-lt/config"
"strings"
)
@@ -56,7 +55,7 @@ type RoleMsg struct {
Content string `json:"content"`
}
func (m RoleMsg) ToText(i int, cfg *config.Config) string {
func (m RoleMsg) ToText(i int) string {
icon := fmt.Sprintf("(%d)", i)
// check if already has role annotation (/completion makes them)
if !strings.HasPrefix(m.Content, m.Role+":") {
@@ -185,7 +184,7 @@ type DSCompletionReq struct {
TopP float32 `json:"top_p"`
}
func NewDSCompletionReq(prompt, model string, temp float32, cfg *config.Config, stopSlice []string) DSCompletionReq {
func NewDSCompletionReq(prompt, model string, temp float32, stopSlice []string) DSCompletionReq {
return DSCompletionReq{
Model: model,
Prompt: prompt,
@@ -334,7 +333,7 @@ type LlamaCPPReq struct {
// Samplers string `json:"samplers"`
}
func NewLCPReq(prompt string, cfg *config.Config, props map[string]float32, stopStrings []string) LlamaCPPReq {
func NewLCPReq(prompt string, props map[string]float32, stopStrings []string) LlamaCPPReq {
return LlamaCPPReq{
Stream: true,
Prompt: prompt,
@@ -362,3 +361,156 @@ type DSBalance struct {
ToppedUpBalance string `json:"topped_up_balance"`
} `json:"balance_infos"`
}
// openrouter
// https://openrouter.ai/docs/api-reference/completion

// OpenRouterCompletionReq is the request payload for the OpenRouter
// text-completion (prompt based, non-chat) endpoint.
type OpenRouterCompletionReq struct {
	Model       string   `json:"model"`
	Prompt      string   `json:"prompt"`
	Stream      bool     `json:"stream"`
	Temperature float32  `json:"temperature"`
	Stop        []string `json:"stop"` // not present in docs
	MinP        float32  `json:"min_p"`
	NPredict    int32    `json:"max_tokens"`
}

// NewOpenRouterCompletionReq builds a streaming completion request for the
// given model and prompt. Sampling settings are read from props under the
// keys "temperature", "min_p" and "n_predict"; a missing key yields the map
// zero value (0).
func NewOpenRouterCompletionReq(model, prompt string, props map[string]float32, stopStrings []string) OpenRouterCompletionReq {
	req := OpenRouterCompletionReq{
		Model:  model,
		Prompt: prompt,
		Stream: true, // responses are always consumed as a stream
		Stop:   stopStrings,
	}
	req.Temperature = props["temperature"]
	req.MinP = props["min_p"]
	req.NPredict = int32(props["n_predict"])
	return req
}
// OpenRouterChatReq is the request payload for the OpenRouter chat
// completions endpoint.
type OpenRouterChatReq struct {
	Messages    []RoleMsg `json:"messages"`
	Model       string    `json:"model"`
	Stream      bool      `json:"stream"`
	Temperature float32   `json:"temperature"`
	MinP        float32   `json:"min_p"`
	NPredict    int32     `json:"max_tokens"`
}

// NewOpenRouterChatReq converts a generic ChatBody into an OpenRouter chat
// request. Sampling settings come from props under the keys "temperature",
// "min_p" and "n_predict"; a missing key yields the map zero value (0).
func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
	req := OpenRouterChatReq{
		Messages: cb.Messages,
		Model:    cb.Model,
		Stream:   cb.Stream,
	}
	req.Temperature = props["temperature"]
	req.MinP = props["min_p"]
	req.NPredict = int32(props["n_predict"])
	return req
}
// OpenRouterChatRespNonStream mirrors the non-streaming JSON response of the
// OpenRouter chat completions endpoint: each choice carries the complete
// assistant message (contrast with the per-chunk Delta in OpenRouterChatResp).
type OpenRouterChatRespNonStream struct {
	ID       string `json:"id"`
	Provider string `json:"provider"` // upstream provider that served the request
	Model    string `json:"model"`
	Object   string `json:"object"`
	Created  int    `json:"created"` // presumably a unix timestamp — confirm against API docs
	Choices  []struct {
		Logprobs           any    `json:"logprobs"`
		FinishReason       string `json:"finish_reason"`
		NativeFinishReason string `json:"native_finish_reason"`
		Index              int    `json:"index"`
		Message            struct {
			Role      string `json:"role"`
			Content   string `json:"content"`
			Refusal   any    `json:"refusal"`
			Reasoning any    `json:"reasoning"`
		} `json:"message"`
	} `json:"choices"`
	// Usage reports token accounting for the whole request/response pair.
	Usage struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
}
// OpenRouterChatResp mirrors one streamed response chunk from the OpenRouter
// chat completions endpoint; each chunk carries an incremental Delta rather
// than a full message.
type OpenRouterChatResp struct {
	ID       string `json:"id"`
	Provider string `json:"provider"` // upstream provider that served the request
	Model    string `json:"model"`
	Object   string `json:"object"`
	Created  int    `json:"created"` // presumably a unix timestamp — confirm against API docs
	Choices  []struct {
		Index int `json:"index"`
		// Delta holds the incremental piece of the assistant message
		// delivered by this chunk.
		Delta struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"delta"`
		FinishReason       string `json:"finish_reason"`
		NativeFinishReason string `json:"native_finish_reason"`
		Logprobs           any    `json:"logprobs"`
	} `json:"choices"`
}
// OpenRouterCompletionResp mirrors a response chunk from the OpenRouter
// text-completion (prompt based) endpoint; Choices[i].Text holds the
// generated text.
type OpenRouterCompletionResp struct {
	ID       string `json:"id"`
	Provider string `json:"provider"` // upstream provider that served the request
	Model    string `json:"model"`
	Object   string `json:"object"`
	Created  int    `json:"created"` // presumably a unix timestamp — confirm against API docs
	Choices  []struct {
		Text               string `json:"text"`
		FinishReason       string `json:"finish_reason"`
		NativeFinishReason string `json:"native_finish_reason"`
		Logprobs           any    `json:"logprobs"`
	} `json:"choices"`
}
// ORModel describes a single model entry in the OpenRouter model listing,
// including its architecture, pricing and provider limits.
type ORModel struct {
	ID            string `json:"id"`
	CanonicalSlug string `json:"canonical_slug"`
	HuggingFaceID string `json:"hugging_face_id"`
	Name          string `json:"name"`
	Created       int    `json:"created"`
	Description   string `json:"description"`
	ContextLength int    `json:"context_length"`
	Architecture  struct {
		Modality         string   `json:"modality"`
		InputModalities  []string `json:"input_modalities"`
		OutputModalities []string `json:"output_modalities"`
		Tokenizer        string   `json:"tokenizer"`
		InstructType     any      `json:"instruct_type"`
	} `json:"architecture"`
	// Pricing values arrive as decimal strings (e.g. "0"), not numbers;
	// ListModels compares them against the literal "0".
	Pricing struct {
		Prompt            string `json:"prompt"`
		Completion        string `json:"completion"`
		Request           string `json:"request"`
		Image             string `json:"image"`
		Audio             string `json:"audio"`
		WebSearch         string `json:"web_search"`
		InternalReasoning string `json:"internal_reasoning"`
	} `json:"pricing,omitempty"`
	TopProvider struct {
		ContextLength       int  `json:"context_length"`
		MaxCompletionTokens int  `json:"max_completion_tokens"`
		IsModerated        	bool `json:"is_moderated"`
	} `json:"top_provider"`
	PerRequestLimits    any      `json:"per_request_limits"`
	SupportedParameters []string `json:"supported_parameters"`
}

// ORModels is the top-level payload of the OpenRouter model listing.
type ORModels struct {
	Data []ORModel `json:"data"`
}

// ListModels returns the IDs of the models in orm. When free is true, only
// models whose prompt, request and completion prices are all the string "0"
// are included; otherwise every model ID is returned. The result is never
// nil (an empty, non-nil slice is returned when nothing matches).
func (orm *ORModels) ListModels(free bool) []string {
	// Pre-size to the upper bound to avoid repeated append growth.
	resp := make([]string, 0, len(orm.Data))
	for _, model := range orm.Data {
		if free && (model.Pricing.Prompt != "0" ||
			model.Pricing.Request != "0" ||
			model.Pricing.Completion != "0") {
			continue
		}
		resp = append(resp, model.ID)
	}
	return resp
}