147 lines
		
	
	
		
			4.4 KiB
		
	
	
	
		
			Go
		
	
	
	
	
	
			
		
		
	
	
			147 lines
		
	
	
		
			4.4 KiB
		
	
	
	
		
			Go
		
	
	
	
	
	
| package llmapi
 | |
| 
 | |
| import (
 | |
| 	"gralias/config"
 | |
| 	"gralias/models"
 | |
| 	"log/slog"
 | |
| )
 | |
| 
 | |
// OpenRouterResp models a chat-completion response body returned by the
// OpenRouter API. Only the fields this package reads are declared;
// unknown JSON keys are silently ignored by encoding/json.
type OpenRouterResp struct {
	ID       string `json:"id"`
	Provider string `json:"provider"` // upstream provider that served the request
	Model    string `json:"model"`
	Object   string `json:"object"`
	Created  int    `json:"created"` // presumably a unix timestamp in seconds — TODO confirm
	Choices  []struct {
		Logprobs           any    `json:"logprobs"`
		FinishReason       string `json:"finish_reason"`
		NativeFinishReason string `json:"native_finish_reason"` // provider-native reason, may differ from FinishReason
		Index              int    `json:"index"`
		Message            struct {
			Role      string `json:"role"`
			Content   string `json:"content"` // the generated text
			Refusal   any    `json:"refusal"`
			Reasoning any    `json:"reasoning"`
		} `json:"message"`
	} `json:"choices"`
	// Usage reports token accounting for the request/response pair.
	Usage struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
}
 | |
| 
 | |
// DSResp models a text-completion style response (OpenAI-compatible
// "text_completion" object shape). Choices carry raw text rather than a
// chat message. NOTE(review): the "DS" prefix presumably means DeepSeek —
// confirm against the caller that decodes into this type.
type DSResp struct {
	ID      string `json:"id"`
	Choices []struct {
		Text         string `json:"text"` // the generated completion text
		Index        int    `json:"index"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
	Created           int    `json:"created"` // presumably a unix timestamp in seconds — TODO confirm
	Model             string `json:"model"`
	SystemFingerprint string `json:"system_fingerprint"`
	Object            string `json:"object"`
}
 | |
| 
 | |
// LLMResp models a completion response from a local LLM HTTP server.
// NOTE(review): the field set (id_slot, tokens_predicted, tokens_cached,
// stopping_word) matches the llama.cpp server /completion endpoint —
// confirm against the client code that targets this type.
type LLMResp struct {
	Index           int    `json:"index"`
	Content         string `json:"content"` // the generated text
	Tokens          []any  `json:"tokens"`
	IDSlot          int    `json:"id_slot"` // server-side slot that handled the request
	Stop            bool   `json:"stop"`    // true when generation stopped (vs. streaming chunk)
	Model           string `json:"model"`
	TokensPredicted int    `json:"tokens_predicted"`
	TokensEvaluated int    `json:"tokens_evaluated"`
	Prompt          string `json:"prompt"` // prompt echoed back by the server
	HasNewLine      bool   `json:"has_new_line"`
	Truncated       bool   `json:"truncated"` // true if context-window truncation occurred
	StopType        string `json:"stop_type"`
	StoppingWord    string `json:"stopping_word"` // which stop word ended generation, if any
	TokensCached    int    `json:"tokens_cached"`
}
 | |
| 
 | |
// MimeResp is the structured answer expected from a clue-giver (mime)
// bot: a one-word clue, a number, and the words it intends its team to
// open. The shape mirrors the JSON the LLM is prompted to emit.
type MimeResp struct {
	Clue   string   `json:"clue"`
	Number string   `json:"number"` // kept as a string because the model returns it as text — TODO confirm
	Answer []string `json:"words_I_mean_my_team_to_open"`
}
 | |
| 
 | |
// GusserResp is the structured answer expected from a guesser bot:
// the words it commits to guessing plus weaker candidates.
// NOTE(review): name is a typo for "GuesserResp"; left as-is because
// renaming an exported type would break callers elsewhere.
type GusserResp struct {
	Guesses []string `json:"guesses"`  // words the bot will actually guess
	CouldBe []string `json:"could_be"` // lower-confidence candidates
}
 | |
| 
 | |
// Bot is an LLM-backed player. Exported fields are serialized; the
// config, logger and parser are runtime-only dependencies.
type Bot struct {
	Role      string         `json:"role"`
	Team      string         `json:"team"`
	cfg       *config.Config `json:"-"` // unexported fields are never marshaled; tag is redundant but harmless
	RoomID    string         `json:"room_id"` // can we get a room from here?
	BotName   string         `json:"bot_name"`
	log       *slog.Logger   `json:"-"`
	LLMParser RespParser     `json:"-"` // strategy for parsing raw LLM output; excluded from serialization
	// channels for communication
	// channels are not serializable
	// SignalsCh chan bool
	// DoneCh    chan bool
}
 | |
| 
 | |
| func (b *Bot) ToPlayer() *models.Player {
 | |
| 	return &models.Player{
 | |
| 		Role:     models.StrToUserRole(b.Role),
 | |
| 		Team:     models.StrToUserTeam(b.Team),
 | |
| 		RoomID:   &b.RoomID,
 | |
| 		Username: b.BotName,
 | |
| 		IsBot:    true,
 | |
| 	}
 | |
| }
 | |
| 
 | |
// ORModel describes a single model entry as returned by the OpenRouter
// model-listing endpoint (see ORModels). Prices arrive as decimal
// strings, not numbers.
type ORModel struct {
	ID            string `json:"id"` // model identifier used when making requests
	CanonicalSlug string `json:"canonical_slug"`
	HuggingFaceID string `json:"hugging_face_id"`
	Name          string `json:"name"`
	Created       int    `json:"created"` // presumably a unix timestamp in seconds — TODO confirm
	Description   string `json:"description"`
	ContextLength int    `json:"context_length"` // max context window in tokens
	Architecture  struct {
		Modality         string   `json:"modality"`
		InputModalities  []string `json:"input_modalities"`
		OutputModalities []string `json:"output_modalities"`
		Tokenizer        string   `json:"tokenizer"`
		InstructType     any      `json:"instruct_type"`
	} `json:"architecture"`
	// Pricing holds per-unit prices as decimal strings; "0" means free.
	Pricing struct {
		Prompt            string `json:"prompt"`
		Completion        string `json:"completion"`
		Request           string `json:"request"`
		Image             string `json:"image"`
		Audio             string `json:"audio"`
		WebSearch         string `json:"web_search"`
		InternalReasoning string `json:"internal_reasoning"`
	} `json:"pricing,omitempty"` // omitempty has no effect on a non-pointer struct; kept for compatibility
	TopProvider struct {
		ContextLength       int  `json:"context_length"`
		MaxCompletionTokens int  `json:"max_completion_tokens"`
		IsModerated         bool `json:"is_moderated"`
	} `json:"top_provider"`
	PerRequestLimits    any      `json:"per_request_limits"`
	SupportedParameters []string `json:"supported_parameters"`
}
 | |
| 
 | |
// ORModels is the top-level payload of the OpenRouter model-listing
// endpoint: https://openrouter.ai/api/v1/models
type ORModels struct {
	Data []ORModel `json:"data"`
}
 | |
| 
 | |
| func (orm *ORModels) ListFree() []string {
 | |
| 	resp := []string{}
 | |
| 	for _, model := range orm.Data {
 | |
| 		if model.Pricing.Prompt == "0" && model.Pricing.Request == "0" &&
 | |
| 			model.Pricing.Completion == "0" {
 | |
| 			resp = append(resp, model.ID)
 | |
| 		}
 | |
| 	}
 | |
| 	return resp
 | |
| }
 | 
