From 83513b6c6a8cae0d2025aebbce78ef6a7855a1df Mon Sep 17 00:00:00 2001
From: Grail Finder
Date: Mon, 23 Jun 2025 13:27:35 +0300
Subject: [PATCH] Enha: llmparser build prompt

---
 llmapi/main.go   | 16 +---------------
 llmapi/parser.go | 45 +++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 46 insertions(+), 15 deletions(-)

diff --git a/llmapi/main.go b/llmapi/main.go
index 8b67ca1..29b9298 100644
--- a/llmapi/main.go
+++ b/llmapi/main.go
@@ -383,21 +383,7 @@ func (b *Bot) BuildPrompt(room *models.Room) string {
 
 func (b *Bot) CallLLM(prompt string) ([]byte, error) {
 	method := "POST"
-	payload := strings.NewReader(fmt.Sprintf(`{
-	"model": "deepseek-chat",
-	"prompt": "%s",
-	"echo": false,
-	"frequency_penalty": 0,
-	"logprobs": 0,
-	"max_tokens": 1024,
-	"presence_penalty": 0,
-	"stop": null,
-	"stream": false,
-	"stream_options": null,
-	"suffix": null,
-	"temperature": 1,
-	"top_p": 1
-}`, prompt))
+	payload := b.LLMParser.MakePayload(prompt)
 	client := &http.Client{}
 	req, err := http.NewRequest(method, b.cfg.LLMConfig.URL, payload)
 	if err != nil {
diff --git a/llmapi/parser.go b/llmapi/parser.go
index 8f41659..dbc01e4 100644
--- a/llmapi/parser.go
+++ b/llmapi/parser.go
@@ -4,12 +4,14 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"io"
 	"log/slog"
 	"strings"
 )
 
 type RespParser interface {
 	ParseBytes(body []byte) (map[string]any, error)
+	MakePayload(prompt string) io.Reader
 }
 
 // DeepSeekParser: deepseek implementation of RespParser
@@ -50,6 +52,24 @@ func (p *deepSeekParser) ParseBytes(body []byte) (map[string]any, error) {
 	return respMap, nil
 }
 
+func (p *deepSeekParser) MakePayload(prompt string) io.Reader {
+	return strings.NewReader(fmt.Sprintf(`{
+	"model": "deepseek-chat",
+	"prompt": "%s",
+	"echo": false,
+	"frequency_penalty": 0,
+	"logprobs": 0,
+	"max_tokens": 1024,
+	"presence_penalty": 0,
+	"stop": null,
+	"stream": false,
+	"stream_options": null,
+	"suffix": null,
+	"temperature": 1,
+	"top_p": 1
+	}`, prompt))
+}
+
 // llama.cpp implementation of RespParser
 type lcpRespParser struct {
 	log *slog.Logger
@@ -89,6 +109,19 @@ func (p *lcpRespParser) ParseBytes(body []byte) (map[string]any, error) {
 	return respMap, nil
 }
 
+func (p *lcpRespParser) MakePayload(prompt string) io.Reader {
+	return strings.NewReader(fmt.Sprintf(`{
+	"model": "local-model",
+	"prompt": "%s",
+	"frequency_penalty": 0,
+	"max_tokens": 1024,
+	"stop": null,
+	"stream": false,
+	"temperature": 0.4,
+	"top_p": 1
+	}`, prompt))
+}
+
 type openRouterParser struct {
 	log *slog.Logger
 }
@@ -125,3 +158,15 @@ func (p *openRouterParser) ParseBytes(body []byte) (map[string]any, error) {
 	}
 	return respMap, nil
 }
+
+func (p *openRouterParser) MakePayload(prompt string) io.Reader {
+	return strings.NewReader(fmt.Sprintf(`{
+	"model": "deepseek/deepseek-chat-v3-0324:free",
+	"messages": [
+		{
+			"role": "user",
+			"content": %s
+		}
+	]
+	}`, prompt))
+}
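
A note on the new MakePayload helpers: each implementation interpolates the raw prompt into a JSON template with fmt.Sprintf, so a prompt containing double quotes or newlines would produce an invalid request body. The following is a minimal sketch, not part of the patch, of building an equivalent deepseek-style payload with encoding/json so the prompt is escaped automatically; deepSeekPayload and makePayload are hypothetical names chosen for illustration and do not exist in the repository.

// Hedged sketch: builds a deepseek-style completion payload via json.Marshal
// instead of fmt.Sprintf. deepSeekPayload and makePayload are illustrative
// names, not identifiers from the repository.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
)

// deepSeekPayload mirrors the fields of the hand-written JSON template;
// stream_options and suffix (null in the template) are omitted for brevity.
type deepSeekPayload struct {
	Model            string   `json:"model"`
	Prompt           string   `json:"prompt"`
	Echo             bool     `json:"echo"`
	FrequencyPenalty float64  `json:"frequency_penalty"`
	Logprobs         int      `json:"logprobs"`
	MaxTokens        int      `json:"max_tokens"`
	PresencePenalty  float64  `json:"presence_penalty"`
	Stop             []string `json:"stop"`
	Stream           bool     `json:"stream"`
	Temperature      float64  `json:"temperature"`
	TopP             float64  `json:"top_p"`
}

// makePayload marshals the struct; json.Marshal escapes quotes and newlines
// inside the prompt, keeping the request body valid JSON.
func makePayload(prompt string) (io.Reader, error) {
	p := deepSeekPayload{
		Model:       "deepseek-chat",
		Prompt:      prompt,
		MaxTokens:   1024,
		Temperature: 1,
		TopP:        1,
	}
	data, err := json.Marshal(p)
	if err != nil {
		return nil, err
	}
	return bytes.NewReader(data), nil
}

func main() {
	// A prompt with quotes and a newline still produces valid JSON.
	r, err := makePayload("say \"hello\"\nplease")
	if err != nil {
		panic(err)
	}
	body, _ := io.ReadAll(r)
	fmt.Println(string(body))
}

The fmt.Sprintf templates keep the request shape easy to read in the diff; the trade-off is that they only stay valid for prompts free of JSON-special characters.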