Enhance: llmparser build prompt
@@ -383,21 +383,7 @@ func (b *Bot) BuildPrompt(room *models.Room) string {
 
 func (b *Bot) CallLLM(prompt string) ([]byte, error) {
 	method := "POST"
-	payload := strings.NewReader(fmt.Sprintf(`{
-  "model": "deepseek-chat",
-  "prompt": "%s",
-  "echo": false,
-  "frequency_penalty": 0,
-  "logprobs": 0,
-  "max_tokens": 1024,
-  "presence_penalty": 0,
-  "stop": null,
-  "stream": false,
-  "stream_options": null,
-  "suffix": null,
-  "temperature": 1,
-  "top_p": 1
-}`, prompt))
+	payload := b.LLMParser.MakePayload(prompt)
 	client := &http.Client{}
 	req, err := http.NewRequest(method, b.cfg.LLMConfig.URL, payload)
 	if err != nil {
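The call site now depends on the bot carrying a RespParser, but this diff does not show where that field is populated. A minimal sketch of provider-based wiring, assuming a hypothetical chooseParser helper and provider strings that are not part of this commit (the deepSeekParser field layout is also assumed to match lcpRespParser below):

	// Sketch only: pick a RespParser implementation by provider name.
	// The helper name and provider strings are assumptions; the
	// concrete parser types are the ones defined in this commit.
	func chooseParser(provider string, log *slog.Logger) RespParser {
		switch provider {
		case "deepseek":
			return &deepSeekParser{log: log}
		case "openrouter":
			return &openRouterParser{log: log}
		default: // llama.cpp-compatible server
			return &lcpRespParser{log: log}
		}
	}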
@@ -4,12 +4,14 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"io"
 	"log/slog"
 	"strings"
 )
 
 type RespParser interface {
 	ParseBytes(body []byte) (map[string]any, error)
+	MakePayload(prompt string) io.Reader
 }
 
 // DeepSeekParser: deepseek implementation of RespParser
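Since the interface grew a method, a compile-time assertion per implementation turns a missing MakePayload into a build error rather than a runtime surprise. A small sketch, not part of this commit:

	// Compile-time checks that each parser still satisfies RespParser.
	var (
		_ RespParser = (*deepSeekParser)(nil)
		_ RespParser = (*lcpRespParser)(nil)
		_ RespParser = (*openRouterParser)(nil)
	)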
@@ -50,6 +52,24 @@ func (p *deepSeekParser) ParseBytes(body []byte) (map[string]any, error) {
 	return respMap, nil
 }
 
+func (p *deepSeekParser) MakePayload(prompt string) io.Reader {
+	return strings.NewReader(fmt.Sprintf(`{
+	  "model": "deepseek-chat",
+	  "prompt": "%s",
+	  "echo": false,
+	  "frequency_penalty": 0,
+	  "logprobs": 0,
+	  "max_tokens": 1024,
+	  "presence_penalty": 0,
+	  "stop": null,
+	  "stream": false,
+	  "stream_options": null,
+	  "suffix": null,
+	  "temperature": 1,
+	  "top_p": 1
+	}`, prompt))
+}
+
 // llama.cpp implementation of RespParser
 type lcpRespParser struct {
 	log *slog.Logger
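One caveat with these templates: fmt.Sprintf drops the raw prompt into a quoted JSON field, so any quote, backslash, or newline in the prompt yields invalid JSON. An alternative sketch that builds the same deepseek payload with encoding/json so the prompt is escaped correctly (it assumes deepSeekParser carries a log field like lcpRespParser, and needs the bytes import):

	// Sketch: marshal a map instead of string-formatting a template,
	// so the prompt is JSON-escaped. Fields mirror the template above.
	func (p *deepSeekParser) MakePayload(prompt string) io.Reader {
		payload := map[string]any{
			"model":             "deepseek-chat",
			"prompt":            prompt,
			"echo":              false,
			"frequency_penalty": 0,
			"logprobs":          0,
			"max_tokens":        1024,
			"presence_penalty":  0,
			"stop":              nil,
			"stream":            false,
			"stream_options":    nil,
			"suffix":            nil,
			"temperature":       1,
			"top_p":             1,
		}
		data, err := json.Marshal(payload)
		if err != nil {
			p.log.Error("failed to marshal payload", "error", err)
			return strings.NewReader("{}")
		}
		return bytes.NewReader(data)
	}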
@@ -89,6 +109,19 @@ func (p *lcpRespParser) ParseBytes(body []byte) (map[string]any, error) {
 	return respMap, nil
 }
 
+func (p *lcpRespParser) MakePayload(prompt string) io.Reader {
+	return strings.NewReader(fmt.Sprintf(`{
+	  "model": "local-model",
+	  "prompt": "%s",
+	  "frequency_penalty": 0,
+	  "max_tokens": 1024,
+	  "stop": null,
+	  "stream": false,
+	  "temperature": 0.4,
+	  "top_p": 1
+	}`, prompt))
+}
+
 type openRouterParser struct {
 	log *slog.Logger
 }
@@ -125,3 +158,15 @@ func (p *openRouterParser) ParseBytes(body []byte) (map[string]any, error) {
 	}
 	return respMap, nil
 }
+
+func (p *openRouterParser) MakePayload(prompt string) io.Reader {
+	return strings.NewReader(fmt.Sprintf(`{
+	"model": "deepseek/deepseek-chat-v3-0324:free",
+	"messages": [
+		{
+		"role": "user",
+		"content": %s
+		}
+	]
+	}`, prompt))
+}
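Note the asymmetry between templates: deepseek and llama.cpp wrap %s in quotes, while the openrouter template interpolates %s bare into "content", so it only produces valid JSON if prompt is already a JSON-encoded string. A sketch of quoting at the call boundary, under the assumption (not shown in this commit) that callers otherwise pass a raw string:

	// Sketch: JSON-encode the raw prompt so the openrouter template's
	// bare %s slot receives a properly quoted and escaped JSON string.
	func quotePrompt(raw string) (string, error) {
		b, err := json.Marshal(raw) // e.g. `"hello \"world\""`
		return string(b), err
	}

Callers would then pass quotePrompt's result as the prompt argument when the openrouter parser is selected.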