feat: add DeepSeekerChat chunk parser and refactor DeepSeeker to DeepSeekerCompletion

This commit is contained in:
Grail Finder
2025-03-10 22:05:40 +03:00
committed by Grail Finder (aider)
parent d963304f61
commit 686bb1fedd

57
llm.go
View File

@@ -21,7 +21,9 @@ func choseChunkParser() {
case "http://localhost:8080/v1/chat/completions": case "http://localhost:8080/v1/chat/completions":
chunkParser = OpenAIer{} chunkParser = OpenAIer{}
case "https://api.deepseek.com/beta/completions": case "https://api.deepseek.com/beta/completions":
chunkParser = DeepSeeker{} chunkParser = DeepSeekerCompletion{}
case "https://api.deepseek.com/chat/completions":
chunkParser = DeepSeekerChat{}
default: default:
chunkParser = LlamaCPPeer{} chunkParser = LlamaCPPeer{}
} }
@@ -37,7 +39,9 @@ type LlamaCPPeer struct {
} }
type OpenAIer struct { type OpenAIer struct {
} }
type DeepSeeker struct { type DeepSeekerCompletion struct {
}
type DeepSeekerChat struct {
} }
func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) { func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
@@ -148,7 +152,7 @@ func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
} }
// deepseek // deepseek
func (ds DeepSeeker) ParseChunk(data []byte) (string, bool, error) { func (ds DeepSeekerCompletion) ParseChunk(data []byte) (string, bool, error) {
llmchunk := models.DSCompletionResp{} llmchunk := models.DSCompletionResp{}
if err := json.Unmarshal(data, &llmchunk); err != nil { if err := json.Unmarshal(data, &llmchunk); err != nil {
logger.Error("failed to decode", "error", err, "line", string(data)) logger.Error("failed to decode", "error", err, "line", string(data))
@@ -163,7 +167,7 @@ func (ds DeepSeeker) ParseChunk(data []byte) (string, bool, error) {
return llmchunk.Choices[0].Text, false, nil return llmchunk.Choices[0].Text, false, nil
} }
func (ds DeepSeeker) FormMsg(msg, role string, resume bool) (io.Reader, error) { func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
if msg != "" { // otherwise let the bot to continue if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg} newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg) chatBody.Messages = append(chatBody.Messages, newMsg)
@@ -206,3 +210,48 @@ func (ds DeepSeeker) FormMsg(msg, role string, resume bool) (io.Reader, error) {
} }
return bytes.NewReader(data), nil return bytes.NewReader(data), nil
} }
// ParseChunk decodes one streaming chunk from the DeepSeek chat endpoint.
// It returns the extracted text, a flag that is true when the stream has
// finished (FinishReason is set), and any JSON decode error.
//
// NOTE(review): this decodes into models.DSCompletionResp and reads
// Choices[0].Text — the *completion* response shape. The chat endpoint
// normally streams delta-style messages (choices[0].delta.content);
// confirm the response model matches what the API actually sends.
func (ds DeepSeekerChat) ParseChunk(data []byte) (string, bool, error) {
	llmchunk := models.DSCompletionResp{}
	if err := json.Unmarshal(data, &llmchunk); err != nil {
		logger.Error("failed to decode", "error", err, "line", string(data))
		return "", false, err
	}
	// Guard against an empty choices array (keep-alive or malformed chunk)
	// instead of panicking with an index-out-of-range on Choices[0].
	if len(llmchunk.Choices) == 0 {
		logger.Error("empty choices in chunk", "line", string(data))
		return "", false, nil
	}
	if llmchunk.Choices[0].FinishReason != "" {
		if llmchunk.Choices[0].Text != "" {
			logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
		}
		return llmchunk.Choices[0].Text, true, nil
	}
	return llmchunk.Choices[0].Text, false, nil
}
// FormMsg builds the streaming request body for the DeepSeek chat endpoint
// from the package-level chatBody: it optionally appends the tool system
// message and the user's new message (plus a RAG lookup result when
// enabled), marshals the body to JSON, and returns it as an io.Reader.
// Returns an error if the RAG lookup or the JSON marshaling fails.
func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
	if cfg.ToolUse && !resume {
		// Inject the tool system message once per fresh (non-resume) request.
		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
	}
	if msg != "" { // otherwise let the bot continue
		newMsg := models.RoleMsg{Role: role, Content: msg}
		chatBody.Messages = append(chatBody.Messages, newMsg)
		// if rag
		if cfg.RAGEnabled {
			ragResp, err := chatRagUse(newMsg.Content)
			if err != nil {
				logger.Error("failed to form a rag msg", "error", err)
				return nil, err
			}
			ragMsg := models.RoleMsg{Role: cfg.ToolRole, Content: ragResp}
			chatBody.Messages = append(chatBody.Messages, ragMsg)
		}
	}
	// NOTE(review): a leftover AI-assistant instruction comment was removed
	// here ("copy chat body and replace config.UserRole with 'user'; ai!").
	// The call below discards NewDSCharReq's return value and then marshals
	// the original chatBody, so any role remapping it performs is lost
	// unless it mutates its argument in place — confirm its signature and
	// capture/marshal its result if it returns a converted copy. Also note
	// the name: "NewDSCharReq" may be a typo for "NewDSChatReq".
	models.NewDSCharReq(chatBody)
	data, err := json.Marshal(chatBody)
	if err != nil {
		logger.Error("failed to form a msg", "error", err)
		return nil, err
	}
	return bytes.NewReader(data), nil
}