From 394a7e93caadd21e777b77e804c1c663e555cf50 Mon Sep 17 00:00:00 2001
From: terminaldweller
Date: Fri, 10 May 2024 16:20:52 -0400
Subject: fixes #5

---
 main.go | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)

diff --git a/main.go b/main.go
index c2c3565..737454f 100644
--- a/main.go
+++ b/main.go
@@ -96,6 +96,7 @@ func returnGeminiResponse(resp *genai.GenerateContentResponse) string {
 
 func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
 	var Memory []MemoryElement
+	var GPTMemory []openai.ChatCompletionMessage
 
 	irc := girc.New(girc.Config{
 		Server: appConfig.IrcServer,
@@ -288,6 +289,10 @@ func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
 				// 	return
 				// }
 
+				if len(cs.History) > appConfig.MemoryLImit {
+					cs.History = cs.History[:0]
+				}
+
 				geminiResponse := returnGeminiResponse(resp)
 				log.Println(geminiResponse)
 
@@ -351,16 +356,14 @@ func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
 
 				gptClient := openai.NewClientWithConfig(config)
 
-				messages := make([]openai.ChatCompletionMessage, 0)
-
-				messages = append(messages, openai.ChatCompletionMessage{
-					Role:    "system",
+				GPTMemory = append(GPTMemory, openai.ChatCompletionMessage{
+					Role:    openai.ChatMessageRoleUser,
 					Content: prompt,
 				})
 
 				resp, err := gptClient.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
 					Model:    appConfig.Model,
-					Messages: messages,
+					Messages: GPTMemory,
 				})
 				if err != nil {
 					client.Cmd.ReplyTo(event, fmt.Sprintf("error: %s", err.Error()))
@@ -368,6 +371,15 @@ func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
 					return
 				}
 
+				GPTMemory = append(GPTMemory, openai.ChatCompletionMessage{
+					Role:    openai.ChatMessageRoleAssistant,
+					Content: resp.Choices[0].Message.Content,
+				})
+
+				if len(GPTMemory) > appConfig.MemoryLImit {
+					GPTMemory = GPTMemory[:0]
+				}
+
 				var writer bytes.Buffer
 				err = quick.Highlight(
 					&writer,
-- 
cgit v1.2.3
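For context, the patch gives the ChatGPT provider a bounded conversation memory: the user prompt is appended to GPTMemory before the request, the assistant reply is appended after it, and the whole slice is cleared once it grows past the configured MemoryLImit (the Gemini path gets the same cap on cs.History). Below is a minimal standalone sketch of that pattern using the sashabaranov/go-openai client the file already imports; the ask helper, the limit value, and the OPENAI_API_KEY lookup are illustrative stand-ins, not code from the patch.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

// ask appends the user prompt to the shared memory, sends the whole history,
// then appends the assistant reply. Like the patch, it resets the memory
// outright once it exceeds the limit instead of trimming the oldest entries.
func ask(ctx context.Context, client *openai.Client, memory *[]openai.ChatCompletionMessage, prompt string, limit int) (string, error) {
	*memory = append(*memory, openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleUser,
		Content: prompt,
	})

	resp, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
		Model:    openai.GPT3Dot5Turbo, // stand-in for appConfig.Model
		Messages: *memory,
	})
	if err != nil {
		return "", err
	}

	answer := resp.Choices[0].Message.Content

	*memory = append(*memory, openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleAssistant,
		Content: answer,
	})

	// Full reset once the history grows past the limit, mirroring
	// GPTMemory = GPTMemory[:0] in the patch.
	if len(*memory) > limit {
		*memory = (*memory)[:0]
	}

	return answer, nil
}

func main() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	var memory []openai.ChatCompletionMessage

	answer, err := ask(context.Background(), client, &memory, "hello", 20)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(answer)
}

Clearing the slice with [:0] throws away the entire conversation at once; a sliding window that drops only the oldest messages would preserve more context, but the full reset is the simpler choice the patch makes for both providers.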