-rw-r--r--  main.go  22
1 file changed, 17 insertions, 5 deletions
diff --git a/main.go b/main.go
index c2c3565..737454f 100644
--- a/main.go
+++ b/main.go
@@ -96,6 +96,7 @@ func returnGeminiResponse(resp *genai.GenerateContentResponse) string {
func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
var Memory []MemoryElement
+ var GPTMemory []openai.ChatCompletionMessage
irc := girc.New(girc.Config{
Server: appConfig.IrcServer,
@@ -288,6 +289,10 @@ func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
// return
// }
+ if len(cs.History) > appConfig.MemoryLImit {
+ cs.History = cs.History[:0]
+ }
+
geminiResponse := returnGeminiResponse(resp)
log.Println(geminiResponse)
@@ -351,16 +356,14 @@ func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
gptClient := openai.NewClientWithConfig(config)
- messages := make([]openai.ChatCompletionMessage, 0)
-
- messages = append(messages, openai.ChatCompletionMessage{
- Role: "system",
+ GPTMemory = append(GPTMemory, openai.ChatCompletionMessage{
+ Role: openai.ChatMessageRoleUser,
Content: prompt,
})
resp, err := gptClient.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
Model: appConfig.Model,
- Messages: messages,
+ Messages: GPTMemory,
})
if err != nil {
client.Cmd.ReplyTo(event, fmt.Sprintf("error: %s", err.Error()))
@@ -368,6 +371,15 @@ func runIRC(appConfig TomlConfig, ircChan chan *girc.Client) {
return
}
+ GPTMemory = append(GPTMemory, openai.ChatCompletionMessage{
+ Role: openai.ChatMessageRoleAssistant,
+ Content: resp.Choices[0].Message.Content,
+ })
+
+ if len(GPTMemory) > appConfig.MemoryLImit {
+ GPTMemory = GPTMemory[:0]
+ }
+
var writer bytes.Buffer
err = quick.Highlight(
&writer,