Diffstat
 -rw-r--r--  README.md            13
 -rw-r--r--  config-example.toml   1
 -rw-r--r--  main.go              52
 -rw-r--r--  types.go              1
4 files changed, 64 insertions, 3 deletions
diff --git a/README.md b/README.md
index 8f64509..069ef7e 100644
--- a/README.md
+++ b/README.md
@@ -235,6 +235,18 @@ webirc password to use.
webirc address to use.
+#### context
+
+The context to use for normal conversations with the bot. Yes, this is how you tell your milla instance to act like a pirate.
+
+```toml
+context = ["you are a pirate. use the language and words a pirate would unless you are asked to do otherwise explicitly", "your name is caption blackbeard"]
+```
+
+```toml
+context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french", "your name is terra"]
+```
+
#### rssFile
The file that contains the rss feeds.
@@ -351,6 +363,7 @@ skipTLSVerify = false
useTLS = true
adminOnly = false
plugins = ["/plugins/ip.lua", "/plugins/urban.lua"]
+context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french"]
[ircd.devinet.watchlist.security]
watchList = ["#securityfeeds"]
watchFiles = ["/watchfiles/voidbox.list"]
diff --git a/config-example.toml b/config-example.toml
index c968dd4..d1c9ba0 100644
--- a/config-example.toml
+++ b/config-example.toml
@@ -28,6 +28,7 @@ llmProxy = "http://127.0.0.1:8180"
skipTLSVerify = false
useTLS = true
adminOnly = false
+context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french", "your name is terra"]
plugins = ["/plugins/ip.lua", "/plugins/urban.lua"]
[ircd.devinet.watchlist.security]
watchList = ["#securityfeeds"]
diff --git a/main.go b/main.go
index b4c3301..4999c9a 100644
--- a/main.go
+++ b/main.go
@@ -345,7 +345,7 @@ func handleCustomCommand(
for _, customContext := range customCommand.Context {
gptMemory = append(gptMemory, openai.ChatCompletionMessage{
- Role: openai.ChatMessageRoleUser,
+ Role: openai.ChatMessageRoleAssistant,
Content: customContext,
})
}
@@ -376,7 +376,7 @@ func handleCustomCommand(
Parts: []genai.Part{
genai.Text(customContext),
},
- Role: "user",
+ Role: "model",
})
}
@@ -396,7 +396,7 @@ func handleCustomCommand(
for _, customContext := range customCommand.Context {
ollamaMemory = append(ollamaMemory, MemoryElement{
- Role: "user",
+ Role: "assistant",
Content: customContext,
})
}
@@ -649,6 +649,13 @@ func DoOllamaRequest(
if len(*ollamaMemory) > appConfig.MemoryLimit {
*ollamaMemory = []MemoryElement{}
+
+ for _, context := range appConfig.Context {
+ *ollamaMemory = append(*ollamaMemory, MemoryElement{
+ Role: "assistant",
+ Content: context,
+ })
+ }
}
*ollamaMemory = append(*ollamaMemory, memoryElement)
@@ -887,6 +894,15 @@ func GeminiRequestProcessor(
if len(*geminiMemory) > appConfig.MemoryLimit {
*geminiMemory = []*genai.Content{}
+
+ for _, context := range appConfig.Context {
+ *geminiMemory = append(*geminiMemory, &genai.Content{
+ Parts: []genai.Part{
+ genai.Text(context),
+ },
+ Role: "model",
+ })
+ }
}
*geminiMemory = append(*geminiMemory, &genai.Content{
@@ -1036,6 +1052,13 @@ func ChatGPTRequestProcessor(
if len(*gptMemory) > appConfig.MemoryLimit {
*gptMemory = []openai.ChatCompletionMessage{}
+
+ for _, context := range appConfig.Context {
+ *gptMemory = append(*gptMemory, openai.ChatCompletionMessage{
+ Role: openai.ChatMessageRoleAssistant,
+ Content: context,
+ })
+ }
}
var writer bytes.Buffer
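The three memory-reset hunks above all make the same change: once a provider's history grows past `appConfig.MemoryLimit`, the slice is cleared and the configured context lines are replayed into it immediately, so the persona set via `context` is not lost when the history is wiped. A minimal standalone sketch of that pattern, using the Ollama types from this diff; `trimAndReseed` is an illustrative name, not a function in milla:

```go
// Illustrative only: the reset-and-reseed pattern from DoOllamaRequest,
// factored into a hypothetical helper. MemoryElement and TomlConfig are
// milla's own types, shown elsewhere in this diff.
func trimAndReseed(appConfig *TomlConfig, ollamaMemory *[]MemoryElement) {
	if len(*ollamaMemory) <= appConfig.MemoryLimit {
		return
	}

	// Drop the accumulated conversation once the limit is exceeded...
	*ollamaMemory = []MemoryElement{}

	// ...then replay the configured context so the persona survives.
	for _, context := range appConfig.Context {
		*ollamaMemory = append(*ollamaMemory, MemoryElement{
			Role:    "assistant",
			Content: context,
		})
	}
}
```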
@@ -1312,10 +1335,33 @@ func runIRC(appConfig TomlConfig) {
switch appConfig.Provider {
case "ollama":
+ for _, context := range appConfig.Context {
+ OllamaMemory = append(OllamaMemory, MemoryElement{
+ Role: "assistant",
+ Content: context,
+ })
+ }
+
OllamaHandler(irc, &appConfig, &OllamaMemory)
case "gemini":
+ for _, context := range appConfig.Context {
+ GeminiMemory = append(GeminiMemory, &genai.Content{
+ Parts: []genai.Part{
+ genai.Text(context),
+ },
+ Role: "model",
+ })
+ }
+
GeminiHandler(irc, &appConfig, &GeminiMemory)
case "chatgpt":
+ for _, context := range appConfig.Context {
+ GPTMemory = append(GPTMemory, openai.ChatCompletionMessage{
+ Role: openai.ChatMessageRoleAssistant,
+ Content: context,
+ })
+ }
+
ChatGPTHandler(irc, &appConfig, &GPTMemory)
}
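With this hunk the seeding loop now appears once per provider at startup (here in runIRC) and once per provider on memory reset. A possible follow-up cleanup, sketched only as a suggestion, is a small per-provider helper so each loop lives in one place; shown for the OpenAI branch, where `seedGPTMemory` is a hypothetical name and the `openai.*` identifiers are assumed to come from github.com/sashabaranov/go-openai:

```go
// Hypothetical helper, not part of this commit: seeds an OpenAI-style
// memory slice with the configured context lines, usable both at
// startup and after a memory reset.
func seedGPTMemory(appConfig *TomlConfig, gptMemory *[]openai.ChatCompletionMessage) {
	for _, context := range appConfig.Context {
		*gptMemory = append(*gptMemory, openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleAssistant,
			Content: context,
		})
	}
}
```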
diff --git a/types.go b/types.go
index 78c1337..1fc09dc 100644
--- a/types.go
+++ b/types.go
@@ -80,6 +80,7 @@ type TomlConfig struct {
WebIRCAddress string `toml:"webIRCAddress"`
RSSFile string `toml:"rssFile"`
Plugins []string `toml:"plugins"`
+ Context []string `toml:"context"`
CustomCommands map[string]CustomCommand `toml:"customCommands"`
WatchLists map[string]WatchList `toml:"watchList"`
LuaStates map[string]LuaLstates
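With the new `Context []string` field tagged `toml:"context"`, the option decodes like any other string list in the config. A minimal sketch of the decoding, assuming the BurntSushi/toml package (milla's actual config loading may differ); the struct here is a trimmed-down stand-in, not the full TomlConfig:

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

// Illustrative subset of milla's TomlConfig: only the field added in
// this commit is shown.
type TomlConfig struct {
	Context []string `toml:"context"`
}

func main() {
	// A trimmed-down stand-in for one network's section of the real config.
	doc := `context = ["your name is terra"]`

	var cfg TomlConfig
	if _, err := toml.Decode(doc, &cfg); err != nil {
		log.Fatal(err)
	}

	fmt.Println(cfg.Context) // prints: [your name is terra]
}
```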