author     terminaldweller <devi@terminaldweller.com>  2024-06-09 12:57:42 +0000
committer  terminaldweller <devi@terminaldweller.com>  2024-06-09 12:57:42 +0000
commit     c16f2f23154433e47c7c571e386e4dbb63a64e0e (patch)
tree       f4accac08051e3d4072ae7779f01fdf2e2dd71ff
parent     added ollama,chatgpt and gemini request functions for the lua plugins to use (diff)
download   milla-c16f2f23154433e47c7c571e386e4dbb63a64e0e.tar.gz
           milla-c16f2f23154433e47c7c571e386e4dbb63a64e0e.zip
updated the readme, removed girc.Client as an arg to the lua extension function closures
-rw-r--r--  Dockerfile   3
-rw-r--r--  README.md   33
-rw-r--r--  main.go      9
-rw-r--r--  plugins.go  24
4 files changed, 42 insertions, 27 deletions
diff --git a/Dockerfile b/Dockerfile
index ba332b2..641022a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,4 @@
FROM golang:1.22-alpine3.20 as builder
-RUN apk update && \
- apk upgrade && \
- apk add go git
WORKDIR /milla
COPY go.sum go.mod /milla/
RUN go mod download
diff --git a/README.md b/README.md
index 18000c8..ecb4a7c 100644
--- a/README.md
+++ b/README.md
@@ -294,6 +294,7 @@ ircProxy = "socks5://127.0.0.1:9050"
llmProxy = "http://127.0.0.1:8180"
skipTLSVerify = false
useTLS = true
+plugins = ["./plugins/rss.lua"]
[ircd.liberanet]
ircServer = "irc.libera.chat"
@@ -509,9 +510,9 @@ secrets:
file: ./pgadmin/pgadmin_pass
```
-The env vars `UID`and `GID`need to be defined or they can replaces by your host user's uid and gid.<br/>
+The env vars `UID` and `GID` need to be defined, or they can be replaced by your host user's uid and gid.<br/>
-As a convenience, there is a a [distroless](https://github.com/GoogleContainerTools/distroless) dockerfile, `Dockerfile_distroless` also provided.<br/>
+As a convenience, a [distroless](https://github.com/GoogleContainerTools/distroless) dockerfile, `Dockerfile_distroless`, is also provided.<br/>
A vendored build of milla is available by first running `go mod vendor` and then using the provided dockerfile, `Dockerfile_distroless_vendored`.<br/>
### Build
@@ -609,6 +610,30 @@ end
rss_feed()
```
+```lua
+milla.send_message(msg, target)
+```
+
+```lua
+milla.join_channel(channel)
+```
+
+```lua
+milla.part_channel(channel)
+```
+
+```lua
+milla.send_ollama_request(prompt)
+```
+
+```lua
+milla.send_gemini_request(prompt)
+```
+
+```lua
+milla.send_chatgpt_request(prompt)
+```
+
The example rss plugin accepts a yaml file as input, reads the provided rss feeds once, extracts the title, author name and link to the resource, sends the feed over to the `#rssfeed` irc channel and exits.<br/>
More of milla's functionality will be available through milla's lua module over time.<br/>
@@ -641,10 +666,6 @@ Milla would not exist without the following projects:
- [ollama](https://github.com/ollama/ollama)
- [toml](https://github.com/BurntSushi/toml)
-## TODO
-
-- plugins support
-
## Similar Projects
- [soulshack](https://github.com/pkdindustries/soulshack)
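
For orientation, here is what a plugin built on the functions documented above could look like. This is a minimal sketch: the function names come straight from the README additions, but the `require("milla")` loading convention and the `#rssfeed` target are assumptions borrowed from the bundled rss example, not guaranteed behavior.

```lua
-- minimal plugin sketch; assumes the milla module is preloaded and
-- fetched with require, mirroring the bundled rss example
local milla = require("milla")

-- join the channel, post a line, and leave again
milla.join_channel("#rssfeed")
milla.send_message("new feed items are on their way", "#rssfeed")
milla.part_channel("#rssfeed")
```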
diff --git a/main.go b/main.go
index 2e4e7be..2911ed7 100644
--- a/main.go
+++ b/main.go
@@ -481,7 +481,6 @@ func runCommand(
func DoOllamaRequest(
appConfig *TomlConfig,
- client *girc.Client,
ollamaMemory *[]MemoryElement,
prompt string,
) (string, error) {
@@ -587,7 +586,7 @@ func OllamaRequestProcessor(
ollamaMemory *[]MemoryElement,
prompt string,
) string {
- response, err := DoOllamaRequest(appConfig, client, ollamaMemory, prompt)
+ response, err := DoOllamaRequest(appConfig, ollamaMemory, prompt)
if err != nil {
client.Cmd.ReplyTo(event, "error: "+err.Error())
@@ -661,7 +660,6 @@ func OllamaHandler(
func DoGeminiRequest(
appConfig *TomlConfig,
- client *girc.Client,
geminiMemory *[]*genai.Content,
prompt string,
) (string, error) {
@@ -700,7 +698,7 @@ func GeminiRequestProcessor(
geminiMemory *[]*genai.Content,
prompt string,
) string {
- geminiResponse, err := DoGeminiRequest(appConfig, client, geminiMemory, prompt)
+ geminiResponse, err := DoGeminiRequest(appConfig, geminiMemory, prompt)
if err != nil {
client.Cmd.ReplyTo(event, "error: "+err.Error())
@@ -787,7 +785,6 @@ func GeminiHandler(
func DoChatGPTRequest(
appConfig *TomlConfig,
- client *girc.Client,
gptMemory *[]openai.ChatCompletionMessage,
prompt string,
) (string, error) {
@@ -847,7 +844,7 @@ func ChatGPTRequestProcessor(
gptMemory *[]openai.ChatCompletionMessage,
prompt string,
) string {
- resp, err := DoChatGPTRequest(appConfig, client, gptMemory, prompt)
+ resp, err := DoChatGPTRequest(appConfig, gptMemory, prompt)
if err != nil {
client.Cmd.ReplyTo(event, "error: "+err.Error())
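
The change follows the same pattern for all three providers: `DoOllamaRequest`, `DoGeminiRequest` and `DoChatGPTRequest` now depend only on the config, the memory slice and the prompt, while replying over IRC stays in the `*RequestProcessor` wrappers that still hold the client. From a plugin's perspective the exchange collapses to a prompt in, a string out; a hedged Lua illustration (the channel name is made up):

```lua
-- the request helpers take nothing but a prompt now that the
-- girc.Client argument is gone; failed requests are logged by milla
local milla = require("milla")  -- assumed loading convention

local summary = milla.send_ollama_request("one-line summary of RFC 1459")
milla.send_message(summary, "#bots")  -- "#bots" is a hypothetical target
```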
diff --git a/plugins.go b/plugins.go
index 0e61c4f..75273a6 100644
--- a/plugins.go
+++ b/plugins.go
@@ -212,11 +212,11 @@ func ircPartChannelClosure(luaState *lua.LState, client *girc.Client) func(*lua.
}
}
-func ollamaRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
+func ollamaRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
return func(luaState *lua.LState) int {
prompt := luaState.CheckString(1)
- result, err := DoOllamaRequest(appConfig, client, &[]MemoryElement{}, prompt)
+ result, err := DoOllamaRequest(appConfig, &[]MemoryElement{}, prompt)
if err != nil {
log.Print(err)
}
@@ -227,11 +227,11 @@ func ollamaRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *
}
}
-func geminiRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
+func geminiRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
return func(luaState *lua.LState) int {
prompt := luaState.CheckString(1)
- result, err := DoGeminiRequest(appConfig, client, &[]*genai.Content{}, prompt)
+ result, err := DoGeminiRequest(appConfig, &[]*genai.Content{}, prompt)
if err != nil {
log.Print(err)
}
@@ -242,11 +242,11 @@ func geminiRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *
}
}
-func chatGPTRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
+func chatGPTRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
return func(luaState *lua.LState) int {
prompt := luaState.CheckString(1)
- result, err := DoChatGPTRequest(appConfig, client, &[]openai.ChatCompletionMessage{}, prompt)
+ result, err := DoChatGPTRequest(appConfig, &[]openai.ChatCompletionMessage{}, prompt)
if err != nil {
log.Print(err)
}
@@ -260,12 +260,12 @@ func chatGPTRequestClosure(luaState *lua.LState, client *girc.Client, appConfig
func millaModuleLoaderClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
return func(luaState *lua.LState) int {
exports := map[string]lua.LGFunction{
- "send_message": lua.LGFunction(sendMessageClosure(luaState, client)),
- "join_channel": lua.LGFunction(ircJoinChannelClosure(luaState, client)),
- "part_channel": lua.LGFunction(ircPartChannelClosure(luaState, client)),
- "send_ollama_request": lua.LGFunction(ollamaRequestClosure(luaState, client, appConfig)),
- "send_gemini_request": lua.LGFunction(geminiRequestClosure(luaState, client, appConfig)),
- "send_chat_gpt_request": lua.LGFunction(chatGPTRequestClosure(luaState, client, appConfig)),
+ "send_message": lua.LGFunction(sendMessageClosure(luaState, client)),
+ "join_channel": lua.LGFunction(ircJoinChannelClosure(luaState, client)),
+ "part_channel": lua.LGFunction(ircPartChannelClosure(luaState, client)),
+ "send_ollama_request": lua.LGFunction(ollamaRequestClosure(luaState, appConfig)),
+ "send_gemini_request": lua.LGFunction(geminiRequestClosure(luaState, appConfig)),
+ "send_chatgpt_request": lua.LGFunction(chatGPTRequestClosure(luaState, appConfig)),
}
millaModule := luaState.SetFuncs(luaState.NewTable(), exports)
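
One detail worth noting in the closures above: each call hands the request function a fresh, empty memory slice, so requests made from lua are one-shot and no conversation state persists between calls. A plugin that wants multi-turn behavior has to carry the context in the prompt itself; a sketch under the same assumed loading convention:

```lua
-- each send_*_request call starts with empty memory, so earlier
-- answers must be repeated in the prompt to simulate a conversation
local milla = require("milla")  -- assumed loading convention

local first = milla.send_chatgpt_request("name one stoic philosopher")
local second = milla.send_chatgpt_request(
    "you previously answered: " .. first .. " -- now name another")
```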