about summary refs log tree commit diff stats
path: root/main.go
diff options
context:
space:
mode:
author: terminaldweller <devi@terminaldweller.com> 2024-06-09 12:57:42 +0000
committer: terminaldweller <devi@terminaldweller.com> 2024-06-09 12:57:42 +0000
commit: c16f2f23154433e47c7c571e386e4dbb63a64e0e (patch)
tree: f4accac08051e3d4072ae7779f01fdf2e2dd71ff /main.go
parent: added ollama,chatgpt and gemini request functions for the lua plugins to use (diff)
download: milla-c16f2f23154433e47c7c571e386e4dbb63a64e0e.tar.gz
milla-c16f2f23154433e47c7c571e386e4dbb63a64e0e.zip
updated the readme, remove girc.Client as an arg to the lua extension functions closures
Diffstat (limited to 'main.go')
-rw-r--r--main.go9
1 file changed, 3 insertions, 6 deletions
diff --git a/main.go b/main.go
index 2e4e7be..2911ed7 100644
--- a/main.go
+++ b/main.go
@@ -481,7 +481,6 @@ func runCommand(
func DoOllamaRequest(
appConfig *TomlConfig,
- client *girc.Client,
ollamaMemory *[]MemoryElement,
prompt string,
) (string, error) {
@@ -587,7 +586,7 @@ func OllamaRequestProcessor(
ollamaMemory *[]MemoryElement,
prompt string,
) string {
- response, err := DoOllamaRequest(appConfig, client, ollamaMemory, prompt)
+ response, err := DoOllamaRequest(appConfig, ollamaMemory, prompt)
if err != nil {
client.Cmd.ReplyTo(event, "error: "+err.Error())
@@ -661,7 +660,6 @@ func OllamaHandler(
func DoGeminiRequest(
appConfig *TomlConfig,
- client *girc.Client,
geminiMemory *[]*genai.Content,
prompt string,
) (string, error) {
@@ -700,7 +698,7 @@ func GeminiRequestProcessor(
geminiMemory *[]*genai.Content,
prompt string,
) string {
- geminiResponse, err := DoGeminiRequest(appConfig, client, geminiMemory, prompt)
+ geminiResponse, err := DoGeminiRequest(appConfig, geminiMemory, prompt)
if err != nil {
client.Cmd.ReplyTo(event, "error: "+err.Error())
@@ -787,7 +785,6 @@ func GeminiHandler(
func DoChatGPTRequest(
appConfig *TomlConfig,
- client *girc.Client,
gptMemory *[]openai.ChatCompletionMessage,
prompt string,
) (string, error) {
@@ -847,7 +844,7 @@ func ChatGPTRequestProcessor(
gptMemory *[]openai.ChatCompletionMessage,
prompt string,
) string {
- resp, err := DoChatGPTRequest(appConfig, client, gptMemory, prompt)
+ resp, err := DoChatGPTRequest(appConfig, gptMemory, prompt)
if err != nil {
client.Cmd.ReplyTo(event, "error: "+err.Error())