diff options
Diffstat (limited to 'main.go')
-rw-r--r-- | main.go | 530 |
1 file changed, 0 insertions, 530 deletions
@@ -1,10 +1,8 @@ package main import ( - "bytes" "context" "crypto/tls" - "encoding/json" "errors" "expvar" "flag" @@ -27,7 +25,6 @@ import ( "time" "github.com/BurntSushi/toml" - "github.com/alecthomas/chroma/v2/quick" "github.com/cenkalti/backoff/v5" "github.com/google/generative-ai-go/genai" "github.com/jackc/pgx/v5" @@ -35,7 +32,6 @@ import ( "github.com/lrstanley/girc" openai "github.com/sashabaranov/go-openai" "golang.org/x/net/proxy" - "google.golang.org/api/option" ) var ( @@ -571,532 +567,6 @@ func runCommand( } } -func DoOllamaRequest( - appConfig *TomlConfig, - ollamaMemory *[]MemoryElement, - prompt, systemPrompt string, -) (string, error) { - var jsonPayload []byte - - var err error - - memoryElement := MemoryElement{ - Role: "user", - Content: prompt, - } - - if len(*ollamaMemory) > appConfig.MemoryLimit { - *ollamaMemory = []MemoryElement{} - - for _, context := range appConfig.Context { - *ollamaMemory = append(*ollamaMemory, MemoryElement{ - Role: "assistant", - Content: context, - }) - } - } - - *ollamaMemory = append(*ollamaMemory, memoryElement) - - ollamaRequest := OllamaChatRequest{ - Model: appConfig.Model, - KeepAlive: time.Duration(appConfig.KeepAlive), - Stream: false, - Messages: *ollamaMemory, - System: systemPrompt, - Options: OllamaRequestOptions{ - Mirostat: appConfig.OllamaMirostat, - MirostatEta: appConfig.OllamaMirostatEta, - MirostatTau: appConfig.OllamaMirostatTau, - NumCtx: appConfig.OllamaNumCtx, - RepeatLastN: appConfig.OllamaRepeatLastN, - RepeatPenalty: appConfig.OllamaRepeatPenalty, - Temperature: appConfig.Temperature, - Seed: appConfig.OllamaSeed, - NumPredict: appConfig.OllamaNumPredict, - TopK: appConfig.TopK, - TopP: appConfig.TopP, - MinP: appConfig.OllamaMinP, - }, - } - - jsonPayload, err = json.Marshal(ollamaRequest) - if err != nil { - return "", err - } - - log.Printf("json payload: %s", string(jsonPayload)) - - ctx, cancel := context.WithTimeout(context.Background(), 
time.Duration(appConfig.RequestTimeout)*time.Second) - defer cancel() - - request, err := http.NewRequest(http.MethodPost, appConfig.Endpoint, bytes.NewBuffer(jsonPayload)) - if err != nil { - return "", err - } - - request = request.WithContext(ctx) - request.Header.Set("Content-Type", "application/json") - - var httpClient http.Client - - var dialer proxy.Dialer - - if appConfig.LLMProxy != "" { - proxyURL, err := url.Parse(appConfig.IRCProxy) - if err != nil { - cancel() - - log.Fatal(err.Error()) - } - - dialer, err = proxy.FromURL(proxyURL, &net.Dialer{Timeout: time.Duration(appConfig.RequestTimeout) * time.Second}) - if err != nil { - cancel() - - log.Fatal(err.Error()) - } - - httpClient = http.Client{ - Transport: &http.Transport{ - Dial: dialer.Dial, - }, - } - } - - response, err := httpClient.Do(request) - if err != nil { - return "", err - } - - defer response.Body.Close() - - var ollamaChatResponse OllamaChatMessagesResponse - - err = json.NewDecoder(response.Body).Decode(&ollamaChatResponse) - if err != nil { - return "", err - } - - log.Println("ollama chat response: ", ollamaChatResponse) - - return ollamaChatResponse.Messages.Content, nil -} - -func OllamaRequestProcessor( - appConfig *TomlConfig, - client *girc.Client, - event girc.Event, - ollamaMemory *[]MemoryElement, - prompt, systemPrompt string, -) string { - response, err := DoOllamaRequest(appConfig, ollamaMemory, prompt, systemPrompt) - if err != nil { - client.Cmd.ReplyTo(event, "error: "+err.Error()) - - return "" - } - - assistantElement := MemoryElement{ - Role: "assistant", - Content: response, - } - - *ollamaMemory = append(*ollamaMemory, assistantElement) - - log.Println(response) - - var writer bytes.Buffer - - err = quick.Highlight(&writer, - response, - "markdown", - appConfig.ChromaFormatter, - appConfig.ChromaStyle) - if err != nil { - client.Cmd.ReplyTo(event, "error: "+err.Error()) - - return "" - } - - return writer.String() -} - -func OllamaHandler( - irc *girc.Client, - 
appConfig *TomlConfig, - ollamaMemory *[]MemoryElement, -) { - irc.Handlers.AddBg(girc.PRIVMSG, func(client *girc.Client, event girc.Event) { - if !strings.HasPrefix(event.Last(), appConfig.IrcNick+": ") { - return - } - - if appConfig.AdminOnly { - byAdmin := false - - for _, admin := range appConfig.Admins { - if event.Source.Name == admin { - byAdmin = true - } - } - - if !byAdmin { - return - } - } - - prompt := strings.TrimPrefix(event.Last(), appConfig.IrcNick+": ") - log.Println(prompt) - - if string(prompt[0]) == "/" { - runCommand(client, event, appConfig) - - return - } - - result := OllamaRequestProcessor(appConfig, client, event, ollamaMemory, prompt, appConfig.SystemPrompt) - if result != "" { - SendToIRC(client, event, result, appConfig.ChromaFormatter) - } - }) -} - -func (t *ProxyRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { - transport := http.DefaultTransport.(*http.Transport).Clone() - - if t.ProxyURL != "" { - proxyURL, err := url.Parse(t.ProxyURL) - if err != nil { - return nil, err - } - - transport.Proxy = http.ProxyURL(proxyURL) - } - - newReq := req.Clone(req.Context()) - vals := newReq.URL.Query() - vals.Set("key", t.APIKey) - newReq.URL.RawQuery = vals.Encode() - - resp, err := transport.RoundTrip(newReq) - if err != nil { - return nil, err - } - - return resp, nil -} - -func DoGeminiRequest( - appConfig *TomlConfig, - geminiMemory *[]*genai.Content, - prompt, systemPrompt string, -) (string, error) { - httpProxyClient := &http.Client{Transport: &ProxyRoundTripper{ - APIKey: appConfig.Apikey, - ProxyURL: appConfig.LLMProxy, - }} - - ctx, cancel := context.WithTimeout(context.Background(), time.Duration(appConfig.RequestTimeout)*time.Second) - defer cancel() - - clientGemini, err := genai.NewClient(ctx, option.WithHTTPClient(httpProxyClient)) - if err != nil { - return "", fmt.Errorf("Could not create a genai client.", err) - } - defer clientGemini.Close() - - model := clientGemini.GenerativeModel(appConfig.Model) - 
model.SetTemperature(float32(appConfig.Temperature)) - model.SetTopK(appConfig.TopK) - model.SetTopP(appConfig.TopP) - model.SystemInstruction = &genai.Content{ - Parts: []genai.Part{ - genai.Text(systemPrompt), - }, - } - model.SafetySettings = []*genai.SafetySetting{ - { - Category: genai.HarmCategoryDangerousContent, - Threshold: genai.HarmBlockNone, - }, - { - Category: genai.HarmCategoryHarassment, - Threshold: genai.HarmBlockNone, - }, - { - Category: genai.HarmCategoryHateSpeech, - Threshold: genai.HarmBlockNone, - }, - { - Category: genai.HarmCategorySexuallyExplicit, - Threshold: genai.HarmBlockNone, - }, - } - - cs := model.StartChat() - - cs.History = *geminiMemory - - resp, err := cs.SendMessage(ctx, genai.Text(prompt)) - if err != nil { - return "", fmt.Errorf("Gemini: Could not send message", err) - } - - return returnGeminiResponse(resp), nil -} - -func GeminiRequestProcessor( - appConfig *TomlConfig, - client *girc.Client, - event girc.Event, - geminiMemory *[]*genai.Content, - prompt, systemPrompt string, -) string { - geminiResponse, err := DoGeminiRequest(appConfig, geminiMemory, prompt, systemPrompt) - if err != nil { - client.Cmd.ReplyTo(event, "error: "+err.Error()) - - return "" - } - - log.Println(geminiResponse) - - if len(*geminiMemory) > appConfig.MemoryLimit { - *geminiMemory = []*genai.Content{} - - for _, context := range appConfig.Context { - *geminiMemory = append(*geminiMemory, &genai.Content{ - Parts: []genai.Part{ - genai.Text(context), - }, - Role: "model", - }) - } - } - - *geminiMemory = append(*geminiMemory, &genai.Content{ - Parts: []genai.Part{ - genai.Text(prompt), - }, - Role: "user", - }) - - *geminiMemory = append(*geminiMemory, &genai.Content{ - Parts: []genai.Part{ - genai.Text(geminiResponse), - }, - Role: "model", - }) - - var writer bytes.Buffer - - err = quick.Highlight( - &writer, - geminiResponse, - "markdown", - appConfig.ChromaFormatter, - appConfig.ChromaStyle) - if err != nil { - client.Cmd.ReplyTo(event, 
"error: "+err.Error()) - - return "" - } - - return writer.String() -} - -func GeminiHandler( - irc *girc.Client, - appConfig *TomlConfig, - geminiMemory *[]*genai.Content, -) { - irc.Handlers.AddBg(girc.PRIVMSG, func(client *girc.Client, event girc.Event) { - if !strings.HasPrefix(event.Last(), appConfig.IrcNick+": ") { - return - } - - if appConfig.AdminOnly { - byAdmin := false - - for _, admin := range appConfig.Admins { - if event.Source.Name == admin { - byAdmin = true - } - } - - if !byAdmin { - return - } - } - - prompt := strings.TrimPrefix(event.Last(), appConfig.IrcNick+": ") - log.Println(prompt) - - if string(prompt[0]) == "/" { - runCommand(client, event, appConfig) - - return - } - - result := GeminiRequestProcessor(appConfig, client, event, geminiMemory, prompt, appConfig.SystemPrompt) - - if result != "" { - SendToIRC(client, event, result, appConfig.ChromaFormatter) - } - }) -} - -func DoChatGPTRequest( - appConfig *TomlConfig, - gptMemory *[]openai.ChatCompletionMessage, - prompt, systemPrompt string, -) (string, error) { - ctx, cancel := context.WithTimeout(context.Background(), time.Duration(appConfig.RequestTimeout)*time.Second) - defer cancel() - - var httpClient http.Client - - if appConfig.LLMProxy != "" { - proxyURL, err := url.Parse(appConfig.IRCProxy) - if err != nil { - cancel() - - return "", err - } - - dialer, err := proxy.FromURL(proxyURL, &net.Dialer{Timeout: time.Duration(appConfig.RequestTimeout) * time.Second}) - if err != nil { - cancel() - - return "", err - } - - httpClient = http.Client{ - Transport: &http.Transport{ - Dial: dialer.Dial, - }, - } - } - - config := openai.DefaultConfig(appConfig.Apikey) - config.HTTPClient = &httpClient - - if appConfig.Endpoint != "" { - config.BaseURL = appConfig.Endpoint - log.Print(config.BaseURL) - } - - gptClient := openai.NewClientWithConfig(config) - - *gptMemory = append(*gptMemory, openai.ChatCompletionMessage{ - Role: openai.ChatMessageRoleSystem, - Content: systemPrompt, - }) - - 
*gptMemory = append(*gptMemory, openai.ChatCompletionMessage{ - Role: openai.ChatMessageRoleUser, - Content: prompt, - }) - - resp, err := gptClient.CreateChatCompletion(ctx, openai.ChatCompletionRequest{ - Model: appConfig.Model, - Messages: *gptMemory, - }) - if err != nil { - return "", err - } - - return resp.Choices[0].Message.Content, nil -} - -func ChatGPTRequestProcessor( - appConfig *TomlConfig, - client *girc.Client, - event girc.Event, - gptMemory *[]openai.ChatCompletionMessage, - prompt, systemPrompt string, -) string { - resp, err := DoChatGPTRequest(appConfig, gptMemory, prompt, systemPrompt) - if err != nil { - client.Cmd.ReplyTo(event, "error: "+err.Error()) - - return "" - } - - *gptMemory = append(*gptMemory, openai.ChatCompletionMessage{ - Role: openai.ChatMessageRoleAssistant, - Content: resp, - }) - - if len(*gptMemory) > appConfig.MemoryLimit { - *gptMemory = []openai.ChatCompletionMessage{} - - for _, context := range appConfig.Context { - *gptMemory = append(*gptMemory, openai.ChatCompletionMessage{ - Role: openai.ChatMessageRoleAssistant, - Content: context, - }) - } - } - - var writer bytes.Buffer - - err = quick.Highlight( - &writer, - resp, - "markdown", - appConfig.ChromaFormatter, - appConfig.ChromaStyle) - if err != nil { - client.Cmd.ReplyTo(event, "error: "+err.Error()) - - return "" - } - - return writer.String() -} - -func ChatGPTHandler( - irc *girc.Client, - appConfig *TomlConfig, - gptMemory *[]openai.ChatCompletionMessage, -) { - irc.Handlers.AddBg(girc.PRIVMSG, func(client *girc.Client, event girc.Event) { - if !strings.HasPrefix(event.Last(), appConfig.IrcNick+": ") { - return - } - - if appConfig.AdminOnly { - byAdmin := false - - for _, admin := range appConfig.Admins { - if event.Source.Name == admin { - byAdmin = true - } - } - - if !byAdmin { - return - } - } - - prompt := strings.TrimPrefix(event.Last(), appConfig.IrcNick+": ") - log.Println(prompt) - - if string(prompt[0]) == "/" { - runCommand(client, event, 
appConfig) - - return - } - - result := ChatGPTRequestProcessor(appConfig, client, event, gptMemory, prompt, appConfig.SystemPrompt) - if result != "" { - SendToIRC(client, event, result, appConfig.ChromaFormatter) - } - }) -} - func connectToDB(appConfig *TomlConfig, ctx *context.Context, irc *girc.Client) { var pool *pgxpool.Pool |