Skip to content

Commit

Permalink
added openrouter as a provider
Browse files Browse the repository at this point in the history
  • Loading branch information
terminaldweller committed Oct 28, 2024
1 parent fdba838 commit e22d58c
Show file tree
Hide file tree
Showing 6 changed files with 315 additions and 4 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

Milla is an IRC bot that:

- sends things over to an LLM when you ask it questions and prints the answer with optional syntax-highlighting.Currently supported providers: Ollama, Openai, Gemini <br/>
- sends things over to an LLM when you ask it questions and prints the answer with optional syntax-highlighting. Currently supported providers: Ollama, Openai, Gemini, Openrouter <br/>
- Milla can run more than one instance of itself
- Each instance can connect to a different ircd, and will get the full set of configs, e.g. different proxies, different postgres instance, ...
- You can define custom commands in the form of SQL queries to the database with the SQL query result being passed to the bot along with the given prompt and an optional limit so you don't go bankrupt(unless you are running ollama locally like the smart cookie that you are).<br/>
Expand Down Expand Up @@ -45,7 +45,7 @@ The SASL username.

The SASL password for SASL plain authentication. Can also be passed as an environment variable.

#### ollamaEndpoint
#### Endpoint

The address of the LLM provider's chat endpoint (e.g. Ollama or Openrouter).

Expand Down
38 changes: 37 additions & 1 deletion main.go
Original file line number Diff line number Diff line change
Expand Up @@ -405,6 +405,27 @@ func handleCustomCommand(
if result != "" {
sendToIRC(client, event, result, appConfig.ChromaFormatter)
}
case "openrouter":
var memory []MemoryElement

for _, log := range logs {
memory = append(memory, MemoryElement{
Role: "user",
Content: log.Log,
})
}

for _, customContext := range customCommand.Context {
memory = append(memory, MemoryElement{
Role: "user",
Content: customContext,
})
}

result := ORRequestProcessor(appConfig, client, event, &memory, customCommand.Prompt)
if result != "" {
sendToIRC(client, event, result, appConfig.ChromaFormatter)
}
default:
}
}
Expand Down Expand Up @@ -681,7 +702,7 @@ func DoOllamaRequest(
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(appConfig.RequestTimeout)*time.Second)
defer cancel()

request, err := http.NewRequest(http.MethodPost, appConfig.OllamaEndpoint, bytes.NewBuffer(jsonPayload))
request, err := http.NewRequest(http.MethodPost, appConfig.Endpoint, bytes.NewBuffer(jsonPayload))
if err != nil {

return "", err
Expand Down Expand Up @@ -1011,6 +1032,10 @@ func DoChatGPTRequest(

config := openai.DefaultConfig(appConfig.Apikey)
config.HTTPClient = &httpClient
if appConfig.Endpoint != "" {
config.BaseURL = appConfig.Endpoint
log.Print(config.BaseURL)
}

gptClient := openai.NewClientWithConfig(config)

Expand Down Expand Up @@ -1264,6 +1289,8 @@ func runIRC(appConfig TomlConfig) {

var GPTMemory []openai.ChatCompletionMessage

var ORMemory []MemoryElement

poolChan := make(chan *pgxpool.Pool, 1)

irc := girc.New(girc.Config{
Expand Down Expand Up @@ -1363,6 +1390,15 @@ func runIRC(appConfig TomlConfig) {
}

ChatGPTHandler(irc, &appConfig, &GPTMemory)
case "openrouter":
for _, context := range appConfig.Context {
ORMemory = append(ORMemory, MemoryElement{
Role: "user",
Content: context,
})
}

ORHandler(irc, &appConfig, &ORMemory)
}

go LoadAllPlugins(&appConfig, irc)
Expand Down
30 changes: 30 additions & 0 deletions makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
.PHONY: d_test d_deploy d_down d_build help

IMAGE_NAME=milla

# Bring up the dev compose stack (queued via nq).
d_test:
	nq docker compose -f ./docker-compose-devi.yaml up --build

# Bring up the production compose stack (queued via nq).
d_deploy:
	nq docker compose -f ./docker-compose.yaml up --build

# Tear down both compose stacks.
d_down:
	docker compose -f ./docker-compose.yaml down
	docker compose -f ./docker-compose-devi.yaml down

# Default build target: distroless image from vendored deps.
d_build: d_build_distroless_vendored

d_build_regular:
	docker build -t $(IMAGE_NAME) -f ./Dockerfile .

d_build_distroless:
	docker build -t $(IMAGE_NAME) -f ./Dockerfile_distroless .

d_build_distroless_vendored:
	docker build -t $(IMAGE_NAME) -f ./Dockerfile_distroless_vendored .

help:
	@echo "d_test"
	@echo "d_deploy"
	@echo "d_down"
	@echo "d_build"
200 changes: 200 additions & 0 deletions openrouter.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,200 @@
package main

import (
"bytes"
"context"
"encoding/json"
"log"
"net"
"net/http"
"net/url"
"strings"
"time"

"github.com/alecthomas/chroma/v2/quick"
"github.com/lrstanley/girc"
"golang.org/x/net/proxy"
)

// DoORRequest sends the rolling conversation memory plus the new user
// prompt to the OpenRouter chat-completions endpoint (appConfig.Endpoint)
// and returns the concatenated message contents of all returned choices.
// The new prompt and nothing else is appended to *memory here; the
// assistant reply is recorded by the caller.
func DoORRequest(
	appConfig *TomlConfig,
	memory *[]MemoryElement,
	prompt string,
) (string, error) {
	var jsonPayload []byte

	var err error

	memoryElement := MemoryElement{
		Role:    "user",
		Content: prompt,
	}

	// When the rolling memory exceeds the configured limit, drop it and
	// re-seed it with the configured context lines.
	if len(*memory) > appConfig.MemoryLimit {
		*memory = []MemoryElement{}

		for _, context := range appConfig.Context {
			*memory = append(*memory, MemoryElement{
				Role:    "assistant",
				Content: context,
			})
		}
	}

	*memory = append(*memory, memoryElement)

	// OpenRouter accepts the same request shape as the Ollama chat API,
	// so the Ollama request struct is reused here.
	orRequest := OllamaChatRequest{
		Model:    appConfig.Model,
		Messages: *memory,
	}

	jsonPayload, err = json.Marshal(orRequest)
	if err != nil {
		return "", err
	}

	log.Printf("json payload: %s", string(jsonPayload))

	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(appConfig.RequestTimeout)*time.Second)
	defer cancel()

	request, err := http.NewRequest(http.MethodPost, appConfig.Endpoint, bytes.NewBuffer(jsonPayload))
	if err != nil {
		return "", err
	}

	request = request.WithContext(ctx)
	request.Header.Set("content-type", "application/json")
	request.Header.Set("Authorization", "Bearer "+appConfig.Apikey)

	var httpClient http.Client

	var dialer proxy.Dialer

	if appConfig.LLMProxy != "" {
		// BUG FIX: the original parsed appConfig.IRCProxy here even though
		// the guard checks LLMProxy; the LLM request must use the LLM proxy.
		proxyURL, err := url.Parse(appConfig.LLMProxy)
		if err != nil {
			// Return the error instead of log.Fatal: a bad proxy URL for one
			// request should not terminate the whole bot.
			return "", err
		}

		dialer, err = proxy.FromURL(proxyURL, &net.Dialer{Timeout: time.Duration(appConfig.RequestTimeout) * time.Second})
		if err != nil {
			return "", err
		}

		httpClient = http.Client{
			Transport: &http.Transport{
				Dial: dialer.Dial,
			},
		}
	}

	response, err := httpClient.Do(request)
	if err != nil {
		return "", err
	}

	defer response.Body.Close()

	var orresponse ORResponse

	err = json.NewDecoder(response.Body).Decode(&orresponse)
	if err != nil {
		return "", err
	}

	var result string

	for _, choice := range orresponse.Choices {
		result += choice.Message.Content + "\n"
	}

	return result, nil
}

// ORRequestProcessor sends prompt (with the accumulated memory) to
// OpenRouter via DoORRequest, records the assistant reply in *memory,
// and returns the reply highlighted as markdown with chroma. On any
// error it replies to the IRC event with the error text and returns
// the empty string.
func ORRequestProcessor(
	appConfig *TomlConfig,
	client *girc.Client,
	event girc.Event,
	memory *[]MemoryElement,
	prompt string,
) string {
	response, err := DoORRequest(appConfig, memory, prompt)
	if err != nil {
		client.Cmd.ReplyTo(event, "error: "+err.Error())

		return ""
	}

	// Remember the assistant's reply so follow-up prompts have context.
	*memory = append(*memory, MemoryElement{
		Role:    "assistant",
		Content: response,
	})

	log.Println(response)

	var highlighted bytes.Buffer

	err = quick.Highlight(&highlighted,
		response,
		"markdown",
		appConfig.ChromaFormatter,
		appConfig.ChromaStyle)
	if err != nil {
		client.Cmd.ReplyTo(event, "error: "+err.Error())

		return ""
	}

	return highlighted.String()
}

// ORHandler registers a background PRIVMSG handler that feeds messages
// addressed to the bot ("<nick>: ...") to the OpenRouter provider and
// sends the highlighted reply back to IRC. Messages starting with "/"
// are dispatched to runCommand instead.
func ORHandler(
	irc *girc.Client,
	appConfig *TomlConfig,
	memory *[]MemoryElement) {
	irc.Handlers.AddBg(girc.PRIVMSG, func(client *girc.Client, event girc.Event) {
		// Only react to messages explicitly addressed to the bot.
		if !strings.HasPrefix(event.Last(), appConfig.IrcNick+": ") {
			return
		}

		if appConfig.AdminOnly {
			byAdmin := false

			for _, admin := range appConfig.Admins {
				if event.Source.Name == admin {
					byAdmin = true

					break
				}
			}

			if !byAdmin {
				return
			}
		}

		prompt := strings.TrimPrefix(event.Last(), appConfig.IrcNick+": ")
		log.Println(prompt)

		// BUG FIX: the original indexed prompt[0], which panics when the
		// message is exactly "<nick>: " (empty prompt). HasPrefix handles
		// the empty string safely.
		if strings.HasPrefix(prompt, "/") {
			runCommand(client, event, appConfig)

			return
		}

		result := ORRequestProcessor(appConfig, client, event, memory, prompt)
		if result != "" {
			sendToIRC(client, event, result, appConfig.ChromaFormatter)
		}
	})
}
16 changes: 16 additions & 0 deletions plugins.go
Original file line number Diff line number Diff line change
Expand Up @@ -238,6 +238,21 @@ func ircPartChannelClosure(luaState *lua.LState, client *girc.Client) func(*lua.
}
}

// orRequestClosure exposes DoORRequest to lua plugins: it takes a prompt
// string argument and pushes the provider's reply (empty on error, with
// the error logged) back onto the lua stack.
func orRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
	return func(state *lua.LState) int {
		prompt := state.CheckString(1)

		// Each plugin call starts from an empty memory; plugin requests do
		// not share conversation state with the IRC handler.
		var scratch []MemoryElement

		result, err := DoORRequest(appConfig, &scratch, prompt)
		if err != nil {
			LogError(err)
		}

		state.Push(lua.LString(result))

		return 1
	}
}

func ollamaRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
return func(luaState *lua.LState) int {
prompt := luaState.CheckString(1)
Expand Down Expand Up @@ -334,6 +349,7 @@ func millaModuleLoaderClosure(luaState *lua.LState, client *girc.Client, appConf
"send_ollama_request": lua.LGFunction(ollamaRequestClosure(luaState, appConfig)),
"send_gemini_request": lua.LGFunction(geminiRequestClosure(luaState, appConfig)),
"send_chatgpt_request": lua.LGFunction(chatGPTRequestClosure(luaState, appConfig)),
"send_or_request": lua.LGFunction(orRequestClosure(luaState, appConfig)),
"query_db": lua.LGFunction(dbQueryClosure(luaState, appConfig)),
"register_cmd": lua.LGFunction(registerLuaCommand(luaState, appConfig)),
"url_encode": lua.LGFunction(urlEncode(luaState)),
Expand Down
Loading

0 comments on commit e22d58c

Please sign in to comment.