
Commit c16f2f2

updated the readme, remove girc.Client as an arg to the lua extension functions closures
terminaldweller committed Jun 9, 2024
1 parent b7185d9 commit c16f2f2
Showing 4 changed files with 42 additions and 27 deletions.
3 changes: 0 additions & 3 deletions Dockerfile
@@ -1,7 +1,4 @@
FROM golang:1.22-alpine3.20 as builder
-RUN apk update && \
-    apk upgrade && \
-    apk add go git
WORKDIR /milla
COPY go.sum go.mod /milla/
RUN go mod download
33 changes: 27 additions & 6 deletions README.md
@@ -294,6 +294,7 @@ ircProxy = "socks5://127.0.0.1:9050"
llmProxy = "http://127.0.0.1:8180"
skipTLSVerify = false
useTLS = true
+plugins = ["./plugins/rss.lua"]

[ircd.liberanet]
ircServer = "irc.libera.chat"
@@ -509,9 +510,9 @@ secrets:
  file: ./pgadmin/pgadmin_pass
```

-The env vars `UID`and `GID`need to be defined or they can replaces by your host user's uid and gid.<br/>
+The env vars `UID` and `GID` need to be defined, or they can be replaced by your host user's uid and gid.<br/>

-As a convenience, there is a a [distroless](https://github.com/GoogleContainerTools/distroless) dockerfile, `Dockerfile_distroless` also provided.<br/>
+As a convenience, a [distroless](https://github.com/GoogleContainerTools/distroless) dockerfile, `Dockerfile_distroless`, is also provided.<br/>
A vendored build of milla is available by first running `go mod vendor` and then using the provided dockerfile, `Dockerfile_distroless_vendored`.<br/>

### Build
@@ -609,6 +610,30 @@ end
rss_feed()
```

The `milla` lua module currently exports the following functions:

```lua
milla.send_message(msg, target)
milla.join_channel(channel)
milla.part_channel(channel)
milla.send_ollama_request(prompt)
milla.send_gemini_request(prompt)
milla.send_chatgpt_request(prompt)
```
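Putting these together, a hedged sketch of how a plugin might call them. The channel name and prompt here are made up for illustration; the module is assumed to be loaded with `require`, and the request functions are assumed to return the model's reply as a string, as the closures in plugins.go suggest:

```lua
local milla = require("milla")

-- illustrative only: channel name and prompt are placeholders
milla.join_channel("#milla-test")

-- assumed to return the model's reply as a string
local reply = milla.send_ollama_request("greet the channel in one line")
milla.send_message(reply, "#milla-test")

milla.part_channel("#milla-test")
```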

The example rss plugin accepts a yaml file as input, reads the provided rss feeds once, extracts the title, author name, and link for each entry, sends the feed over to the `#rssfeed` irc channel, and exits.<br/>
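A minimal sketch of that flow, with hard-coded entries standing in for the parsed yaml feed list (the real implementation lives in `./plugins/rss.lua`):

```lua
local milla = require("milla")

-- stand-in data: the real plugin reads these entries from its yaml input
local entries = {
    { title = "Release 1.0", author = "someone", link = "https://example.com/1" },
    { title = "Bug fix", author = "someone else", link = "https://example.com/2" },
}

-- format each entry and announce it on the feed channel
for _, entry in ipairs(entries) do
    local line = entry.title .. " by " .. entry.author .. ": " .. entry.link
    milla.send_message(line, "#rssfeed")
end
```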

More of milla's functionality will be made available through milla's lua module over time.<br/>
@@ -641,10 +666,6 @@ Milla would not exist without the following projects:
- [ollama](https://github.com/ollama/ollama)
- [toml](https://github.com/BurntSushi/toml)

-## TODO
-
-- plugins support
-
## Similar Projects

- [soulshack](https://github.com/pkdindustries/soulshack)
9 changes: 3 additions & 6 deletions main.go
@@ -481,7 +481,6 @@ func runCommand(

func DoOllamaRequest(
	appConfig *TomlConfig,
-	client *girc.Client,
	ollamaMemory *[]MemoryElement,
	prompt string,
) (string, error) {
@@ -587,7 +586,7 @@ func OllamaRequestProcessor(
	ollamaMemory *[]MemoryElement,
	prompt string,
) string {
-	response, err := DoOllamaRequest(appConfig, client, ollamaMemory, prompt)
+	response, err := DoOllamaRequest(appConfig, ollamaMemory, prompt)
	if err != nil {
		client.Cmd.ReplyTo(event, "error: "+err.Error())

@@ -661,7 +660,6 @@ func OllamaHandler(

func DoGeminiRequest(
	appConfig *TomlConfig,
-	client *girc.Client,
	geminiMemory *[]*genai.Content,
	prompt string,
) (string, error) {
@@ -700,7 +698,7 @@ func GeminiRequestProcessor(
	geminiMemory *[]*genai.Content,
	prompt string,
) string {
-	geminiResponse, err := DoGeminiRequest(appConfig, client, geminiMemory, prompt)
+	geminiResponse, err := DoGeminiRequest(appConfig, geminiMemory, prompt)
	if err != nil {
		client.Cmd.ReplyTo(event, "error: "+err.Error())

@@ -787,7 +785,6 @@ func GeminiHandler(

func DoChatGPTRequest(
	appConfig *TomlConfig,
-	client *girc.Client,
	gptMemory *[]openai.ChatCompletionMessage,
	prompt string,
) (string, error) {
@@ -847,7 +844,7 @@ func ChatGPTRequestProcessor(
	gptMemory *[]openai.ChatCompletionMessage,
	prompt string,
) string {
-	resp, err := DoChatGPTRequest(appConfig, client, gptMemory, prompt)
+	resp, err := DoChatGPTRequest(appConfig, gptMemory, prompt)
	if err != nil {
		client.Cmd.ReplyTo(event, "error: "+err.Error())

24 changes: 12 additions & 12 deletions plugins.go
@@ -212,11 +212,11 @@ func ircPartChannelClosure(luaState *lua.LState, client *girc.Client) func(*lua.LState) int {
	}
}

-func ollamaRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
+func ollamaRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
	return func(luaState *lua.LState) int {
		prompt := luaState.CheckString(1)

-		result, err := DoOllamaRequest(appConfig, client, &[]MemoryElement{}, prompt)
+		result, err := DoOllamaRequest(appConfig, &[]MemoryElement{}, prompt)
		if err != nil {
			log.Print(err)
		}
@@ -227,11 +227,11 @@ func ollamaRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
	}
}

-func geminiRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
+func geminiRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
	return func(luaState *lua.LState) int {
		prompt := luaState.CheckString(1)

-		result, err := DoGeminiRequest(appConfig, client, &[]*genai.Content{}, prompt)
+		result, err := DoGeminiRequest(appConfig, &[]*genai.Content{}, prompt)
		if err != nil {
			log.Print(err)
		}
@@ -242,11 +242,11 @@ func geminiRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
	}
}

-func chatGPTRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
+func chatGPTRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
	return func(luaState *lua.LState) int {
		prompt := luaState.CheckString(1)

-		result, err := DoChatGPTRequest(appConfig, client, &[]openai.ChatCompletionMessage{}, prompt)
+		result, err := DoChatGPTRequest(appConfig, &[]openai.ChatCompletionMessage{}, prompt)
		if err != nil {
			log.Print(err)
		}
@@ -260,12 +260,12 @@ func chatGPTRequestClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
func millaModuleLoaderClosure(luaState *lua.LState, client *girc.Client, appConfig *TomlConfig) func(*lua.LState) int {
	return func(luaState *lua.LState) int {
		exports := map[string]lua.LGFunction{
-			"send_message":          lua.LGFunction(sendMessageClosure(luaState, client)),
-			"join_channel":          lua.LGFunction(ircJoinChannelClosure(luaState, client)),
-			"part_channel":          lua.LGFunction(ircPartChannelClosure(luaState, client)),
-			"send_ollama_request":   lua.LGFunction(ollamaRequestClosure(luaState, client, appConfig)),
-			"send_gemini_request":   lua.LGFunction(geminiRequestClosure(luaState, client, appConfig)),
-			"send_chat_gpt_request": lua.LGFunction(chatGPTRequestClosure(luaState, client, appConfig)),
+			"send_message":         lua.LGFunction(sendMessageClosure(luaState, client)),
+			"join_channel":         lua.LGFunction(ircJoinChannelClosure(luaState, client)),
+			"part_channel":         lua.LGFunction(ircPartChannelClosure(luaState, client)),
+			"send_ollama_request":  lua.LGFunction(ollamaRequestClosure(luaState, appConfig)),
+			"send_gemini_request":  lua.LGFunction(geminiRequestClosure(luaState, appConfig)),
+			"send_chatgpt_request": lua.LGFunction(chatGPTRequestClosure(luaState, appConfig)),
		}
		millaModule := luaState.SetFuncs(luaState.NewTable(), exports)

