Skip to content

Commit

Permalink
added a new option, context. fixed a bug with the custom commands whe…
Browse files Browse the repository at this point in the history
…re the context was not being treated as such
  • Loading branch information
terminaldweller committed Sep 15, 2024
1 parent fa11101 commit fdba838
Show file tree
Hide file tree
Showing 4 changed files with 64 additions and 3 deletions.
13 changes: 13 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -235,6 +235,18 @@ webirc password to use.

webirc address to use.

#### context

The context to use for normal conversations with the bot. For example, this is how you tell your milla instance to act like a pirate.

```toml
context = ["you are a pirate. use the language and words a pirate would unless you are asked to do otherwise explicitly", "your name is captain blackbeard"]
```

```toml
context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french", "your name is terra"]
```

#### rssFile

The file that contains the RSS feeds.
Expand Down Expand Up @@ -351,6 +363,7 @@ skipTLSVerify = false
useTLS = true
adminOnly = false
plugins = ["/plugins/ip.lua", "/plugins/urban.lua"]
context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french"]
[ircd.devinet.watchlist.security]
watchList = ["#securityfeeds"]
watchFiles = ["/watchfiles/voidbox.list"]
Expand Down
1 change: 1 addition & 0 deletions config-example.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ llmProxy = "http://127.0.0.1:8180"
skipTLSVerify = false
useTLS = true
adminOnly = false
context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french", "your name is terra"]
plugins = ["/plugins/ip.lua", "/plugins/urban.lua"]
[ircd.devinet.watchlist.security]
watchList = ["#securityfeeds"]
Expand Down
52 changes: 49 additions & 3 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -345,7 +345,7 @@ func handleCustomCommand(

for _, customContext := range customCommand.Context {
gptMemory = append(gptMemory, openai.ChatCompletionMessage{
Role: openai.ChatMessageRoleUser,
Role: openai.ChatMessageRoleAssistant,
Content: customContext,
})
}
Expand Down Expand Up @@ -376,7 +376,7 @@ func handleCustomCommand(
Parts: []genai.Part{
genai.Text(customContext),
},
Role: "user",
Role: "model",
})
}

Expand All @@ -396,7 +396,7 @@ func handleCustomCommand(

for _, customContext := range customCommand.Context {
ollamaMemory = append(ollamaMemory, MemoryElement{
Role: "user",
Role: "assistant",
Content: customContext,
})
}
Expand Down Expand Up @@ -649,6 +649,13 @@ func DoOllamaRequest(

if len(*ollamaMemory) > appConfig.MemoryLimit {
*ollamaMemory = []MemoryElement{}

for _, context := range appConfig.Context {
*ollamaMemory = append(*ollamaMemory, MemoryElement{
Role: "assistant",
Content: context,
})
}
}

*ollamaMemory = append(*ollamaMemory, memoryElement)
Expand Down Expand Up @@ -887,6 +894,15 @@ func GeminiRequestProcessor(

if len(*geminiMemory) > appConfig.MemoryLimit {
*geminiMemory = []*genai.Content{}

for _, context := range appConfig.Context {
*geminiMemory = append(*geminiMemory, &genai.Content{
Parts: []genai.Part{
genai.Text(context),
},
Role: "model",
})
}
}

*geminiMemory = append(*geminiMemory, &genai.Content{
Expand Down Expand Up @@ -1036,6 +1052,13 @@ func ChatGPTRequestProcessor(

if len(*gptMemory) > appConfig.MemoryLimit {
*gptMemory = []openai.ChatCompletionMessage{}

for _, context := range appConfig.Context {
*gptMemory = append(*gptMemory, openai.ChatCompletionMessage{
Role: openai.ChatMessageRoleAssistant,
Content: context,
})
}
}

var writer bytes.Buffer
Expand Down Expand Up @@ -1312,10 +1335,33 @@ func runIRC(appConfig TomlConfig) {

switch appConfig.Provider {
case "ollama":
for _, context := range appConfig.Context {
OllamaMemory = append(OllamaMemory, MemoryElement{
Role: "assistant",
Content: context,
})
}

OllamaHandler(irc, &appConfig, &OllamaMemory)
case "gemini":
for _, context := range appConfig.Context {
GeminiMemory = append(GeminiMemory, &genai.Content{
Parts: []genai.Part{
genai.Text(context),
},
Role: "model",
})
}

GeminiHandler(irc, &appConfig, &GeminiMemory)
case "chatgpt":
for _, context := range appConfig.Context {
GPTMemory = append(GPTMemory, openai.ChatCompletionMessage{
Role: openai.ChatMessageRoleAssistant,
Content: context,
})
}

ChatGPTHandler(irc, &appConfig, &GPTMemory)
}

Expand Down
1 change: 1 addition & 0 deletions types.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ type TomlConfig struct {
WebIRCAddress string `toml:"webIRCAddress"`
RSSFile string `toml:"rssFile"`
Plugins []string `toml:"plugins"`
Context []string `toml:"context"`
CustomCommands map[string]CustomCommand `toml:"customCommands"`
WatchLists map[string]WatchList `toml:"watchList"`
LuaStates map[string]LuaLstates
Expand Down

0 comments on commit fdba838

Please sign in to comment.