Skip to content

Commit f14241a

Browse files
committed
Refactoring LLM Plugin, update docs.
1 parent 8703d1a commit f14241a

File tree

8 files changed

+82
-58
lines changed

8 files changed

+82
-58
lines changed

README.md

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -211,9 +211,9 @@ commands:
211211
212212
#### Example SSH Honeypot
213213
214-
###### Honeypot LLM Honeypots
214+
###### LLM Honeypots
215215
216-
Example with OpenAI GPT-4:
216+
Follow an example of an SSH LLM Honeypot using OpenAI as the LLM provider:
217217
218218
```yaml
219219
apiVersion: "v1"
@@ -228,11 +228,12 @@ serverName: "ubuntu"
228228
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
229229
deadlineTimeoutSeconds: 60
230230
plugin:
231-
llmModel: "gpt4-o"
231+
llmProvider: "openai"
232+
llmModel: "gpt4-o" #Models https://platform.openai.com/docs/models
232233
openAISecretKey: "sk-proj-123456"
233234
```
234235
235-
Example with Ollama Llama3:
236+
Example with a local Ollama instance using the codellama:7b model:
236237
237238
```yaml
238239
apiVersion: "v1"
@@ -247,7 +248,8 @@ serverName: "ubuntu"
247248
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
248249
deadlineTimeoutSeconds: 60
249250
plugin:
250-
llmModel: "llama3"
251+
llmProvider: "ollama"
252+
llmModel: "codellama:7b" #Models https://ollama.com/search
251253
host: "http://example.com/api/chat" #default http://localhost:11434/api/chat
252254
```
253255
Example with custom prompt:
@@ -265,6 +267,7 @@ serverName: "ubuntu"
265267
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
266268
deadlineTimeoutSeconds: 60
267269
plugin:
270+
llmProvider: "openai"
268271
llmModel: "gpt4-o"
269272
openAISecretKey: "sk-proj-123456"
270273
prompt: "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block."

configurations/services/ssh-2222.yaml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,9 @@ commands:
77
plugin: "LLMHoneypot"
88
serverVersion: "OpenSSH"
99
serverName: "ubuntu"
10-
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
10+
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456|1234)$"
1111
deadlineTimeoutSeconds: 6000
1212
plugin:
13-
llmModel: "llama3"
13+
llmProvider: "openai"
14+
llmModel: "gpt4-o"
15+
openAISecretKey: "sk-proj-12345"

parser/configurations_parser.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ type Plugin struct {
5252
OpenAISecretKey string `yaml:"openAISecretKey"`
5353
Host string `yaml:"host"`
5454
LLMModel string `yaml:"llmModel"`
55+
LLMProvider string `yaml:"llmProvider"`
5556
Prompt string `yaml:"prompt"`
5657
}
5758

parser/configurations_parser_test.go

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ commands:
5959
plugin:
6060
openAISecretKey: "qwerty"
6161
llmModel: "llama3"
62+
llmProvider: "ollama"
6263
host: "localhost:1563"
6364
prompt: "hello world"
6465
`)
@@ -135,6 +136,7 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
135136
assert.Equal(t, firstBeelzebubServiceConfiguration.Commands[0].Headers[0], "Content-Type: text/html")
136137
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.OpenAISecretKey, "qwerty")
137138
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMModel, "llama3")
139+
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMProvider, "ollama")
138140
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Host, "localhost:1563")
139141
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Prompt, "hello world")
140142
assert.Equal(t, firstBeelzebubServiceConfiguration.TLSCertPath, "/tmp/cert.crt")

plugins/llm-integration.go

Lines changed: 21 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -8,13 +8,14 @@ import (
88
"github.com/mariocandela/beelzebub/v3/tracer"
99
log "github.com/sirupsen/logrus"
1010
"regexp"
11+
"strings"
1112
)
1213

1314
const (
1415
systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide note. Do not provide explanations or type commands unless explicitly instructed by the user. Your entire response/output is going to consist of a simple text with \n for new line, and you will NOT wrap it within string md markers"
1516
systemPromptVirtualizeHTTPServer = "You will act as an unsecure HTTP Server with multiple vulnerability like aws and git credentials stored into root http directory. The user will send HTTP requests, and you are to reply with what the server should show. Do not provide explanations or type commands unless explicitly instructed by the user."
1617
LLMPluginName = "LLMHoneypot"
17-
openAIGPTEndpoint = "https://api.openai.com/v1/chat/completions"
18+
openAIEndpoint = "https://api.openai.com/v1/chat/completions"
1819
ollamaEndpoint = "http://localhost:11434/api/chat"
1920
)
2021

@@ -23,7 +24,8 @@ type LLMHoneypot struct {
2324
OpenAIKey string
2425
client *resty.Client
2526
Protocol tracer.Protocol
26-
Model LLMModel
27+
Provider LLMProvider
28+
Model string
2729
Host string
2830
CustomPrompt string
2931
}
@@ -71,21 +73,21 @@ func (role Role) String() string {
7173
return [...]string{"system", "user", "assistant"}[role]
7274
}
7375

74-
type LLMModel int
76+
type LLMProvider int
7577

7678
const (
77-
LLAMA3 LLMModel = iota
78-
GPT4O
79+
Ollama LLMProvider = iota
80+
OpenAI
7981
)
8082

81-
func FromStringToLLMModel(llmModel string) (LLMModel, error) {
82-
switch llmModel {
83-
case "llama3":
84-
return LLAMA3, nil
85-
case "gpt4-o":
86-
return GPT4O, nil
83+
func FromStringToLLMProvider(llmProvider string) (LLMProvider, error) {
84+
switch strings.ToLower(llmProvider) {
85+
case "ollama":
86+
return Ollama, nil
87+
case "openai":
88+
return OpenAI, nil
8789
default:
88-
return -1, fmt.Errorf("model %s not found", llmModel)
90+
return -1, fmt.Errorf("provider %s not found, valid providers: ollama, openai", llmProvider)
8991
}
9092
}
9193

@@ -153,7 +155,7 @@ func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error)
153155
var err error
154156

155157
requestJson, err := json.Marshal(Request{
156-
Model: "gpt-4o",
158+
Model: llmHoneypot.Model,
157159
Messages: messages,
158160
Stream: false,
159161
})
@@ -166,7 +168,7 @@ func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error)
166168
}
167169

168170
if llmHoneypot.Host == "" {
169-
llmHoneypot.Host = openAIGPTEndpoint
171+
llmHoneypot.Host = openAIEndpoint
170172
}
171173

172174
log.Debug(string(requestJson))
@@ -192,7 +194,7 @@ func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error)
192194
var err error
193195

194196
requestJson, err := json.Marshal(Request{
195-
Model: "llama3",
197+
Model: llmHoneypot.Model,
196198
Messages: messages,
197199
Stream: false,
198200
})
@@ -229,13 +231,13 @@ func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
229231
return "", err
230232
}
231233

232-
switch llmHoneypot.Model {
233-
case LLAMA3:
234+
switch llmHoneypot.Provider {
235+
case Ollama:
234236
return llmHoneypot.ollamaCaller(prompt)
235-
case GPT4O:
237+
case OpenAI:
236238
return llmHoneypot.openAICaller(prompt)
237239
default:
238-
return "", errors.New("no model selected")
240+
return "", fmt.Errorf("provider %d not found, valid providers: ollama, openai", llmHoneypot.Provider)
239241
}
240242
}
241243

plugins/llm-integration_test.go

Lines changed: 32 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,8 @@ func TestBuildExecuteModelFailValidation(t *testing.T) {
8585
Histories: make([]Message, 0),
8686
OpenAIKey: "",
8787
Protocol: tracer.SSH,
88-
Model: GPT4O,
88+
Model: "gpt4-o",
89+
Provider: OpenAI,
8990
}
9091

9192
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -101,7 +102,7 @@ func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
101102
defer httpmock.DeactivateAndReset()
102103

103104
// Given
104-
httpmock.RegisterMatcherResponder("POST", openAIGPTEndpoint,
105+
httpmock.RegisterMatcherResponder("POST", openAIEndpoint,
105106
httpmock.BodyContainsString("hello world"),
106107
func(req *http.Request) (*http.Response, error) {
107108
resp, err := httpmock.NewJsonResponse(200, &Response{
@@ -125,7 +126,8 @@ func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
125126
Histories: make([]Message, 0),
126127
OpenAIKey: "sdjdnklfjndslkjanfk",
127128
Protocol: tracer.HTTP,
128-
Model: GPT4O,
129+
Model: "gpt4-o",
130+
Provider: OpenAI,
129131
CustomPrompt: "hello world",
130132
}
131133

@@ -146,7 +148,8 @@ func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {
146148
Histories: make([]Message, 0),
147149
OpenAIKey: "",
148150
Protocol: tracer.TCP,
149-
Model: GPT4O,
151+
Model: "gpt4-o",
152+
Provider: OpenAI,
150153
}
151154

152155
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -161,7 +164,8 @@ func TestBuildExecuteModelFailValidationModelType(t *testing.T) {
161164
llmHoneypot := LLMHoneypot{
162165
Histories: make([]Message, 0),
163166
Protocol: tracer.SSH,
164-
Model: 5,
167+
Model: "llama3",
168+
Provider: 5,
165169
}
166170

167171
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -179,7 +183,7 @@ func TestBuildExecuteModelSSHWithResultsOpenAI(t *testing.T) {
179183
defer httpmock.DeactivateAndReset()
180184

181185
// Given
182-
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
186+
httpmock.RegisterResponder("POST", openAIEndpoint,
183187
func(req *http.Request) (*http.Response, error) {
184188
resp, err := httpmock.NewJsonResponse(200, &Response{
185189
Choices: []Choice{
@@ -202,7 +206,8 @@ func TestBuildExecuteModelSSHWithResultsOpenAI(t *testing.T) {
202206
Histories: make([]Message, 0),
203207
OpenAIKey: "sdjdnklfjndslkjanfk",
204208
Protocol: tracer.SSH,
205-
Model: GPT4O,
209+
Model: "gpt4-o",
210+
Provider: OpenAI,
206211
}
207212

208213
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -240,7 +245,8 @@ func TestBuildExecuteModelSSHWithResultsLLama(t *testing.T) {
240245
llmHoneypot := LLMHoneypot{
241246
Histories: make([]Message, 0),
242247
Protocol: tracer.SSH,
243-
Model: LLAMA3,
248+
Model: "llama3",
249+
Provider: Ollama,
244250
}
245251

246252
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -260,7 +266,7 @@ func TestBuildExecuteModelSSHWithoutResults(t *testing.T) {
260266
defer httpmock.DeactivateAndReset()
261267

262268
// Given
263-
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
269+
httpmock.RegisterResponder("POST", openAIEndpoint,
264270
func(req *http.Request) (*http.Response, error) {
265271
resp, err := httpmock.NewJsonResponse(200, &Response{
266272
Choices: []Choice{},
@@ -276,7 +282,8 @@ func TestBuildExecuteModelSSHWithoutResults(t *testing.T) {
276282
Histories: make([]Message, 0),
277283
OpenAIKey: "sdjdnklfjndslkjanfk",
278284
Protocol: tracer.SSH,
279-
Model: GPT4O,
285+
Model: "gpt4-o",
286+
Provider: OpenAI,
280287
}
281288

282289
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -295,7 +302,7 @@ func TestBuildExecuteModelHTTPWithResults(t *testing.T) {
295302
defer httpmock.DeactivateAndReset()
296303

297304
// Given
298-
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
305+
httpmock.RegisterResponder("POST", openAIEndpoint,
299306
func(req *http.Request) (*http.Response, error) {
300307
resp, err := httpmock.NewJsonResponse(200, &Response{
301308
Choices: []Choice{
@@ -318,7 +325,8 @@ func TestBuildExecuteModelHTTPWithResults(t *testing.T) {
318325
Histories: make([]Message, 0),
319326
OpenAIKey: "sdjdnklfjndslkjanfk",
320327
Protocol: tracer.HTTP,
321-
Model: GPT4O,
328+
Model: "gpt4-o",
329+
Provider: OpenAI,
322330
}
323331

324332
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -338,7 +346,7 @@ func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
338346
defer httpmock.DeactivateAndReset()
339347

340348
// Given
341-
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
349+
httpmock.RegisterResponder("POST", openAIEndpoint,
342350
func(req *http.Request) (*http.Response, error) {
343351
resp, err := httpmock.NewJsonResponse(200, &Response{
344352
Choices: []Choice{},
@@ -354,7 +362,8 @@ func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
354362
Histories: make([]Message, 0),
355363
OpenAIKey: "sdjdnklfjndslkjanfk",
356364
Protocol: tracer.HTTP,
357-
Model: GPT4O,
365+
Model: "gpt4-o",
366+
Provider: OpenAI,
358367
}
359368

360369
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -368,16 +377,16 @@ func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
368377
}
369378

370379
func TestFromString(t *testing.T) {
371-
model, err := FromStringToLLMModel("llama3")
380+
model, err := FromStringToLLMProvider("openai")
372381
assert.Nil(t, err)
373-
assert.Equal(t, LLAMA3, model)
382+
assert.Equal(t, OpenAI, model)
374383

375-
model, err = FromStringToLLMModel("gpt4-o")
384+
model, err = FromStringToLLMProvider("ollama")
376385
assert.Nil(t, err)
377-
assert.Equal(t, GPT4O, model)
386+
assert.Equal(t, Ollama, model)
378387

379-
model, err = FromStringToLLMModel("beelzebub-model")
380-
assert.Errorf(t, err, "model beelzebub-model not found")
388+
model, err = FromStringToLLMProvider("beelzebub-model")
389+
assert.Errorf(t, err, "provider beelzebub-model not found")
381390
}
382391

383392
func TestBuildExecuteModelSSHWithoutPlaintextSection(t *testing.T) {
@@ -404,7 +413,7 @@ func TestBuildExecuteModelSSHWithoutPlaintextSection(t *testing.T) {
404413
llmHoneypot := LLMHoneypot{
405414
Histories: make([]Message, 0),
406415
Protocol: tracer.SSH,
407-
Model: LLAMA3,
416+
Model: "llama3",
408417
}
409418

410419
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
@@ -442,7 +451,8 @@ func TestBuildExecuteModelSSHWithoutQuotesSection(t *testing.T) {
442451
llmHoneypot := LLMHoneypot{
443452
Histories: make([]Message, 0),
444453
Protocol: tracer.SSH,
445-
Model: LLAMA3,
454+
Model: "llama3",
455+
Provider: Ollama,
446456
}
447457

448458
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

protocols/strategies/http.go

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,10 +37,10 @@ func (httpStrategy HTTPStrategy) Init(beelzebubServiceConfiguration parser.Beelz
3737

3838
if command.Plugin == plugins.LLMPluginName {
3939

40-
llmModel, err := plugins.FromStringToLLMModel(beelzebubServiceConfiguration.Plugin.LLMModel)
40+
llmProvider, err := plugins.FromStringToLLMProvider(beelzebubServiceConfiguration.Plugin.LLMProvider)
4141

4242
if err != nil {
43-
log.Errorf("Error fromString: %s", err.Error())
43+
log.Errorf("Error: %s", err.Error())
4444
responseHTTPBody = "404 Not Found!"
4545
}
4646

@@ -49,7 +49,8 @@ func (httpStrategy HTTPStrategy) Init(beelzebubServiceConfiguration parser.Beelz
4949
OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
5050
Protocol: tracer.HTTP,
5151
Host: beelzebubServiceConfiguration.Plugin.Host,
52-
Model: llmModel,
52+
Model: beelzebubServiceConfiguration.Plugin.LLMModel,
53+
Provider: llmProvider,
5354
CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
5455
}
5556

0 commit comments

Comments
 (0)