Mirror of https://github.com/mariocandela/beelzebub.git, synced 2025-07-01 18:47:26 -04:00
Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 4813685834 | |
| | 6f6acb212b | |
| | 99c7287c02 | |
| | c3d2ff885d | |
README.md (19 changed lines)

@@ -250,6 +250,25 @@ plugin:
    llmModel: "llama3"
    host: "http://example.com/api/chat" #default http://localhost:11434/api/chat
 ```
+Example with custom prompt:
+
+```yaml
+apiVersion: "v1"
+protocol: "ssh"
+address: ":2222"
+description: "SSH interactive OpenAI GPT-4"
+commands:
+  - regex: "^(.+)$"
+    plugin: "LLMHoneypot"
+serverVersion: "OpenSSH"
+serverName: "ubuntu"
+passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
+deadlineTimeoutSeconds: 60
+plugin:
+   llmModel: "gpt4-o"
+   openAISecretKey: "sk-proj-123456"
+   prompt: "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block."
+```

 ###### SSH Honeypot on Port 22

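For readers skimming the README change: the new `prompt` key overrides the system prompt that the LLM plugin would otherwise choose for the service's protocol. A minimal sketch of that selection rule, mirroring the plugin change further down in this compare (the helper name here is illustrative, not from the repository):

```go
// effectiveSystemPrompt is an illustrative helper: the plugin keeps its
// per-protocol default unless the service configuration supplies a prompt.
func effectiveSystemPrompt(defaultPrompt, customPrompt string) string {
	if customPrompt != "" {
		return customPrompt
	}
	return defaultPrompt
}
```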
go.mod (2 changed lines)

@@ -4,7 +4,7 @@ go 1.20

 require (
     github.com/gliderlabs/ssh v0.3.8
-    github.com/go-resty/resty/v2 v2.16.3
+    github.com/go-resty/resty/v2 v2.16.5
     github.com/google/uuid v1.6.0
     github.com/jarcoal/httpmock v1.3.1
     github.com/melbahja/goph v1.4.0
go.sum (4 changed lines)

@@ -10,8 +10,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
 github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU=
-github.com/go-resty/resty/v2 v2.16.3 h1:zacNT7lt4b8M/io2Ahj6yPypL7bqx9n1iprfQuodV+E=
-github.com/go-resty/resty/v2 v2.16.3/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
+github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
+github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
 github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
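The only dependency change is the go-resty bump from v2.16.3 to v2.16.5; resty is the HTTP client the LLM honeypot plugin holds in its `client` field to reach the chat APIs. A hedged sketch of the kind of request involved, assuming an Ollama-style endpoint and payload (the URL comes from the README default; the payload field names are an assumption, not the plugin's actual caller):

```go
package main

import (
	"fmt"

	"github.com/go-resty/resty/v2"
)

type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

func main() {
	client := resty.New()
	// Illustrative chat request; the real plugin builds its message list with
	// buildPrompt and decodes the provider-specific response.
	resp, err := client.R().
		SetHeader("Content-Type", "application/json").
		SetBody(map[string]interface{}{
			"model":    "llama3",
			"stream":   false,
			"messages": []message{{Role: "system", Content: "You will act as an Ubuntu Linux terminal."}},
		}).
		Post("http://localhost:11434/api/chat") // default host noted in the README
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	fmt.Println(resp.StatusCode(), resp.String())
}
```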
@@ -52,6 +52,7 @@ type Plugin struct {
     OpenAISecretKey string `yaml:"openAISecretKey"`
     Host            string `yaml:"host"`
     LLMModel        string `yaml:"llmModel"`
+    Prompt          string `yaml:"prompt"`
 }

 // BeelzebubServiceConfiguration is the struct that contains the configurations of the honeypot service
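Because of the yaml tags, the new field is filled directly from the `plugin.prompt` key of a service file. A minimal, self-contained sketch of that mapping (gopkg.in/yaml.v3 is assumed here; the struct is trimmed to the fields shown in the diff):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Trimmed copy of the Plugin struct from the diff above, for illustration only.
type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
	Host            string `yaml:"host"`
	LLMModel        string `yaml:"llmModel"`
	Prompt          string `yaml:"prompt"`
}

func main() {
	raw := []byte(`
openAISecretKey: "qwerty"
llmModel: "llama3"
host: "localhost:1563"
prompt: "hello world"
`)
	var p Plugin
	if err := yaml.Unmarshal(raw, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Prompt) // "hello world", the same value asserted in the configuration test below
}
```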
@@ -58,6 +58,7 @@ plugin:
    openAISecretKey: "qwerty"
    llmModel: "llama3"
    host: "localhost:1563"
+   prompt: "hello world"
 `)
     return beelzebubServiceConfiguration, nil
 }

@@ -133,6 +134,7 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
     assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.OpenAISecretKey, "qwerty")
     assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMModel, "llama3")
     assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Host, "localhost:1563")
+    assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Prompt, "hello world")
 }

 func TestGelAllFilesNameByDirName(t *testing.T) {
@@ -19,12 +19,13 @@ const (
 )

 type LLMHoneypot struct {
-    Histories []Message
-    OpenAIKey string
-    client    *resty.Client
-    Protocol  tracer.Protocol
-    Model     LLMModel
-    Host      string
+    Histories    []Message
+    OpenAIKey    string
+    client       *resty.Client
+    Protocol     tracer.Protocol
+    Model        LLMModel
+    Host         string
+    CustomPrompt string
 }

 type Choice struct {

@@ -95,14 +96,19 @@ func InitLLMHoneypot(config LLMHoneypot) *LLMHoneypot {
     return &config
 }

-func buildPrompt(histories []Message, protocol tracer.Protocol, command string) ([]Message, error) {
+func (llmHoneypot *LLMHoneypot) buildPrompt(command string) ([]Message, error) {
     var messages []Message
+    var prompt string

-    switch protocol {
+    switch llmHoneypot.Protocol {
     case tracer.SSH:
+        prompt = systemPromptVirtualizeLinuxTerminal
+        if llmHoneypot.CustomPrompt != "" {
+            prompt = llmHoneypot.CustomPrompt
+        }
         messages = append(messages, Message{
             Role:    SYSTEM.String(),
-            Content: systemPromptVirtualizeLinuxTerminal,
+            Content: prompt,
         })
         messages = append(messages, Message{
             Role: USER.String(),

@@ -112,13 +118,17 @@ func buildPrompt(histories []Message, protocol tracer.Protocol, command string)
             Role:    ASSISTANT.String(),
             Content: "/home/user",
         })
-        for _, history := range histories {
+        for _, history := range llmHoneypot.Histories {
             messages = append(messages, history)
         }
     case tracer.HTTP:
+        prompt = systemPromptVirtualizeHTTPServer
+        if llmHoneypot.CustomPrompt != "" {
+            prompt = llmHoneypot.CustomPrompt
+        }
         messages = append(messages, Message{
             Role:    SYSTEM.String(),
-            Content: systemPromptVirtualizeHTTPServer,
+            Content: prompt,
         })
         messages = append(messages, Message{
             Role: USER.String(),

@@ -211,8 +221,9 @@ func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error)

 func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
     var err error
+    var prompt []Message

-    prompt, err := buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
+    prompt, err = llmHoneypot.buildPrompt(command)

     if err != nil {
         return "", err
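Putting the plugin change together: after this compare, callers opt into a custom prompt by setting `CustomPrompt` before initialising the honeypot, as the strategies and tests below do. A fragment-style sketch, not a complete program: it assumes the repository's `plugins` and `tracer` packages are imported, and the key value is a placeholder.

```go
// Sketch of the call pattern used by the SSH/HTTP strategies in this compare.
llmHoneypot := plugins.LLMHoneypot{
	Histories:    make([]plugins.Message, 0),
	OpenAIKey:    "sk-proj-123456", // placeholder
	Protocol:     tracer.SSH,
	Model:        plugins.GPT4O,
	CustomPrompt: "act as calculator", // empty string keeps the built-in system prompt
}

instance := plugins.InitLLMHoneypot(llmHoneypot)
output, err := instance.ExecuteModel("pwd")
if err != nil {
	// handle the provider error
}
_ = output
```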
@@ -16,8 +16,13 @@ func TestBuildPromptEmptyHistory(t *testing.T) {
     var histories []Message
     command := "pwd"

+    honeypot := LLMHoneypot{
+        Histories: histories,
+        Protocol:  tracer.SSH,
+    }
+
     //When
-    prompt, err := buildPrompt(histories, tracer.SSH, command)
+    prompt, err := honeypot.buildPrompt(command)

     //Then
     assert.Nil(t, err)

@@ -35,14 +40,45 @@ func TestBuildPromptWithHistory(t *testing.T) {

     command := "pwd"

+    honeypot := LLMHoneypot{
+        Histories: histories,
+        Protocol:  tracer.SSH,
+    }
+
     //When
-    prompt, err := buildPrompt(histories, tracer.SSH, command)
+    prompt, err := honeypot.buildPrompt(command)

     //Then
     assert.Nil(t, err)
     assert.Equal(t, SystemPromptLen+1, len(prompt))
 }

+func TestBuildPromptWithCustomPrompt(t *testing.T) {
+    //Given
+    var histories = []Message{
+        {
+            Role:    "cat hello.txt",
+            Content: "world",
+        },
+    }
+
+    command := "pwd"
+
+    honeypot := LLMHoneypot{
+        Histories:    histories,
+        Protocol:     tracer.SSH,
+        CustomPrompt: "act as calculator",
+    }
+
+    //When
+    prompt, err := honeypot.buildPrompt(command)
+
+    //Then
+    assert.Nil(t, err)
+    assert.Equal(t, prompt[0].Content, "act as calculator")
+    assert.Equal(t, prompt[0].Role, SYSTEM.String())
+}
+
 func TestBuildExecuteModelFailValidation(t *testing.T) {

     llmHoneypot := LLMHoneypot{

@@ -59,6 +95,51 @@ func TestBuildExecuteModelFailValidation(t *testing.T) {
     assert.Equal(t, "openAIKey is empty", err.Error())
 }

+func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
+    client := resty.New()
+    httpmock.ActivateNonDefault(client.GetClient())
+    defer httpmock.DeactivateAndReset()
+
+    // Given
+    httpmock.RegisterMatcherResponder("POST", openAIGPTEndpoint,
+        httpmock.BodyContainsString("hello world"),
+        func(req *http.Request) (*http.Response, error) {
+            resp, err := httpmock.NewJsonResponse(200, &Response{
+                Choices: []Choice{
+                    {
+                        Message: Message{
+                            Role:    SYSTEM.String(),
+                            Content: "[default]\nregion = us-west-2\noutput = json",
+                        },
+                    },
+                },
+            })
+            if err != nil {
+                return httpmock.NewStringResponse(500, ""), nil
+            }
+            return resp, nil
+        },
+    )
+
+    llmHoneypot := LLMHoneypot{
+        Histories:    make([]Message, 0),
+        OpenAIKey:    "sdjdnklfjndslkjanfk",
+        Protocol:     tracer.HTTP,
+        Model:        GPT4O,
+        CustomPrompt: "hello world",
+    }
+
+    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
+    openAIGPTVirtualTerminal.client = client
+
+    //When
+    str, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
+
+    //Then
+    assert.Nil(t, err)
+    assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
+}
+
 func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {

     llmHoneypot := LLMHoneypot{
@@ -45,11 +45,12 @@ func (httpStrategy HTTPStrategy) Init(beelzebubServiceConfiguration parser.Beelz
             }

             llmHoneypot := plugins.LLMHoneypot{
-                Histories: make([]plugins.Message, 0),
-                OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
-                Protocol:  tracer.HTTP,
-                Host:      beelzebubServiceConfiguration.Plugin.Host,
-                Model:     llmModel,
+                Histories:    make([]plugins.Message, 0),
+                OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
+                Protocol:     tracer.HTTP,
+                Host:         beelzebubServiceConfiguration.Plugin.Host,
+                Model:        llmModel,
+                CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
             }

             llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)
@@ -52,11 +52,12 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
             }

             llmHoneypot := plugins.LLMHoneypot{
-                Histories: make([]plugins.Message, 0),
-                OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
-                Protocol:  tracer.SSH,
-                Host:      beelzebubServiceConfiguration.Plugin.Host,
-                Model:     llmModel,
+                Histories:    make([]plugins.Message, 0),
+                OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
+                Protocol:     tracer.SSH,
+                Host:         beelzebubServiceConfiguration.Plugin.Host,
+                Model:        llmModel,
+                CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
             }

             llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)

@@ -137,11 +138,12 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
             }

             llmHoneypot := plugins.LLMHoneypot{
-                Histories: histories,
-                OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
-                Protocol:  tracer.SSH,
-                Host:      beelzebubServiceConfiguration.Plugin.Host,
-                Model:     llmModel,
+                Histories:    histories,
+                OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
+                Protocol:     tracer.SSH,
+                Host:         beelzebubServiceConfiguration.Plugin.Host,
+                Model:        llmModel,
+                CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
             }

             llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)