Compare commits

...

2 Commits

99c7287c02  Feat: Refactoring plugin:LLM honeypot custom prompt (#154)
            refactoring LLM honeypot custom prompt
            2025-01-16 08:46:13 +01:00

c3d2ff885d  Feat: LLM Honeypot allow specifying the custom prompt #152 (#153)
            * implement new feature, custom prompt
            * Add doc for custom prompt
            2025-01-14 08:45:30 +01:00
7 changed files with 146 additions and 29 deletions

View File

@@ -250,6 +250,25 @@ plugin:
  llmModel: "llama3"
  host: "http://example.com/api/chat" #default http://localhost:11434/api/chat
```
+Example with custom prompt:
+```yaml
+apiVersion: "v1"
+protocol: "ssh"
+address: ":2222"
+description: "SSH interactive OpenAI GPT-4"
+commands:
+  - regex: "^(.+)$"
+    plugin: "LLMHoneypot"
+serverVersion: "OpenSSH"
+serverName: "ubuntu"
+passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
+deadlineTimeoutSeconds: 60
+plugin:
+  llmModel: "gpt4-o"
+  openAISecretKey: "sk-proj-123456"
+  prompt: "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block."
+```
###### SSH Honeypot on Port 22
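The `prompt` key in the example above maps onto the new `Prompt` field added to the `Plugin` configuration struct in the parser diff below (tagged `yaml:"prompt"`). A minimal sketch of that mapping follows; the standalone `main` program and the use of `gopkg.in/yaml.v3` are assumptions for illustration, not part of this change set:

```go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v3" // assumed decoder, chosen because the struct uses yaml tags
)

// Illustrative subset of the Plugin configuration struct from the parser diff.
type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
	Host            string `yaml:"host"`
	LLMModel        string `yaml:"llmModel"`
	Prompt          string `yaml:"prompt"`
}

func main() {
	raw := []byte(`
llmModel: "gpt4-o"
openAISecretKey: "sk-proj-123456"
prompt: "You will act as an Ubuntu Linux terminal."
`)

	var plugin Plugin
	if err := yaml.Unmarshal(raw, &plugin); err != nil {
		log.Fatal(err)
	}

	// The configured custom prompt is now available to hand to the LLM honeypot.
	fmt.Println(plugin.Prompt)
}
```

A service definition that omits `prompt` simply leaves `Plugin.Prompt` empty, which is what triggers the default-prompt fallback shown in the plugin diff further down.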

View File

@@ -52,6 +52,7 @@ type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
	Host            string `yaml:"host"`
	LLMModel        string `yaml:"llmModel"`
+	Prompt          string `yaml:"prompt"`
}

// BeelzebubServiceConfiguration is the struct that contains the configurations of the honeypot service

View File

@@ -58,6 +58,7 @@ plugin:
  openAISecretKey: "qwerty"
  llmModel: "llama3"
  host: "localhost:1563"
+  prompt: "hello world"
`)
	return beelzebubServiceConfiguration, nil
}
@@ -133,6 +134,7 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.OpenAISecretKey, "qwerty")
	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMModel, "llama3")
	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Host, "localhost:1563")
+	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Prompt, "hello world")
}

func TestGelAllFilesNameByDirName(t *testing.T) {

View File

@@ -19,12 +19,13 @@ const (
)

type LLMHoneypot struct {
	Histories    []Message
	OpenAIKey    string
	client       *resty.Client
	Protocol     tracer.Protocol
	Model        LLMModel
	Host         string
+	CustomPrompt string
}

type Choice struct {
@@ -95,14 +96,19 @@ func InitLLMHoneypot(config LLMHoneypot) *LLMHoneypot {
	return &config
}

-func buildPrompt(histories []Message, protocol tracer.Protocol, command string) ([]Message, error) {
+func (llmHoneypot *LLMHoneypot) buildPrompt(command string) ([]Message, error) {
	var messages []Message
+	var prompt string

-	switch protocol {
+	switch llmHoneypot.Protocol {
	case tracer.SSH:
+		prompt = systemPromptVirtualizeLinuxTerminal
+		if llmHoneypot.CustomPrompt != "" {
+			prompt = llmHoneypot.CustomPrompt
+		}
		messages = append(messages, Message{
			Role:    SYSTEM.String(),
-			Content: systemPromptVirtualizeLinuxTerminal,
+			Content: prompt,
		})
		messages = append(messages, Message{
			Role:    USER.String(),
@@ -112,13 +118,17 @@ func buildPrompt(histories []Message, protocol tracer.Protocol, command string)
			Role:    ASSISTANT.String(),
			Content: "/home/user",
		})
-		for _, history := range histories {
+		for _, history := range llmHoneypot.Histories {
			messages = append(messages, history)
		}
	case tracer.HTTP:
+		prompt = systemPromptVirtualizeHTTPServer
+		if llmHoneypot.CustomPrompt != "" {
+			prompt = llmHoneypot.CustomPrompt
+		}
		messages = append(messages, Message{
			Role:    SYSTEM.String(),
-			Content: systemPromptVirtualizeHTTPServer,
+			Content: prompt,
		})
		messages = append(messages, Message{
			Role:    USER.String(),
@@ -211,8 +221,9 @@ func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error)
func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
	var err error
+	var prompt []Message

-	prompt, err := buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
+	prompt, err = llmHoneypot.buildPrompt(command)
	if err != nil {
		return "", err

View File

@@ -16,8 +16,13 @@ func TestBuildPromptEmptyHistory(t *testing.T) {
	var histories []Message
	command := "pwd"

+	honeypot := LLMHoneypot{
+		Histories: histories,
+		Protocol:  tracer.SSH,
+	}
+
	//When
-	prompt, err := buildPrompt(histories, tracer.SSH, command)
+	prompt, err := honeypot.buildPrompt(command)

	//Then
	assert.Nil(t, err)
@@ -35,14 +40,45 @@ func TestBuildPromptWithHistory(t *testing.T) {
	command := "pwd"

+	honeypot := LLMHoneypot{
+		Histories: histories,
+		Protocol:  tracer.SSH,
+	}
+
	//When
-	prompt, err := buildPrompt(histories, tracer.SSH, command)
+	prompt, err := honeypot.buildPrompt(command)

	//Then
	assert.Nil(t, err)
	assert.Equal(t, SystemPromptLen+1, len(prompt))
}

+func TestBuildPromptWithCustomPrompt(t *testing.T) {
+	//Given
+	var histories = []Message{
+		{
+			Role:    "cat hello.txt",
+			Content: "world",
+		},
+	}
+
+	command := "pwd"
+
+	honeypot := LLMHoneypot{
+		Histories:    histories,
+		Protocol:     tracer.SSH,
+		CustomPrompt: "act as calculator",
+	}
+
+	//When
+	prompt, err := honeypot.buildPrompt(command)
+
+	//Then
+	assert.Nil(t, err)
+	assert.Equal(t, prompt[0].Content, "act as calculator")
+	assert.Equal(t, prompt[0].Role, SYSTEM.String())
+}
+
func TestBuildExecuteModelFailValidation(t *testing.T) {

	llmHoneypot := LLMHoneypot{
@@ -59,6 +95,51 @@ func TestBuildExecuteModelFailValidation(t *testing.T) {
	assert.Equal(t, "openAIKey is empty", err.Error())
}

+func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
+	client := resty.New()
+	httpmock.ActivateNonDefault(client.GetClient())
+	defer httpmock.DeactivateAndReset()
+
+	// Given
+	httpmock.RegisterMatcherResponder("POST", openAIGPTEndpoint,
+		httpmock.BodyContainsString("hello world"),
+		func(req *http.Request) (*http.Response, error) {
+			resp, err := httpmock.NewJsonResponse(200, &Response{
+				Choices: []Choice{
+					{
+						Message: Message{
+							Role:    SYSTEM.String(),
+							Content: "[default]\nregion = us-west-2\noutput = json",
+						},
+					},
+				},
+			})
+			if err != nil {
+				return httpmock.NewStringResponse(500, ""), nil
+			}
+			return resp, nil
+		},
+	)
+
+	llmHoneypot := LLMHoneypot{
+		Histories:    make([]Message, 0),
+		OpenAIKey:    "sdjdnklfjndslkjanfk",
+		Protocol:     tracer.HTTP,
+		Model:        GPT4O,
+		CustomPrompt: "hello world",
+	}
+
+	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
+	openAIGPTVirtualTerminal.client = client
+
+	//When
+	str, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
+
+	//Then
+	assert.Nil(t, err)
+	assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
+}
+
func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {

	llmHoneypot := LLMHoneypot{

View File

@@ -45,11 +45,12 @@ func (httpStrategy HTTPStrategy) Init(beelzebubServiceConfiguration parser.Beelz
	}

	llmHoneypot := plugins.LLMHoneypot{
		Histories:    make([]plugins.Message, 0),
		OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
		Protocol:     tracer.HTTP,
		Host:         beelzebubServiceConfiguration.Plugin.Host,
		Model:        llmModel,
+		CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
	}

	llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)

View File

@@ -52,11 +52,12 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
	}

	llmHoneypot := plugins.LLMHoneypot{
		Histories:    make([]plugins.Message, 0),
		OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
		Protocol:     tracer.SSH,
		Host:         beelzebubServiceConfiguration.Plugin.Host,
		Model:        llmModel,
+		CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
	}

	llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)
@@ -137,11 +138,12 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
	}

	llmHoneypot := plugins.LLMHoneypot{
		Histories:    histories,
		OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
		Protocol:     tracer.SSH,
		Host:         beelzebubServiceConfiguration.Plugin.Host,
		Model:        llmModel,
+		CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
	}

	llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)