Mirror of https://github.com/mariocandela/beelzebub.git, synced 2025-07-01 18:47:26 -04:00.

Compare commits (2 commits): 38297faed2, 8703d1afda
Changed file: README.md (13 changed lines)
@@ -211,9 +211,9 @@ commands:
 
 #### Example SSH Honeypot
 
-###### Honeypot LLM Honeypots
+###### LLM Honeypots
 
-Example with OpenAI GPT-4:
+Follow a SSH LLM Honeypot using OpenAI as provider LLM:
 
 ```yaml
 apiVersion: "v1"

@@ -228,11 +228,12 @@ serverName: "ubuntu"
 passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
 deadlineTimeoutSeconds: 60
 plugin:
-  llmModel: "gpt4-o"
+  llmProvider: "openai"
+  llmModel: "gpt4-o" #Models https://platform.openai.com/docs/models
   openAISecretKey: "sk-proj-123456"
 ```
 
-Example with Ollama Llama3:
+Examples with local Ollama instance using model codellama:7b:
 
 ```yaml
 apiVersion: "v1"

@@ -247,7 +248,8 @@ serverName: "ubuntu"
 passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
 deadlineTimeoutSeconds: 60
 plugin:
-  llmModel: "llama3"
+  llmProvider: "ollama"
+  llmModel: "codellama:7b" #Models https://ollama.com/search
   host: "http://example.com/api/chat" #default http://localhost:11434/api/chat
 ```
 
 Example with custom prompt:

@@ -265,6 +267,7 @@ serverName: "ubuntu"
 passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
 deadlineTimeoutSeconds: 60
 plugin:
+  llmProvider: "openai"
   llmModel: "gpt4-o"
   openAISecretKey: "sk-proj-123456"
   prompt: "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block."

@@ -7,7 +7,9 @@ commands:
     plugin: "LLMHoneypot"
 serverVersion: "OpenSSH"
 serverName: "ubuntu"
-passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
+passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456|1234)$"
 deadlineTimeoutSeconds: 6000
 plugin:
-  llmModel: "llama3"
+  llmProvider: "openai"
+  llmModel: "gpt4-o"
+  openAISecretKey: "sk-proj-12345"

@@ -52,6 +52,7 @@ type Plugin struct {
 	OpenAISecretKey string `yaml:"openAISecretKey"`
 	Host            string `yaml:"host"`
 	LLMModel        string `yaml:"llmModel"`
+	LLMProvider     string `yaml:"llmProvider"`
 	Prompt          string `yaml:"prompt"`
 }
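
The struct hunk above is the whole schema change on the parsing side: the `plugin:` block of a service file decodes field by field into `Plugin`, so the new `llmProvider` key lands in `Plugin.LLMProvider` next to the existing `llmModel`. A minimal standalone sketch of that mapping (assuming `gopkg.in/yaml.v3` as the decoder; in the real parser this struct is one field of a larger service configuration):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3" // assumed YAML decoder for this sketch
)

// Plugin mirrors the struct from the hunk above.
type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
	Host            string `yaml:"host"`
	LLMModel        string `yaml:"llmModel"`
	LLMProvider     string `yaml:"llmProvider"`
	Prompt          string `yaml:"prompt"`
}

func main() {
	raw := []byte("llmProvider: \"ollama\"\nllmModel: \"codellama:7b\"\n")

	var p Plugin
	if err := yaml.Unmarshal(raw, &p); err != nil {
		panic(err)
	}
	fmt.Printf("provider=%s model=%s\n", p.LLMProvider, p.LLMModel)
	// Output: provider=ollama model=codellama:7b
}
```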

@@ -59,6 +59,7 @@ commands:
 plugin:
   openAISecretKey: "qwerty"
   llmModel: "llama3"
+  llmProvider: "ollama"
   host: "localhost:1563"
   prompt: "hello world"
 `)

@@ -135,6 +136,7 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
 	assert.Equal(t, firstBeelzebubServiceConfiguration.Commands[0].Headers[0], "Content-Type: text/html")
 	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.OpenAISecretKey, "qwerty")
 	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMModel, "llama3")
+	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMProvider, "ollama")
 	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Host, "localhost:1563")
 	assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Prompt, "hello world")
 	assert.Equal(t, firstBeelzebubServiceConfiguration.TLSCertPath, "/tmp/cert.crt")

@@ -6,15 +6,16 @@ import (
 	"fmt"
 	"github.com/go-resty/resty/v2"
 	"github.com/mariocandela/beelzebub/v3/tracer"
-
 	log "github.com/sirupsen/logrus"
+	"regexp"
+	"strings"
 )
 
 const (
-	systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide explanations or type commands unless explicitly instructed by the user. Your entire response/output is going to consist of a simple text with \n for new line, and you will NOT wrap it within string md markers"
+	systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide note. Do not provide explanations or type commands unless explicitly instructed by the user. Your entire response/output is going to consist of a simple text with \n for new line, and you will NOT wrap it within string md markers"
 	systemPromptVirtualizeHTTPServer    = "You will act as an unsecure HTTP Server with multiple vulnerability like aws and git credentials stored into root http directory. The user will send HTTP requests, and you are to reply with what the server should show. Do not provide explanations or type commands unless explicitly instructed by the user."
 	LLMPluginName                       = "LLMHoneypot"
-	openAIGPTEndpoint                   = "https://api.openai.com/v1/chat/completions"
+	openAIEndpoint                      = "https://api.openai.com/v1/chat/completions"
 	ollamaEndpoint                      = "http://localhost:11434/api/chat"
 )

@@ -23,7 +24,8 @@ type LLMHoneypot struct {
 	OpenAIKey    string
 	client       *resty.Client
 	Protocol     tracer.Protocol
-	Model        LLMModel
+	Provider     LLMProvider
+	Model        string
 	Host         string
 	CustomPrompt string
 }

@@ -71,21 +73,21 @@ func (role Role) String() string {
 	return [...]string{"system", "user", "assistant"}[role]
 }
 
-type LLMModel int
+type LLMProvider int
 
 const (
-	LLAMA3 LLMModel = iota
-	GPT4O
+	Ollama LLMProvider = iota
+	OpenAI
 )
 
-func FromStringToLLMModel(llmModel string) (LLMModel, error) {
-	switch llmModel {
-	case "llama3":
-		return LLAMA3, nil
-	case "gpt4-o":
-		return GPT4O, nil
+func FromStringToLLMProvider(llmProvider string) (LLMProvider, error) {
+	switch strings.ToLower(llmProvider) {
+	case "ollama":
+		return Ollama, nil
+	case "openai":
+		return OpenAI, nil
 	default:
-		return -1, fmt.Errorf("model %s not found", llmModel)
+		return -1, fmt.Errorf("provider %s not found, valid providers: ollama, openai", llmProvider)
 	}
 }
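
Because the switch runs on `strings.ToLower(llmProvider)`, the configured value is case-insensitive: "ollama", "Ollama" and "OLLAMA" all select the same constant. A self-contained sketch (type, constants and function copied from the hunk above so it runs on its own):

```go
package main

import (
	"fmt"
	"strings"
)

type LLMProvider int

const (
	Ollama LLMProvider = iota
	OpenAI
)

func FromStringToLLMProvider(llmProvider string) (LLMProvider, error) {
	switch strings.ToLower(llmProvider) {
	case "ollama":
		return Ollama, nil
	case "openai":
		return OpenAI, nil
	default:
		return -1, fmt.Errorf("provider %s not found, valid providers: ollama, openai", llmProvider)
	}
}

func main() {
	for _, s := range []string{"OpenAI", "OLLAMA", "ollama"} {
		p, err := FromStringToLLMProvider(s)
		fmt.Println(s, "->", p, err) // OpenAI -> 1 <nil>, OLLAMA -> 0 <nil>, ollama -> 0 <nil>
	}
}
```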

@@ -153,7 +155,7 @@ func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error)
 	var err error
 
 	requestJson, err := json.Marshal(Request{
-		Model:    "gpt-4o",
+		Model:    llmHoneypot.Model,
 		Messages: messages,
 		Stream:   false,
 	})

@@ -166,7 +168,7 @@ func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error)
 	}
 
 	if llmHoneypot.Host == "" {
-		llmHoneypot.Host = openAIGPTEndpoint
+		llmHoneypot.Host = openAIEndpoint
 	}
 
 	log.Debug(string(requestJson))

@@ -185,14 +187,14 @@ func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error)
 		return "", errors.New("no choices")
 	}
 
-	return response.Result().(*Response).Choices[0].Message.Content, nil
+	return removeQuotes(response.Result().(*Response).Choices[0].Message.Content), nil
 }
 
 func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error) {
 	var err error
 
 	requestJson, err := json.Marshal(Request{
-		Model:    "llama3",
+		Model:    llmHoneypot.Model,
 		Messages: messages,
 		Stream:   false,
 	})

@@ -216,7 +218,7 @@ func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error)
 	}
 	log.Debug(response)
 
-	return response.Result().(*Response).Message.Content, nil
+	return removeQuotes(response.Result().(*Response).Message.Content), nil
 }
 
 func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
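
With the hardcoded "gpt-4o" and "llama3" strings gone, both callers serialize whatever `llmHoneypot.Model` holds, which is what lets a single code path drive gpt4-o, llama3, codellama:7b, or any other model the provider hosts. A sketch of that request serialization (the `Request` and `Message` mirrors and their JSON tags are assumptions inferred from the payloads in this diff):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Message and Request are pared-down mirrors of the plugin's types.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type Request struct {
	Model    string    `json:"model"`
	Messages []Message `json:"messages"`
	Stream   bool      `json:"stream"`
}

func main() {
	model := "codellama:7b" // previously a hardcoded literal per caller

	body, err := json.Marshal(Request{
		Model:    model,
		Messages: []Message{{Role: "user", Content: "ls"}},
		Stream:   false,
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
	// {"model":"codellama:7b","messages":[{"role":"user","content":"ls"}],"stream":false}
}
```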

@@ -229,12 +231,17 @@ func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
 		return "", err
 	}
 
-	switch llmHoneypot.Model {
-	case LLAMA3:
+	switch llmHoneypot.Provider {
+	case Ollama:
 		return llmHoneypot.ollamaCaller(prompt)
-	case GPT4O:
+	case OpenAI:
 		return llmHoneypot.openAICaller(prompt)
 	default:
-		return "", errors.New("no model selected")
+		return "", fmt.Errorf("provider %d not found, valid providers: ollama, openai", llmHoneypot.Provider)
 	}
 }
+
+func removeQuotes(content string) string {
+	regex := regexp.MustCompile("(```( *)?([a-z]*)?(\\n)?)")
+	return regex.ReplaceAllString(content, "")
+}
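
The new `removeQuotes` helper is what keeps fenced LLM output from leaking markdown into the fake terminal: the regex deletes any triple-backtick marker, together with an optional language tag and trailing newline, and leaves the body untouched. A runnable demonstration using the same regex:

```go
package main

import (
	"fmt"
	"regexp"
)

// removeQuotes is copied from the hunk above.
func removeQuotes(content string) string {
	regex := regexp.MustCompile("(```( *)?([a-z]*)?(\\n)?)")
	return regex.ReplaceAllString(content, "")
}

func main() {
	fenced := "```plaintext\ntotal 0\ndrwxr-xr-x 2 root root 40 .\n```"
	fmt.Printf("%q\n", removeQuotes(fenced))
	// Output: "total 0\ndrwxr-xr-x 2 root root 40 .\n"
}
```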

@@ -85,7 +85,8 @@ func TestBuildExecuteModelFailValidation(t *testing.T) {
 		Histories: make([]Message, 0),
 		OpenAIKey: "",
 		Protocol:  tracer.SSH,
-		Model:     GPT4O,
+		Model:     "gpt4-o",
+		Provider:  OpenAI,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -101,7 +102,7 @@ func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
 	// Given
-	httpmock.RegisterMatcherResponder("POST", openAIGPTEndpoint,
+	httpmock.RegisterMatcherResponder("POST", openAIEndpoint,
 		httpmock.BodyContainsString("hello world"),
 		func(req *http.Request) (*http.Response, error) {
 			resp, err := httpmock.NewJsonResponse(200, &Response{

@@ -125,7 +126,8 @@ func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
 		Histories:    make([]Message, 0),
 		OpenAIKey:    "sdjdnklfjndslkjanfk",
 		Protocol:     tracer.HTTP,
-		Model:        GPT4O,
+		Model:        "gpt4-o",
+		Provider:     OpenAI,
 		CustomPrompt: "hello world",
 	}

@@ -146,7 +148,8 @@ func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {
 		Histories: make([]Message, 0),
 		OpenAIKey: "",
 		Protocol:  tracer.TCP,
-		Model:     GPT4O,
+		Model:     "gpt4-o",
+		Provider:  OpenAI,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -161,7 +164,8 @@ func TestBuildExecuteModelFailValidationModelType(t *testing.T) {
 	llmHoneypot := LLMHoneypot{
 		Histories: make([]Message, 0),
 		Protocol:  tracer.SSH,
-		Model:     5,
+		Model:     "llama3",
+		Provider:  5,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -179,7 +183,7 @@ func TestBuildExecuteModelSSHWithResultsOpenAI(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
 	// Given
-	httpmock.RegisterResponder("POST", openAIGPTEndpoint,
+	httpmock.RegisterResponder("POST", openAIEndpoint,
 		func(req *http.Request) (*http.Response, error) {
 			resp, err := httpmock.NewJsonResponse(200, &Response{
 				Choices: []Choice{

@@ -202,7 +206,8 @@ func TestBuildExecuteModelSSHWithResultsOpenAI(t *testing.T) {
 		Histories: make([]Message, 0),
 		OpenAIKey: "sdjdnklfjndslkjanfk",
 		Protocol:  tracer.SSH,
-		Model:     GPT4O,
+		Model:     "gpt4-o",
+		Provider:  OpenAI,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -240,7 +245,8 @@ func TestBuildExecuteModelSSHWithResultsLLama(t *testing.T) {
 	llmHoneypot := LLMHoneypot{
 		Histories: make([]Message, 0),
 		Protocol:  tracer.SSH,
-		Model:     LLAMA3,
+		Model:     "llama3",
+		Provider:  Ollama,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -260,7 +266,7 @@ func TestBuildExecuteModelSSHWithoutResults(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
 	// Given
-	httpmock.RegisterResponder("POST", openAIGPTEndpoint,
+	httpmock.RegisterResponder("POST", openAIEndpoint,
 		func(req *http.Request) (*http.Response, error) {
 			resp, err := httpmock.NewJsonResponse(200, &Response{
 				Choices: []Choice{},

@@ -276,7 +282,8 @@ func TestBuildExecuteModelSSHWithoutResults(t *testing.T) {
 		Histories: make([]Message, 0),
 		OpenAIKey: "sdjdnklfjndslkjanfk",
 		Protocol:  tracer.SSH,
-		Model:     GPT4O,
+		Model:     "gpt4-o",
+		Provider:  OpenAI,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -295,7 +302,7 @@ func TestBuildExecuteModelHTTPWithResults(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
 	// Given
-	httpmock.RegisterResponder("POST", openAIGPTEndpoint,
+	httpmock.RegisterResponder("POST", openAIEndpoint,
 		func(req *http.Request) (*http.Response, error) {
 			resp, err := httpmock.NewJsonResponse(200, &Response{
 				Choices: []Choice{

@@ -318,7 +325,8 @@ func TestBuildExecuteModelHTTPWithResults(t *testing.T) {
 		Histories: make([]Message, 0),
 		OpenAIKey: "sdjdnklfjndslkjanfk",
 		Protocol:  tracer.HTTP,
-		Model:     GPT4O,
+		Model:     "gpt4-o",
+		Provider:  OpenAI,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -338,7 +346,7 @@ func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
 	defer httpmock.DeactivateAndReset()
 
 	// Given
-	httpmock.RegisterResponder("POST", openAIGPTEndpoint,
+	httpmock.RegisterResponder("POST", openAIEndpoint,
 		func(req *http.Request) (*http.Response, error) {
 			resp, err := httpmock.NewJsonResponse(200, &Response{
 				Choices: []Choice{},

@@ -354,7 +362,8 @@ func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
 		Histories: make([]Message, 0),
 		OpenAIKey: "sdjdnklfjndslkjanfk",
 		Protocol:  tracer.HTTP,
-		Model:     GPT4O,
+		Model:     "gpt4-o",
+		Provider:  OpenAI,
 	}
 
 	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

@@ -368,14 +377,105 @@ func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
 }
 
 func TestFromString(t *testing.T) {
-	model, err := FromStringToLLMModel("llama3")
+	model, err := FromStringToLLMProvider("openai")
 	assert.Nil(t, err)
-	assert.Equal(t, LLAMA3, model)
+	assert.Equal(t, OpenAI, model)
 
-	model, err = FromStringToLLMModel("gpt4-o")
+	model, err = FromStringToLLMProvider("ollama")
 	assert.Nil(t, err)
-	assert.Equal(t, GPT4O, model)
+	assert.Equal(t, Ollama, model)
 
-	model, err = FromStringToLLMModel("beelzebub-model")
-	assert.Errorf(t, err, "model beelzebub-model not found")
+	model, err = FromStringToLLMProvider("beelzebub-model")
+	assert.Errorf(t, err, "provider beelzebub-model not found")
+}
+
+func TestBuildExecuteModelSSHWithoutPlaintextSection(t *testing.T) {
+	client := resty.New()
+	httpmock.ActivateNonDefault(client.GetClient())
+	defer httpmock.DeactivateAndReset()
+
+	// Given
+	httpmock.RegisterResponder("POST", ollamaEndpoint,
+		func(req *http.Request) (*http.Response, error) {
+			resp, err := httpmock.NewJsonResponse(200, &Response{
+				Message: Message{
+					Role:    SYSTEM.String(),
+					Content: "```plaintext\n```\n",
+				},
+			})
+			if err != nil {
+				return httpmock.NewStringResponse(500, ""), nil
+			}
+			return resp, nil
+		},
+	)
+
+	llmHoneypot := LLMHoneypot{
+		Histories: make([]Message, 0),
+		Protocol:  tracer.SSH,
+		Model:     "llama3",
+	}
+
+	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
+	openAIGPTVirtualTerminal.client = client
+
+	//When
+	str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
+
+	//Then
+	assert.Nil(t, err)
+	assert.Equal(t, "", str)
+}
+
+func TestBuildExecuteModelSSHWithoutQuotesSection(t *testing.T) {
+	client := resty.New()
+	httpmock.ActivateNonDefault(client.GetClient())
+	defer httpmock.DeactivateAndReset()
+
+	// Given
+	httpmock.RegisterResponder("POST", ollamaEndpoint,
+		func(req *http.Request) (*http.Response, error) {
+			resp, err := httpmock.NewJsonResponse(200, &Response{
+				Message: Message{
+					Role:    SYSTEM.String(),
+					Content: "```\n```\n",
+				},
+			})
+			if err != nil {
+				return httpmock.NewStringResponse(500, ""), nil
+			}
+			return resp, nil
+		},
+	)
+
+	llmHoneypot := LLMHoneypot{
+		Histories: make([]Message, 0),
+		Protocol:  tracer.SSH,
+		Model:     "llama3",
+		Provider:  Ollama,
+	}
+
+	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
+	openAIGPTVirtualTerminal.client = client
+
+	//When
+	str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
+
+	//Then
+	assert.Nil(t, err)
+	assert.Equal(t, "", str)
+}
+
+func TestRemoveQuotes(t *testing.T) {
+	plaintext := "```plaintext\n```"
+	bash := "```bash\n```"
+	onlyQuotes := "```\n```"
+	complexText := "```plaintext\ntop - 10:30:48 up 1 day, 4:30, 2 users, load average: 0.15, 0.10, 0.08\nTasks: 198 total, 1 running, 197 sleeping, 0 stopped, 0 zombie\n```"
+	complexText2 := "```\ntop - 15:06:59 up 10 days, 3:17, 1 user, load average: 0.10, 0.09, 0.08\nTasks: 285 total\n```"
+
+	assert.Equal(t, "", removeQuotes(plaintext))
+	assert.Equal(t, "", removeQuotes(bash))
+	assert.Equal(t, "", removeQuotes(onlyQuotes))
+	assert.Equal(t, "top - 10:30:48 up 1 day, 4:30, 2 users, load average: 0.15, 0.10, 0.08\nTasks: 198 total, 1 running, 197 sleeping, 0 stopped, 0 zombie\n", removeQuotes(complexText))
+	assert.Equal(t, "top - 15:06:59 up 10 days, 3:17, 1 user, load average: 0.10, 0.09, 0.08\nTasks: 285 total\n", removeQuotes(complexText2))
 }
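
None of these tests reach a real endpoint: the mock transport is attached to one specific resty client, the same client that is then injected into the honeypot, and canned JSON is served for the provider URL. A minimal standalone sketch of that pattern (import paths assumed to be `github.com/go-resty/resty/v2` and `github.com/jarcoal/httpmock`, which is where these helper names come from):

```go
package main

import (
	"fmt"

	"github.com/go-resty/resty/v2"
	"github.com/jarcoal/httpmock"
)

func main() {
	client := resty.New()
	// Attach the mock transport to this client only, as the tests do.
	httpmock.ActivateNonDefault(client.GetClient())
	defer httpmock.DeactivateAndReset()

	httpmock.RegisterResponder("POST", "http://localhost:11434/api/chat",
		httpmock.NewStringResponder(200, `{"message":{"role":"assistant","content":"$ "}}`))

	resp, err := client.R().SetBody(`{"model":"llama3"}`).Post("http://localhost:11434/api/chat")
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.String()) // prints the canned JSON body
}
```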

@@ -37,10 +37,10 @@ func (httpStrategy HTTPStrategy) Init(beelzebubServiceConfiguration parser.Beelz
 
 			if command.Plugin == plugins.LLMPluginName {
 
-				llmModel, err := plugins.FromStringToLLMModel(beelzebubServiceConfiguration.Plugin.LLMModel)
+				llmProvider, err := plugins.FromStringToLLMProvider(beelzebubServiceConfiguration.Plugin.LLMProvider)
 
 				if err != nil {
-					log.Errorf("Error fromString: %s", err.Error())
+					log.Errorf("Error: %s", err.Error())
 					responseHTTPBody = "404 Not Found!"
 				}

@@ -49,7 +49,8 @@
 					OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
 					Protocol:     tracer.HTTP,
 					Host:         beelzebubServiceConfiguration.Plugin.Host,
-					Model:        llmModel,
+					Model:        beelzebubServiceConfiguration.Plugin.LLMModel,
+					Provider:     llmProvider,
 					CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
 				}

@@ -44,11 +44,12 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
 
 		if command.Plugin == plugins.LLMPluginName {
 
-			llmModel, err := plugins.FromStringToLLMModel(beelzebubServiceConfiguration.Plugin.LLMModel)
+			llmProvider, err := plugins.FromStringToLLMProvider(beelzebubServiceConfiguration.Plugin.LLMProvider)
 
 			if err != nil {
-				log.Errorf("Error fromString: %s", err.Error())
+				log.Errorf("Error: %s", err.Error())
 				commandOutput = "command not found"
+				llmProvider = plugins.OpenAI
 			}
 
 			llmHoneypot := plugins.LLMHoneypot{

@@ -56,7 +57,8 @@
 				OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
 				Protocol:     tracer.SSH,
 				Host:         beelzebubServiceConfiguration.Plugin.Host,
-				Model:        llmModel,
+				Model:        beelzebubServiceConfiguration.Plugin.LLMProvider,
+				Provider:     llmProvider,
 				CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
 			}

@@ -130,11 +132,11 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
 
 			if command.Plugin == plugins.LLMPluginName {
 
-				llmModel, err := plugins.FromStringToLLMModel(beelzebubServiceConfiguration.Plugin.LLMModel)
+				llmProvider, err := plugins.FromStringToLLMProvider(beelzebubServiceConfiguration.Plugin.LLMProvider)
 
 				if err != nil {
-					log.Errorf("Error fromString: %s", err.Error())
-					commandOutput = "command not found"
+					log.Errorf("Error: %s, fallback OpenAI", err.Error())
+					llmProvider = plugins.OpenAI
 				}
 
 				llmHoneypot := plugins.LLMHoneypot{

@@ -142,7 +144,8 @@
 					OpenAIKey:    beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
 					Protocol:     tracer.SSH,
 					Host:         beelzebubServiceConfiguration.Plugin.Host,
-					Model:        llmModel,
+					Model:        beelzebubServiceConfiguration.Plugin.LLMModel,
+					Provider:     llmProvider,
 					CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
 				}
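
Taken together, the strategy hunks share one wiring pattern: parse `plugin.llmProvider` once, and if the value is invalid fall back to OpenAI instead of aborting the session. A condensed sketch of that fallback (`parseProvider` is a hypothetical stand-in for `plugins.FromStringToLLMProvider`):

```go
package main

import (
	"errors"
	"fmt"
)

type LLMProvider int

const (
	Ollama LLMProvider = iota
	OpenAI
)

// parseProvider stands in for plugins.FromStringToLLMProvider.
func parseProvider(s string) (LLMProvider, error) {
	switch s {
	case "ollama":
		return Ollama, nil
	case "openai":
		return OpenAI, nil
	}
	return -1, errors.New("provider " + s + " not found, valid providers: ollama, openai")
}

func main() {
	provider, err := parseProvider("") // e.g. llmProvider missing from the YAML
	if err != nil {
		// Same fallback the SSH strategy applies on a bad value.
		fmt.Printf("Error: %s, fallback OpenAI\n", err)
		provider = OpenAI
	}
	fmt.Println(provider == OpenAI) // true
}
```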