Feature: add support for llama, refactor yaml service interface (#115)

* refactor and add llama LLM support

* update readme

* improve code coverage
Mario Candela
2024-07-21 20:11:18 +02:00
committed by GitHub
parent 0af1a05ae9
commit 2088163b54
11 changed files with 511 additions and 275 deletions

View File

@@ -104,7 +104,8 @@ $ make test.dependencies.down
Beelzebub offers a wide range of features to enhance your honeypot environment:
- OpenAI Generative Pre-trained Transformer acting as Linux virtualization
- Support for Ollama
- Support for OpenAI
- SSH Honeypot
- HTTP Honeypot
- TCP Honeypot
@@ -210,7 +211,9 @@ commands:
#### Example SSH Honeypot
###### Honeypot with GPT-3 on Port 2222
###### LLM Honeypots
Example with OpenAI GPT-4:
```yaml
apiVersion: "v1"
@@ -219,13 +222,54 @@ address: ":2222"
description: "SSH interactive ChatGPT"
commands:
  - regex: "^(.+)$"
    plugin: "OpenAIGPTLinuxTerminal"
    plugin: "LLMHoneypot"
serverVersion: "OpenSSH"
serverName: "ubuntu"
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
deadlineTimeoutSeconds: 60
plugin:
  openAISecretKey: "Your OpenAI Secret Key"
  llmModel: "gpt4-o"
  openAISecretKey: "sk-proj-123456"
```
Example with Ollama Llama3:
```yaml
apiVersion: "v1"
protocol: "ssh"
address: ":2222"
description: "SSH interactive ChatGPT"
commands:
  - regex: "^(.+)$"
    plugin: "LLMHoneypot"
serverVersion: "OpenSSH"
serverName: "ubuntu"
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
deadlineTimeoutSeconds: 60
plugin:
  llmModel: "llama3"
  host: "http://example.com/api/chat" # default: http://localhost:11434/api/chat
```
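Both providers receive the same chat-style JSON body. As a rough sketch of what the honeypot posts per captured command, grounded in the `Request` and `Message` structs this commit introduces (the `role`/`content` JSON tags on `Message` are assumed, since the diff truncates that struct):

```go
package main

import (
    "encoding/json"
    "fmt"
)

// Mirrors of the structs defined in plugins/llm.go below.
type Message struct {
    Role    string `json:"role"` // json tags assumed; the diff cuts off the Message struct
    Content string `json:"content"`
}

type Request struct {
    Model    string    `json:"model"`
    Messages []Message `json:"messages"`
    Stream   bool      `json:"stream"`
}

func main() {
    // Each captured command is serialized as one chat request.
    payload, _ := json.Marshal(Request{
        Model:    "llama3",
        Messages: []Message{{Role: "user", Content: "pwd"}},
        Stream:   false,
    })
    fmt.Println(string(payload))
    // {"model":"llama3","messages":[{"role":"user","content":"pwd"}],"stream":false}
}
```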
###### SSH Honeypot on Port 22

View File

@@ -18,41 +18,9 @@ commands:
- "Server: Apache/2.4.53 (Debian)"
- "X-Powered-By: PHP/7.4.29"
statusCode: 200
- regex: "^(/wp-login.php|/wp-admin)$"
handler:
<html>
<header>
<title>Wordpress 6 test page</title>
</header>
<body>
<form action="" method="post">
<label for="uname"><b>Username</b></label>
<input type="text" placeholder="Enter Username" name="uname" required>
<label for="psw"><b>Password</b></label>
<input type="password" placeholder="Enter Password" name="psw" required>
<button type="submit">Login</button>
</form>
</body>
</html>
headers:
- "Content-Type: text/html"
- "Server: Apache/2.4.53 (Debian)"
- "X-Powered-By: PHP/7.4.29"
statusCode: 200
- regex: "^.*$"
handler:
<html>
<header>
<title>404</title>
</header>
<body>
<h1>Not found!</h1>
</body>
</html>
headers:
- "Content-Type: text/html"
- "Server: Apache/2.4.53 (Debian)"
- "X-Powered-By: PHP/7.4.29"
statusCode: 404
plugin: "LLMHoneypot"
statusCode: 200
plugin:
llmModel: "gpt4-o"
openAISecretKey: "sk-proj-123456"

View File

@@ -8,6 +8,6 @@ commands:
serverVersion: "OpenSSH"
serverName: "ubuntu"
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
deadlineTimeoutSeconds: 60
deadlineTimeoutSeconds: 6000
plugin:
  openAISecretKey: ""
  llmModel: "llama3"

View File

@@ -3,6 +3,7 @@ package parser
import (
    "fmt"
    "github.com/mariocandela/beelzebub/v3/plugins"
    "os"
    "path/filepath"
    "strings"
@@ -50,6 +51,19 @@ type Prometheus struct {
type Plugin struct {
    OpenAISecretKey string `yaml:"openAISecretKey"`
    Host            string `yaml:"host"`
    LLMModel        string `yaml:"llmModel"`
}

// FromString maps the llmModel string from the service YAML to its LLMModel enum value.
func FromString(llmModel string) (plugins.LLMModel, error) {
    switch llmModel {
    case "llama3":
        return plugins.LLAMA3, nil
    case "gpt4-o":
        return plugins.GPT4O, nil
    default:
        return -1, fmt.Errorf("model %s not found", llmModel)
    }
}
// BeelzebubServiceConfiguration is the struct that contains the configurations of the honeypot service
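For illustration, a minimal sketch of resolving a configured model name with `FromString` (the HTTP and SSH strategies later in this commit do exactly this with the service configuration's `Plugin.LLMModel`):

```go
package main

import (
    "fmt"

    "github.com/mariocandela/beelzebub/v3/parser"
)

func main() {
    // "llama3" and "gpt4-o" are the only strings FromString accepts.
    model, err := parser.FromString("llama3")
    if err != nil {
        // Unknown names fail fast, e.g. FromString("beelzebub-model")
        // returns "model beelzebub-model not found".
        fmt.Println(err)
        return
    }
    fmt.Println(model) // prints 0, the integer value of plugins.LLAMA3
}
```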

View File

@@ -2,6 +2,7 @@ package parser
import (
    "errors"
    "github.com/mariocandela/beelzebub/v3/plugins"
    "os"
    "testing"
@@ -53,7 +54,12 @@ commands:
  - regex: "wp-admin"
    handler: "login"
    headers:
      - "Content-Type: text/html"`)
      - "Content-Type: text/html"
plugin:
  openAISecretKey: "qwerty"
  llmModel: "llama3"
  host: "localhost:1563"
`)

    return beelzebubServiceConfiguration, nil
}
@@ -112,10 +118,10 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
    configurationsParser.gelAllFilesNameByDirNameDependency = mockReadDirValid

    beelzebubServicesConfiguration, err := configurationsParser.ReadConfigurationsServices()

    assert.Nil(t, err)
    firstBeelzebubServiceConfiguration := beelzebubServicesConfiguration[0]
    assert.Nil(t, err)
    assert.Equal(t, firstBeelzebubServiceConfiguration.Protocol, "http")
    assert.Equal(t, firstBeelzebubServiceConfiguration.ApiVersion, "v1")
    assert.Equal(t, firstBeelzebubServiceConfiguration.Address, ":8080")
@@ -125,6 +131,9 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
    assert.Equal(t, firstBeelzebubServiceConfiguration.Commands[0].Handler, "login")
    assert.Equal(t, len(firstBeelzebubServiceConfiguration.Commands[0].Headers), 1)
    assert.Equal(t, firstBeelzebubServiceConfiguration.Commands[0].Headers[0], "Content-Type: text/html")
    assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.OpenAISecretKey, "qwerty")
    assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMModel, "llama3")
    assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Host, "localhost:1563")
}
func TestGelAllFilesNameByDirName(t *testing.T) {
@@ -177,3 +186,16 @@ func TestReadFileBytesByFilePath(t *testing.T) {
    assert.Equal(t, "", string(bytes))
}

func TestFromString(t *testing.T) {
    model, err := FromString("llama3")
    assert.Nil(t, err)
    assert.Equal(t, plugins.LLAMA3, model)

    model, err = FromString("gpt4-o")
    assert.Nil(t, err)
    assert.Equal(t, plugins.GPT4O, model)

    model, err = FromString("beelzebub-model")
    assert.Errorf(t, err, "model beelzebub-model not found")
}

View File

@@ -36,7 +36,6 @@ func (beelzebubCloud *beelzebubCloud) SendEvent(event tracer.Event) (bool, error
        SetHeader("Content-Type", "application/json").
        SetBody(requestJson).
        SetHeader("Authorization", beelzebubCloud.AuthToken).
        SetResult(&gptResponse{}).
        Post(beelzebubCloud.URI)

    log.Debug(response)

View File

@@ -12,30 +12,33 @@ import (
const (
    systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide explanations or type commands unless explicitly instructed by the user. Your entire response/output is going to consist of a simple text with \n for new line, and you will NOT wrap it within string md markers"
    systemPromptVirtualizeHTTPServer    = "You will act as an unsecure HTTP Server with multiple vulnerability like aws and git credentials stored into root http directory. The user will send HTTP requests, and you are to reply with what the server should show. Do not provide explanations or type commands unless explicitly instructed by the user."
    ChatGPTPluginName = "OpenAIGPTLinuxTerminal"
    LLMPluginName     = "LLMHoneypot"
    openAIGPTEndpoint = "https://api.openai.com/v1/chat/completions"
    ollamaEndpoint    = "http://localhost:11434/api/chat"
)

type openAIVirtualHoneypot struct {
type LLMHoneypot struct {
    Histories []Message
    openAIKey string
    OpenAIKey string
    client    *resty.Client
    protocol  tracer.Protocol
    Protocol  tracer.Protocol
    Model     LLMModel
    Host      string
}
type Choice struct {
    Message      Message     `json:"message"`
    Index        int         `json:"index"`
    Logprobs     interface{} `json:"logprobs"`
    FinishReason string      `json:"finish_reason"`
    Message      Message `json:"message"`
    Index        int     `json:"index"`
    FinishReason string  `json:"finish_reason"`
}

type gptResponse struct {
type Response struct {
    ID      string   `json:"id"`
    Object  string   `json:"object"`
    Created int      `json:"created"`
    Model   string   `json:"model"`
    Choices []Choice `json:"choices"`
    Message Message  `json:"message"`
    Usage   struct {
        PromptTokens     int `json:"prompt_tokens"`
        CompletionTokens int `json:"completion_tokens"`
@@ -43,9 +46,10 @@ type gptResponse struct {
    } `json:"usage"`
}

type gptRequest struct {
type Request struct {
    Model    string    `json:"model"`
    Messages []Message `json:"messages"`
    Stream   bool      `json:"stream"`
}
type Message struct {
@@ -65,13 +69,18 @@ func (role Role) String() string {
    return [...]string{"system", "user", "assistant"}[role]
}

func Init(history []Message, openAIKey string, protocol tracer.Protocol) *openAIVirtualHoneypot {
    return &openAIVirtualHoneypot{
        Histories: history,
        openAIKey: openAIKey,
        client:    resty.New(),
        protocol:  protocol,
    }
}

type LLMModel int

const (
    LLAMA3 LLMModel = iota
    GPT4O
)

func InitLLMHoneypot(config LLMHoneypot) *LLMHoneypot {
    // Inject the dependencies
    config.client = resty.New()

    return &config
}
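Because `InitLLMHoneypot` takes the config by value and injects a fresh resty client, callers can overwrite that client after construction. A minimal sketch of this seam, modeled on the `httpmock` setup in the new `llm_test.go` below (the test name is illustrative):

```go
package plugins

import (
    "testing"

    "github.com/go-resty/resty/v2"
    "github.com/jarcoal/httpmock"
    "github.com/mariocandela/beelzebub/v3/tracer"
)

func TestClientInjectionSketch(t *testing.T) {
    // Build a resty client whose transport is intercepted by httpmock.
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    honeypot := InitLLMHoneypot(LLMHoneypot{
        Histories: make([]Message, 0),
        Protocol:  tracer.SSH,
        Model:     LLAMA3,
    })
    honeypot.client = client // swap the injected client for the mocked one
}
```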
func buildPrompt(histories []Message, protocol tracer.Protocol, command string) ([]Message, error) {
@@ -118,42 +127,91 @@ func buildPrompt(histories []Message, protocol tracer.Protocol, command string)
    return messages, nil
}
func (openAIVirtualHoneypot *openAIVirtualHoneypot) GetCompletions(command string) (string, error) {
func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error) {
    var err error

    prompt, err := buildPrompt(openAIVirtualHoneypot.Histories, openAIVirtualHoneypot.protocol, command)
    if err != nil {
        return "", err
    }

    requestJson, err := json.Marshal(gptRequest{
    requestJson, err := json.Marshal(Request{
        Model:    "gpt-4o",
        Messages: prompt,
        Messages: messages,
        Stream:   false,
    })
    if err != nil {
        return "", err
    }

    if openAIVirtualHoneypot.openAIKey == "" {
    if llmHoneypot.OpenAIKey == "" {
        return "", errors.New("openAIKey is empty")
    }

    if llmHoneypot.Host == "" {
        llmHoneypot.Host = openAIGPTEndpoint
    }

    log.Debug(string(requestJson))
    response, err := openAIVirtualHoneypot.client.R().
    response, err := llmHoneypot.client.R().
        SetHeader("Content-Type", "application/json").
        SetBody(requestJson).
        SetAuthToken(openAIVirtualHoneypot.openAIKey).
        SetResult(&gptResponse{}).
        Post(openAIGPTEndpoint)
        SetAuthToken(llmHoneypot.OpenAIKey).
        SetResult(&Response{}).
        Post(llmHoneypot.Host)
    if err != nil {
        return "", err
    }

    log.Debug(response)
    if len(response.Result().(*gptResponse).Choices) == 0 {
    if len(response.Result().(*Response).Choices) == 0 {
        return "", errors.New("no choices")
    }

    return response.Result().(*gptResponse).Choices[0].Message.Content, nil
    return response.Result().(*Response).Choices[0].Message.Content, nil
}
func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error) {
    var err error

    requestJson, err := json.Marshal(Request{
        Model:    "llama3",
        Messages: messages,
        Stream:   false,
    })
    if err != nil {
        return "", err
    }

    if llmHoneypot.Host == "" {
        llmHoneypot.Host = ollamaEndpoint
    }

    log.Debug(string(requestJson))
    response, err := llmHoneypot.client.R().
        SetHeader("Content-Type", "application/json").
        SetBody(requestJson).
        SetResult(&Response{}).
        Post(llmHoneypot.Host)
    if err != nil {
        return "", err
    }

    log.Debug(response)
    return response.Result().(*Response).Message.Content, nil
}
func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
    var err error

    prompt, err := buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
    if err != nil {
        return "", err
    }

    switch llmHoneypot.Model {
    case LLAMA3:
        return llmHoneypot.ollamaCaller(prompt)
    case GPT4O:
        return llmHoneypot.openAICaller(prompt)
    default:
        return "", errors.New("no model selected")
    }
}
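Putting the pieces together, a sketch of how a protocol strategy drives the new interface (this mirrors the HTTP and SSH strategy changes below; the helper function and the placeholder key are illustrative only):

```go
package protocols

import (
    "github.com/mariocandela/beelzebub/v3/plugins"
    "github.com/mariocandela/beelzebub/v3/tracer"
)

// respond is a hypothetical helper; the real call sites live in the
// HTTP and SSH strategies changed later in this commit.
func respond(command string, histories []plugins.Message) (string, error) {
    llmHoneypot := plugins.LLMHoneypot{
        Histories: histories,
        OpenAIKey: "sk-proj-123456", // placeholder; the LLAMA3 path ignores it
        Protocol:  tracer.SSH,
        Model:     plugins.GPT4O,
        // Host left empty: each caller falls back to its provider's default endpoint.
    }
    return plugins.InitLLMHoneypot(llmHoneypot).ExecuteModel(command)
}
```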

View File

@@ -0,0 +1,287 @@
package plugins

import (
    "github.com/go-resty/resty/v2"
    "github.com/jarcoal/httpmock"
    "github.com/mariocandela/beelzebub/v3/tracer"
    "github.com/stretchr/testify/assert"
    "net/http"
    "testing"
)

const SystemPromptLen = 4

func TestBuildPromptEmptyHistory(t *testing.T) {
    //Given
    var histories []Message
    command := "pwd"

    //When
    prompt, err := buildPrompt(histories, tracer.SSH, command)

    //Then
    assert.Nil(t, err)
    assert.Equal(t, SystemPromptLen, len(prompt))
}

func TestBuildPromptWithHistory(t *testing.T) {
    //Given
    var histories = []Message{
        {
            Role:    "cat hello.txt",
            Content: "world",
        },
    }
    command := "pwd"

    //When
    prompt, err := buildPrompt(histories, tracer.SSH, command)

    //Then
    assert.Nil(t, err)
    assert.Equal(t, SystemPromptLen+1, len(prompt))
}

func TestBuildExecuteModelFailValidation(t *testing.T) {
    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        OpenAIKey: "",
        Protocol:  tracer.SSH,
        Model:     GPT4O,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

    _, err := openAIGPTVirtualTerminal.ExecuteModel("test")

    assert.Equal(t, "openAIKey is empty", err.Error())
}

func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {
    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        OpenAIKey: "",
        Protocol:  tracer.TCP,
        Model:     GPT4O,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

    _, err := openAIGPTVirtualTerminal.ExecuteModel("test")

    assert.Equal(t, "no prompt for protocol selected", err.Error())
}

func TestBuildExecuteModelFailValidationModelType(t *testing.T) {
    // Given
    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        Protocol:  tracer.SSH,
        Model:     5,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)

    //When
    _, err := openAIGPTVirtualTerminal.ExecuteModel("ls")

    //Then
    assert.Errorf(t, err, "no model selected")
}

func TestBuildExecuteModelSSHWithResultsOpenAI(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &Response{
                Choices: []Choice{
                    {
                        Message: Message{
                            Role:    SYSTEM.String(),
                            Content: "prova.txt",
                        },
                    },
                },
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        OpenAIKey: "sdjdnklfjndslkjanfk",
        Protocol:  tracer.SSH,
        Model:     GPT4O,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
    openAIGPTVirtualTerminal.client = client

    //When
    str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")

    //Then
    assert.Nil(t, err)
    assert.Equal(t, "prova.txt", str)
}

func TestBuildExecuteModelSSHWithResultsLLama(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", ollamaEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &Response{
                Message: Message{
                    Role:    SYSTEM.String(),
                    Content: "prova.txt",
                },
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        Protocol:  tracer.SSH,
        Model:     LLAMA3,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
    openAIGPTVirtualTerminal.client = client

    //When
    str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")

    //Then
    assert.Nil(t, err)
    assert.Equal(t, "prova.txt", str)
}

func TestBuildExecuteModelSSHWithoutResults(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &Response{
                Choices: []Choice{},
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        OpenAIKey: "sdjdnklfjndslkjanfk",
        Protocol:  tracer.SSH,
        Model:     GPT4O,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
    openAIGPTVirtualTerminal.client = client

    //When
    _, err := openAIGPTVirtualTerminal.ExecuteModel("ls")

    //Then
    assert.Equal(t, "no choices", err.Error())
}

func TestBuildExecuteModelHTTPWithResults(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &Response{
                Choices: []Choice{
                    {
                        Message: Message{
                            Role:    SYSTEM.String(),
                            Content: "[default]\nregion = us-west-2\noutput = json",
                        },
                    },
                },
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        OpenAIKey: "sdjdnklfjndslkjanfk",
        Protocol:  tracer.HTTP,
        Model:     GPT4O,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
    openAIGPTVirtualTerminal.client = client

    //When
    str, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")

    //Then
    assert.Nil(t, err)
    assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
}

func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &Response{
                Choices: []Choice{},
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    llmHoneypot := LLMHoneypot{
        Histories: make([]Message, 0),
        OpenAIKey: "sdjdnklfjndslkjanfk",
        Protocol:  tracer.HTTP,
        Model:     GPT4O,
    }

    openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
    openAIGPTVirtualTerminal.client = client

    //When
    _, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")

    //Then
    assert.Equal(t, "no choices", err.Error())
}

View File

@@ -1,188 +0,0 @@
package plugins

import (
    "github.com/go-resty/resty/v2"
    "github.com/jarcoal/httpmock"
    "github.com/mariocandela/beelzebub/v3/tracer"
    "github.com/stretchr/testify/assert"
    "net/http"
    "testing"
)

const SystemPromptLen = 4

func TestBuildPromptEmptyHistory(t *testing.T) {
    //Given
    var histories []Message
    command := "pwd"

    //When
    prompt, err := buildPrompt(histories, tracer.SSH, command)

    //Then
    assert.Nil(t, err)
    assert.Equal(t, SystemPromptLen, len(prompt))
}

func TestBuildPromptWithHistory(t *testing.T) {
    //Given
    var histories = []Message{
        {
            Role:    "cat hello.txt",
            Content: "world",
        },
    }
    command := "pwd"

    //When
    prompt, err := buildPrompt(histories, tracer.SSH, command)

    //Then
    assert.Nil(t, err)
    assert.Equal(t, SystemPromptLen+1, len(prompt))
}

func TestBuildGetCompletionsFailValidation(t *testing.T) {
    openAIGPTVirtualTerminal := Init(make([]Message, 0), "", tracer.SSH)

    _, err := openAIGPTVirtualTerminal.GetCompletions("test")

    assert.Equal(t, "openAIKey is empty", err.Error())
}

func TestBuildGetCompletionsFailValidationStrategyType(t *testing.T) {
    openAIGPTVirtualTerminal := Init(make([]Message, 0), "", tracer.TCP)

    _, err := openAIGPTVirtualTerminal.GetCompletions("test")

    assert.Equal(t, "no prompt for protocol selected", err.Error())
}

func TestBuildGetCompletionsSSHWithResults(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &gptResponse{
                Choices: []Choice{
                    {
                        Message: Message{
                            Role:    SYSTEM.String(),
                            Content: "prova.txt",
                        },
                    },
                },
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.SSH)
    openAIGPTVirtualTerminal.client = client

    //When
    str, err := openAIGPTVirtualTerminal.GetCompletions("ls")

    //Then
    assert.Nil(t, err)
    assert.Equal(t, "prova.txt", str)
}

func TestBuildGetCompletionsSSHWithoutResults(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &gptResponse{
                Choices: []Choice{},
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.SSH)
    openAIGPTVirtualTerminal.client = client

    //When
    _, err := openAIGPTVirtualTerminal.GetCompletions("ls")

    //Then
    assert.Equal(t, "no choices", err.Error())
}

func TestBuildGetCompletionsHTTPWithResults(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &gptResponse{
                Choices: []Choice{
                    {
                        Message: Message{
                            Role:    SYSTEM.String(),
                            Content: "[default]\nregion = us-west-2\noutput = json",
                        },
                    },
                },
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.HTTP)
    openAIGPTVirtualTerminal.client = client

    //When
    str, err := openAIGPTVirtualTerminal.GetCompletions("GET /.aws/credentials")

    //Then
    assert.Nil(t, err)
    assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
}

func TestBuildGetCompletionsHTTPWithoutResults(t *testing.T) {
    client := resty.New()
    httpmock.ActivateNonDefault(client.GetClient())
    defer httpmock.DeactivateAndReset()

    // Given
    httpmock.RegisterResponder("POST", openAIGPTEndpoint,
        func(req *http.Request) (*http.Response, error) {
            resp, err := httpmock.NewJsonResponse(200, &gptResponse{
                Choices: []Choice{},
            })
            if err != nil {
                return httpmock.NewStringResponse(500, ""), nil
            }
            return resp, nil
        },
    )

    openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.HTTP)
    openAIGPTVirtualTerminal.client = client

    //When
    _, err := openAIGPTVirtualTerminal.GetCompletions("GET /.aws/credentials")

    //Then
    assert.Equal(t, "no choices", err.Error())
}

View File

@@ -34,13 +34,29 @@ func (httpStrategy HTTPStrategy) Init(beelzebubServiceConfiguration parser.Beelz
if matched {
    responseHTTPBody := command.Handler

    if command.Plugin == plugins.ChatGPTPluginName {
        openAIGPTVirtualTerminal := plugins.Init(make([]plugins.Message, 0), beelzebubServiceConfiguration.Plugin.OpenAISecretKey, tracer.HTTP)
    if command.Plugin == plugins.LLMPluginName {

        llmModel, err := parser.FromString(beelzebubServiceConfiguration.Plugin.LLMModel)
        if err != nil {
            log.Errorf("Error fromString: %s", err.Error())
            responseHTTPBody = "404 Not Found!"
        }

        llmHoneypot := plugins.LLMHoneypot{
            Histories: make([]plugins.Message, 0),
            OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
            Protocol:  tracer.HTTP,
            Host:      beelzebubServiceConfiguration.Plugin.Host,
            Model:     llmModel,
        }

        llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)

        command := fmt.Sprintf("%s %s", request.Method, request.RequestURI)

        if completions, err := openAIGPTVirtualTerminal.GetCompletions(command); err != nil {
            log.Errorf("Error GetCompletions: %s, %s", command, err.Error())
        if completions, err := llmHoneypotInstance.ExecuteModel(command); err != nil {
            log.Errorf("Error ExecuteModel: %s, %s", command, err.Error())
            responseHTTPBody = "404 Not Found!"
        } else {
            responseHTTPBody = completions

View File

@@ -62,11 +62,27 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
if matched {
    commandOutput := command.Handler

    if command.Plugin == plugins.ChatGPTPluginName {
        openAIGPTVirtualTerminal := plugins.Init(histories, beelzebubServiceConfiguration.Plugin.OpenAISecretKey, tracer.SSH)
    if command.Plugin == plugins.LLMPluginName {

        if commandOutput, err = openAIGPTVirtualTerminal.GetCompletions(commandInput); err != nil {
            log.Errorf("Error GetCompletions: %s, %s", commandInput, err.Error())
        llmModel, err := parser.FromString(beelzebubServiceConfiguration.Plugin.LLMModel)
        if err != nil {
            log.Errorf("Error fromString: %s", err.Error())
            commandOutput = "command not found"
        }

        llmHoneypot := plugins.LLMHoneypot{
            Histories: histories,
            OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
            Protocol:  tracer.SSH,
            Host:      beelzebubServiceConfiguration.Plugin.Host,
            Model:     llmModel,
        }

        llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)

        if commandOutput, err = llmHoneypotInstance.ExecuteModel(commandInput); err != nil {
            log.Errorf("Error ExecuteModel: %s, %s", commandInput, err.Error())
            commandOutput = "command not found"
        }
    }