Feature: add support for llama, refactor yaml service interface (#115)

* refactor and add llama LLM support

* update readme

* improve code coverage
Authored by Mario Candela on 2024-07-21 20:11:18 +02:00, committed by GitHub
parent 0af1a05ae9 · commit 2088163b54
11 changed files with 511 additions and 275 deletions

View File

@@ -36,7 +36,6 @@ func (beelzebubCloud *beelzebubCloud) SendEvent(event tracer.Event) (bool, error
	SetHeader("Content-Type", "application/json").
	SetBody(requestJson).
	SetHeader("Authorization", beelzebubCloud.AuthToken).
-	SetResult(&gptResponse{}).
	Post(beelzebubCloud.URI)
log.Debug(response)

View File

@@ -12,30 +12,33 @@ import (
const (
	systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide explanations or type commands unless explicitly instructed by the user. Your entire response/output is going to consist of a simple text with \n for new line, and you will NOT wrap it within string md markers"
	systemPromptVirtualizeHTTPServer    = "You will act as an unsecure HTTP Server with multiple vulnerability like aws and git credentials stored into root http directory. The user will send HTTP requests, and you are to reply with what the server should show. Do not provide explanations or type commands unless explicitly instructed by the user."
-	ChatGPTPluginName                   = "LLMHoneypot"
+	LLMPluginName                       = "LLMHoneypot"
	openAIGPTEndpoint                   = "https://api.openai.com/v1/chat/completions"
+	ollamaEndpoint                      = "http://localhost:11434/api/chat"
)

-type openAIVirtualHoneypot struct {
+type LLMHoneypot struct {
	Histories []Message
-	openAIKey string
+	OpenAIKey string
	client    *resty.Client
-	protocol  tracer.Protocol
+	Protocol  tracer.Protocol
+	Model     LLMModel
+	Host      string
}

type Choice struct {
-	Message      Message     `json:"message"`
-	Index        int         `json:"index"`
-	Logprobs     interface{} `json:"logprobs"`
-	FinishReason string      `json:"finish_reason"`
+	Message      Message `json:"message"`
+	Index        int     `json:"index"`
+	FinishReason string  `json:"finish_reason"`
}

-type gptResponse struct {
+type Response struct {
	ID      string   `json:"id"`
	Object  string   `json:"object"`
	Created int      `json:"created"`
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
+	Message Message  `json:"message"`
	Usage   struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
@@ -43,9 +46,10 @@ type gptResponse struct {
	} `json:"usage"`
}

-type gptRequest struct {
+type Request struct {
	Model    string    `json:"model"`
	Messages []Message `json:"messages"`
+	Stream   bool      `json:"stream"`
}

type Message struct {
@@ -65,13 +69,18 @@ func (role Role) String() string {
	return [...]string{"system", "user", "assistant"}[role]
}

-func Init(history []Message, openAIKey string, protocol tracer.Protocol) *openAIVirtualHoneypot {
-	return &openAIVirtualHoneypot{
-		Histories: history,
-		openAIKey: openAIKey,
-		client:    resty.New(),
-		protocol:  protocol,
-	}
+type LLMModel int
+
+const (
+	LLAMA3 LLMModel = iota
+	GPT4O
+)
+
+func InitLLMHoneypot(config LLMHoneypot) *LLMHoneypot {
+	// Inject the dependencies
+	config.client = resty.New()
+
+	return &config
}

func buildPrompt(histories []Message, protocol tracer.Protocol, command string) ([]Message, error) {
@@ -118,42 +127,91 @@ func buildPrompt(histories []Message, protocol tracer.Protocol, command string)
	return messages, nil
}

-func (openAIVirtualHoneypot *openAIVirtualHoneypot) GetCompletions(command string) (string, error) {
+func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error) {
	var err error

-	prompt, err := buildPrompt(openAIVirtualHoneypot.Histories, openAIVirtualHoneypot.protocol, command)
-	if err != nil {
-		return "", err
-	}
-
-	requestJson, err := json.Marshal(gptRequest{
+	requestJson, err := json.Marshal(Request{
		Model:    "gpt-4o",
-		Messages: prompt,
+		Messages: messages,
+		Stream:   false,
	})
	if err != nil {
		return "", err
	}

-	if openAIVirtualHoneypot.openAIKey == "" {
+	if llmHoneypot.OpenAIKey == "" {
		return "", errors.New("openAIKey is empty")
	}

+	if llmHoneypot.Host == "" {
+		llmHoneypot.Host = openAIGPTEndpoint
+	}
+
	log.Debug(string(requestJson))
-	response, err := openAIVirtualHoneypot.client.R().
+	response, err := llmHoneypot.client.R().
		SetHeader("Content-Type", "application/json").
		SetBody(requestJson).
-		SetAuthToken(openAIVirtualHoneypot.openAIKey).
-		SetResult(&gptResponse{}).
-		Post(openAIGPTEndpoint)
+		SetAuthToken(llmHoneypot.OpenAIKey).
+		SetResult(&Response{}).
+		Post(llmHoneypot.Host)
	if err != nil {
		return "", err
	}

	log.Debug(response)

-	if len(response.Result().(*gptResponse).Choices) == 0 {
+	if len(response.Result().(*Response).Choices) == 0 {
		return "", errors.New("no choices")
	}

-	return response.Result().(*gptResponse).Choices[0].Message.Content, nil
+	return response.Result().(*Response).Choices[0].Message.Content, nil
}

+func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error) {
+	var err error
+
+	requestJson, err := json.Marshal(Request{
+		Model:    "llama3",
+		Messages: messages,
+		Stream:   false,
+	})
+	if err != nil {
+		return "", err
+	}
+
+	if llmHoneypot.Host == "" {
+		llmHoneypot.Host = ollamaEndpoint
+	}
+
+	log.Debug(string(requestJson))
+	response, err := llmHoneypot.client.R().
+		SetHeader("Content-Type", "application/json").
+		SetBody(requestJson).
+		SetResult(&Response{}).
+		Post(llmHoneypot.Host)
+	if err != nil {
+		return "", err
+	}
+
+	log.Debug(response)
+
+	return response.Result().(*Response).Message.Content, nil
+}
+
+func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
+	var err error
+
+	prompt, err := buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
+	if err != nil {
+		return "", err
+	}
+
+	switch llmHoneypot.Model {
+	case LLAMA3:
+		return llmHoneypot.ollamaCaller(prompt)
+	case GPT4O:
+		return llmHoneypot.openAICaller(prompt)
+	default:
+		return "", errors.New("no model selected")
+	}
+}
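
For orientation, a minimal usage sketch of the refactored interface, assembled only from names visible in the diff above (the main wrapper, the placeholder key, and the example command are illustrative assumptions, not part of the commit):

package main

import (
	"fmt"

	"github.com/mariocandela/beelzebub/v3/plugins"
	"github.com/mariocandela/beelzebub/v3/tracer"
)

func main() {
	// InitLLMHoneypot injects the resty HTTP client into the config.
	honeypot := plugins.InitLLMHoneypot(plugins.LLMHoneypot{
		Histories: make([]plugins.Message, 0),
		OpenAIKey: "sk-...", // placeholder; required only for the GPT4O model
		Protocol:  tracer.SSH,
		Model:     plugins.LLAMA3, // or plugins.GPT4O; an empty Host falls back to the model's default endpoint
	})

	// ExecuteModel builds the protocol-specific prompt, then dispatches to
	// ollamaCaller or openAICaller based on the configured model.
	output, err := honeypot.ExecuteModel("ls -la")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(output)
}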

View File

@@ -0,0 +1,287 @@
package plugins
import (
"github.com/go-resty/resty/v2"
"github.com/jarcoal/httpmock"
"github.com/mariocandela/beelzebub/v3/tracer"
"github.com/stretchr/testify/assert"
"net/http"
"testing"
)
const SystemPromptLen = 4
func TestBuildPromptEmptyHistory(t *testing.T) {
//Given
var histories []Message
command := "pwd"
//When
prompt, err := buildPrompt(histories, tracer.SSH, command)
//Then
assert.Nil(t, err)
assert.Equal(t, SystemPromptLen, len(prompt))
}
func TestBuildPromptWithHistory(t *testing.T) {
//Given
var histories = []Message{
{
Role: "cat hello.txt",
Content: "world",
},
}
command := "pwd"
//When
prompt, err := buildPrompt(histories, tracer.SSH, command)
//Then
assert.Nil(t, err)
assert.Equal(t, SystemPromptLen+1, len(prompt))
}
func TestBuildExecuteModelFailValidation(t *testing.T) {
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "",
Protocol: tracer.SSH,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
_, err := openAIGPTVirtualTerminal.ExecuteModel("test")
assert.Equal(t, "openAIKey is empty", err.Error())
}
func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "",
Protocol: tracer.TCP,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
_, err := openAIGPTVirtualTerminal.ExecuteModel("test")
assert.Equal(t, "no prompt for protocol selected", err.Error())
}
func TestBuildExecuteModelFailValidationModelType(t *testing.T) {
// Given
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
Protocol: tracer.SSH,
Model: 5,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
//When
_, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Errorf(t, err, "no model selected")
}
func TestBuildExecuteModelSSHWithResultsOpenAI(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{
{
Message: Message{
Role: SYSTEM.String(),
Content: "prova.txt",
},
},
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.SSH,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Nil(t, err)
assert.Equal(t, "prova.txt", str)
}
func TestBuildExecuteModelSSHWithResultsLLama(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", ollamaEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Message: Message{
Role: SYSTEM.String(),
Content: "prova.txt",
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
Protocol: tracer.SSH,
Model: LLAMA3,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Nil(t, err)
assert.Equal(t, "prova.txt", str)
}
func TestBuildExecuteModelSSHWithoutResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.SSH,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
_, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Equal(t, "no choices", err.Error())
}
func TestBuildExecuteModelHTTPWithResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{
{
Message: Message{
Role: SYSTEM.String(),
Content: "[default]\nregion = us-west-2\noutput = json",
},
},
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.HTTP,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
//Then
assert.Nil(t, err)
assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
}
func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.HTTP,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
_, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
//Then
assert.Equal(t, "no choices", err.Error())
}
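
The mocked payloads above also show why the shared Response struct carries both a Choices slice and a top-level Message: OpenAI's chat completions API nests the assistant message inside choices, while Ollama's /api/chat returns it at the top level. A self-contained sketch of that difference (the field subset and sample JSON are illustrative, mirroring the test fixtures):

package main

import (
	"encoding/json"
	"fmt"
)

type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// response mirrors only the subset of the plugin's Response struct needed here.
type response struct {
	Choices []struct {
		Message message `json:"message"`
	} `json:"choices"`
	Message message `json:"message"`
}

func main() {
	openAIBody := `{"choices":[{"message":{"role":"assistant","content":"prova.txt"}}]}`
	ollamaBody := `{"message":{"role":"assistant","content":"prova.txt"}}`

	var fromOpenAI, fromOllama response
	// Error handling elided for brevity in this sketch.
	_ = json.Unmarshal([]byte(openAIBody), &fromOpenAI)
	_ = json.Unmarshal([]byte(ollamaBody), &fromOllama)

	fmt.Println(fromOpenAI.Choices[0].Message.Content) // prova.txt
	fmt.Println(fromOllama.Message.Content)            // prova.txt
}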

View File

@@ -1,188 +0,0 @@
package plugins
import (
"github.com/go-resty/resty/v2"
"github.com/jarcoal/httpmock"
"github.com/mariocandela/beelzebub/v3/tracer"
"github.com/stretchr/testify/assert"
"net/http"
"testing"
)
const SystemPromptLen = 4
func TestBuildPromptEmptyHistory(t *testing.T) {
//Given
var histories []Message
command := "pwd"
//When
prompt, err := buildPrompt(histories, tracer.SSH, command)
//Then
assert.Nil(t, err)
assert.Equal(t, SystemPromptLen, len(prompt))
}
func TestBuildPromptWithHistory(t *testing.T) {
//Given
var histories = []Message{
{
Role: "cat hello.txt",
Content: "world",
},
}
command := "pwd"
//When
prompt, err := buildPrompt(histories, tracer.SSH, command)
//Then
assert.Nil(t, err)
assert.Equal(t, SystemPromptLen+1, len(prompt))
}
func TestBuildGetCompletionsFailValidation(t *testing.T) {
openAIGPTVirtualTerminal := Init(make([]Message, 0), "", tracer.SSH)
_, err := openAIGPTVirtualTerminal.GetCompletions("test")
assert.Equal(t, "openAIKey is empty", err.Error())
}
func TestBuildGetCompletionsFailValidationStrategyType(t *testing.T) {
openAIGPTVirtualTerminal := Init(make([]Message, 0), "", tracer.TCP)
_, err := openAIGPTVirtualTerminal.GetCompletions("test")
assert.Equal(t, "no prompt for protocol selected", err.Error())
}
func TestBuildGetCompletionsSSHWithResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &gptResponse{
Choices: []Choice{
{
Message: Message{
Role: SYSTEM.String(),
Content: "prova.txt",
},
},
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.SSH)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.GetCompletions("ls")
//Then
assert.Nil(t, err)
assert.Equal(t, "prova.txt", str)
}
func TestBuildGetCompletionsSSHWithoutResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &gptResponse{
Choices: []Choice{},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.SSH)
openAIGPTVirtualTerminal.client = client
//When
_, err := openAIGPTVirtualTerminal.GetCompletions("ls")
//Then
assert.Equal(t, "no choices", err.Error())
}
func TestBuildGetCompletionsHTTPWithResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &gptResponse{
Choices: []Choice{
{
Message: Message{
Role: SYSTEM.String(),
Content: "[default]\nregion = us-west-2\noutput = json",
},
},
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.HTTP)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.GetCompletions("GET /.aws/credentials")
//Then
assert.Nil(t, err)
assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
}
func TestBuildGetCompletionsHTTPWithoutResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &gptResponse{
Choices: []Choice{},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
openAIGPTVirtualTerminal := Init(make([]Message, 0), "sdjdnklfjndslkjanfk", tracer.HTTP)
openAIGPTVirtualTerminal.client = client
//When
_, err := openAIGPTVirtualTerminal.GetCompletions("GET /.aws/credentials")
//Then
assert.Equal(t, "no choices", err.Error())
}