Feat: LLM Honeypot: allow specifying a custom prompt #152 (#153)

* implement new feature, custom prompt

* Add doc for custom prompt
Author: Mario Candela
Date: 2025-01-14 08:45:30 +01:00
Committed by: GitHub
Parent: f1b35e9e43
Commit: c3d2ff885d
7 changed files with 105 additions and 22 deletions


@@ -19,12 +19,13 @@ const (
 )
 
 type LLMHoneypot struct {
-	Histories []Message
-	OpenAIKey string
-	client    *resty.Client
-	Protocol  tracer.Protocol
-	Model     LLMModel
-	Host      string
+	Histories    []Message
+	OpenAIKey    string
+	client       *resty.Client
+	Protocol     tracer.Protocol
+	Model        LLMModel
+	Host         string
+	CustomPrompt string
 }
 
 type Choice struct {
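The new CustomPrompt field lets a single honeypot instance override the protocol-specific system prompt that buildPrompt would otherwise generate. Below is a minimal sketch of wiring it up, written as if inside the same package as the code in this diff; only identifiers visible in the diff are used, and the key and prompt strings are placeholder values:

	// Sketch: construct an LLM honeypot with a custom system prompt.
	honeypot := InitLLMHoneypot(LLMHoneypot{
		Histories:    make([]Message, 0),
		OpenAIKey:    "sk-placeholder", // placeholder key, not a real credential
		Protocol:     tracer.HTTP,
		Model:        GPT4O,
		CustomPrompt: "You are an Apache web server on Ubuntu", // overrides the default system prompt
	})
	resp, err := honeypot.ExecuteModel("GET /.aws/credentials")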
@@ -211,8 +212,20 @@ func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error)
 
 func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
 	var err error
+	var prompt []Message
 
-	prompt, err := buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
+	if llmHoneypot.CustomPrompt != "" {
+		prompt = append(prompt, Message{
+			Role:    SYSTEM.String(),
+			Content: llmHoneypot.CustomPrompt,
+		})
+		prompt = append(prompt, Message{
+			Role:    USER.String(),
+			Content: command,
+		})
+	} else {
+		prompt, err = buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
+	}
 
 	if err != nil {
 		return "", err


@@ -59,6 +59,51 @@ func TestBuildExecuteModelFailValidation(t *testing.T) {
 	assert.Equal(t, "openAIKey is empty", err.Error())
 }
 
+func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
+	client := resty.New()
+	httpmock.ActivateNonDefault(client.GetClient())
+	defer httpmock.DeactivateAndReset()
+
+	// Given
+	httpmock.RegisterMatcherResponder("POST", openAIGPTEndpoint,
+		httpmock.BodyContainsString("hello world"),
+		func(req *http.Request) (*http.Response, error) {
+			resp, err := httpmock.NewJsonResponse(200, &Response{
+				Choices: []Choice{
+					{
+						Message: Message{
+							Role:    SYSTEM.String(),
+							Content: "[default]\nregion = us-west-2\noutput = json",
+						},
+					},
+				},
+			})
+			if err != nil {
+				return httpmock.NewStringResponse(500, ""), nil
+			}
+			return resp, nil
+		},
+	)
+
+	llmHoneypot := LLMHoneypot{
+		Histories:    make([]Message, 0),
+		OpenAIKey:    "sdjdnklfjndslkjanfk",
+		Protocol:     tracer.HTTP,
+		Model:        GPT4O,
+		CustomPrompt: "hello world",
+	}
+
+	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
+	openAIGPTVirtualTerminal.client = client
+
+	//When
+	str, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
+
+	//Then
+	assert.Nil(t, err)
+	assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
+}
+
 func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {
 	llmHoneypot := LLMHoneypot{