Mirror of https://github.com/mariocandela/beelzebub.git, synced 2025-07-01 18:47:26 -04:00
* Implement new feature: custom prompt
* Add doc for custom prompt
@@ -19,12 +19,13 @@ const (
 )

 type LLMHoneypot struct {
-	Histories []Message
-	OpenAIKey string
-	client    *resty.Client
-	Protocol  tracer.Protocol
-	Model     LLMModel
-	Host      string
+	Histories    []Message
+	OpenAIKey    string
+	client       *resty.Client
+	Protocol     tracer.Protocol
+	Model        LLMModel
+	Host         string
+	CustomPrompt string
 }

 type Choice struct {
@@ -211,8 +212,20 @@ func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error)

 func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
 	var err error
+	var prompt []Message

-	prompt, err := buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
+	if llmHoneypot.CustomPrompt != "" {
+		prompt = append(prompt, Message{
+			Role:    SYSTEM.String(),
+			Content: llmHoneypot.CustomPrompt,
+		})
+		prompt = append(prompt, Message{
+			Role:    USER.String(),
+			Content: command,
+		})
+	} else {
+		prompt, err = buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
+	}

 	if err != nil {
 		return "", err
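For context, a minimal usage sketch of the new field (not part of the commit): with a non-empty CustomPrompt, ExecuteModel skips buildPrompt and sends the custom text as the SYSTEM message, followed by the incoming command as the USER message. The package clause, import path, key, and prompt value below are assumptions; only LLMHoneypot, Message, InitLLMHoneypot, ExecuteModel, tracer.HTTP, and GPT4O come from the diff itself.

package plugins // package name assumed, not shown in the diff

import (
	"github.com/mariocandela/beelzebub/v3/tracer" // import path assumed
)

// exampleCustomPrompt builds a honeypot whose system prompt is overridden:
// because CustomPrompt is non-empty, ExecuteModel uses it instead of the
// protocol-specific default produced by buildPrompt.
func exampleCustomPrompt() (string, error) {
	honeypot := InitLLMHoneypot(LLMHoneypot{
		Histories:    make([]Message, 0),
		OpenAIKey:    "sk-placeholder", // placeholder, not a real key
		Protocol:     tracer.HTTP,
		Model:        GPT4O,
		CustomPrompt: "Act as an HTTP server exposing an AWS-style credentials file", // example value
	})

	// The command is appended as the USER message after the custom SYSTEM prompt.
	return honeypot.ExecuteModel("GET /.aws/credentials")
}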
@@ -59,6 +59,51 @@ func TestBuildExecuteModelFailValidation(t *testing.T) {
 	assert.Equal(t, "openAIKey is empty", err.Error())
 }

+func TestBuildExecuteModelWithCustomPrompt(t *testing.T) {
+	client := resty.New()
+	httpmock.ActivateNonDefault(client.GetClient())
+	defer httpmock.DeactivateAndReset()
+
+	// Given
+	httpmock.RegisterMatcherResponder("POST", openAIGPTEndpoint,
+		httpmock.BodyContainsString("hello world"),
+		func(req *http.Request) (*http.Response, error) {
+			resp, err := httpmock.NewJsonResponse(200, &Response{
+				Choices: []Choice{
+					{
+						Message: Message{
+							Role:    SYSTEM.String(),
+							Content: "[default]\nregion = us-west-2\noutput = json",
+						},
+					},
+				},
+			})
+			if err != nil {
+				return httpmock.NewStringResponse(500, ""), nil
+			}
+			return resp, nil
+		},
+	)
+
+	llmHoneypot := LLMHoneypot{
+		Histories:    make([]Message, 0),
+		OpenAIKey:    "sdjdnklfjndslkjanfk",
+		Protocol:     tracer.HTTP,
+		Model:        GPT4O,
+		CustomPrompt: "hello world",
+	}
+
+	openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
+	openAIGPTVirtualTerminal.client = client
+
+	//When
+	str, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
+
+	//Then
+	assert.Nil(t, err)
+	assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
+}
+
 func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {

 	llmHoneypot := LLMHoneypot{