package plugins

import (
	"encoding/json"
	"errors"
	"fmt"

	"github.com/go-resty/resty/v2"
	"github.com/mariocandela/beelzebub/v3/tracer"
	log "github.com/sirupsen/logrus"
)

const (
	// systemPromptVirtualizeLinuxTerminal instructs the model to emulate an Ubuntu terminal.
	systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide explanations or type commands unless explicitly instructed by the user. Your entire response/output will consist of plain text with \n for new lines, and you will NOT wrap it in markdown markers"
	// systemPromptVirtualizeHTTPServer instructs the model to emulate a deliberately insecure HTTP server.
	systemPromptVirtualizeHTTPServer = "You will act as an insecure HTTP server with multiple vulnerabilities, such as AWS and git credentials stored in the root HTTP directory. The user will send HTTP requests, and you are to reply with what the server should show. Do not provide explanations or type commands unless explicitly instructed by the user."

	LLMPluginName     = "LLMHoneypot"
	openAIGPTEndpoint = "https://api.openai.com/v1/chat/completions"
	ollamaEndpoint    = "http://localhost:11434/api/chat"
)

// LLMHoneypot holds the configuration and state of an LLM-backed honeypot session.
type LLMHoneypot struct {
	Histories    []Message
	OpenAIKey    string
	client       *resty.Client
	Protocol     tracer.Protocol
	Model        LLMModel
	Host         string
	CustomPrompt string
}

// Choice is a single completion choice in an OpenAI-style response.
type Choice struct {
	Message      Message `json:"message"`
	Index        int     `json:"index"`
	FinishReason string  `json:"finish_reason"`
}

// Response covers both backend shapes: OpenAI populates Choices, Ollama populates Message.
type Response struct {
	ID      string   `json:"id"`
	Object  string   `json:"object"`
	Created int      `json:"created"`
	Model   string   `json:"model"`
	Choices []Choice `json:"choices"`
	Message Message  `json:"message"`
	Usage   struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
}

// Request is the chat payload sent to either backend.
type Request struct {
	Model    string    `json:"model"`
	Messages []Message `json:"messages"`
	Stream   bool      `json:"stream"`
}

// Message is a single chat message: a role plus its content.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}
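
// A Request with a single user message marshals to a payload of the form
// (illustrative values; the same shape is sent to both backends):
//
//	{"model":"llama3","messages":[{"role":"user","content":"pwd"}],"stream":false}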

// Role enumerates the chat roles understood by both backends.
type Role int

const (
	SYSTEM Role = iota
	USER
	ASSISTANT
)

// String maps a Role to its wire-format name.
func (role Role) String() string {
	return [...]string{"system", "user", "assistant"}[role]
}

// LLMModel identifies the backend model the honeypot queries.
type LLMModel int

const (
	LLAMA3 LLMModel = iota
	GPT4O
)

// FromStringToLLMModel converts a configuration string into an LLMModel.
func FromStringToLLMModel(llmModel string) (LLMModel, error) {
	switch llmModel {
	case "llama3":
		return LLAMA3, nil
	case "gpt4-o":
		return GPT4O, nil
	default:
		return -1, fmt.Errorf("model %s not found", llmModel)
	}
}

// InitLLMHoneypot injects the HTTP client dependency and returns a ready-to-use honeypot.
func InitLLMHoneypot(config LLMHoneypot) *LLMHoneypot {
	config.client = resty.New()

	return &config
}
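
// A minimal usage sketch, assuming an Ollama instance is reachable at the
// default ollamaEndpoint (the configuration values below are illustrative):
//
//	honeypot := InitLLMHoneypot(LLMHoneypot{
//		Protocol: tracer.SSH,
//		Model:    LLAMA3,
//	})
//	output, err := honeypot.ExecuteModel("ls -la")
//	if err != nil {
//		log.Error(err)
//	}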

// buildPrompt assembles the chat context for the given protocol: a system
// prompt, a protocol-specific few-shot example, any session history, and
// finally the current command.
func buildPrompt(histories []Message, protocol tracer.Protocol, command string) ([]Message, error) {
	var messages []Message

	switch protocol {
	case tracer.SSH:
		messages = append(messages, Message{
			Role:    SYSTEM.String(),
			Content: systemPromptVirtualizeLinuxTerminal,
		})
		messages = append(messages, Message{
			Role:    USER.String(),
			Content: "pwd",
		})
		messages = append(messages, Message{
			Role:    ASSISTANT.String(),
			Content: "/home/user",
		})
		// Replay the conversation so far to keep the emulated session consistent.
		messages = append(messages, histories...)
	case tracer.HTTP:
		messages = append(messages, Message{
			Role:    SYSTEM.String(),
			Content: systemPromptVirtualizeHTTPServer,
		})
		messages = append(messages, Message{
			Role:    USER.String(),
			Content: "GET /index.html",
		})
		messages = append(messages, Message{
			Role:    ASSISTANT.String(),
			Content: "<html><body>Hello, World!</body></html>",
		})
	default:
		return nil, errors.New("no prompt for protocol selected")
	}

	messages = append(messages, Message{
		Role:    USER.String(),
		Content: command,
	})

	return messages, nil
}
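
// For example, buildPrompt(nil, tracer.SSH, "whoami") yields (sketch): the
// Linux-terminal system prompt, the few-shot pair "pwd" → "/home/user",
// and finally the user message "whoami".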

// openAICaller sends the prompt to the OpenAI chat-completions API and
// returns the content of the first choice.
func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error) {
	var err error

	requestJson, err := json.Marshal(Request{
		Model:    "gpt-4o",
		Messages: messages,
		Stream:   false,
	})
	if err != nil {
		return "", err
	}

	if llmHoneypot.OpenAIKey == "" {
		return "", errors.New("openAIKey is empty")
	}

	if llmHoneypot.Host == "" {
		llmHoneypot.Host = openAIGPTEndpoint
	}

	log.Debug(string(requestJson))
	response, err := llmHoneypot.client.R().
		SetHeader("Content-Type", "application/json").
		SetBody(requestJson).
		SetAuthToken(llmHoneypot.OpenAIKey).
		SetResult(&Response{}).
		Post(llmHoneypot.Host)
	if err != nil {
		return "", err
	}

	log.Debug(response)
	if len(response.Result().(*Response).Choices) == 0 {
		return "", errors.New("no choices")
	}

	return response.Result().(*Response).Choices[0].Message.Content, nil
}
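
// Note the difference in response parsing between the two backends: OpenAI
// returns the content under Choices[0].Message, while Ollama returns it
// under Message directly; both decode into the shared Response type.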

// ollamaCaller sends the prompt to the Ollama chat API and returns the
// response message content.
func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error) {
	var err error

	requestJson, err := json.Marshal(Request{
		Model:    "llama3",
		Messages: messages,
		Stream:   false,
	})
	if err != nil {
		return "", err
	}

	if llmHoneypot.Host == "" {
		llmHoneypot.Host = ollamaEndpoint
	}

	log.Debug(string(requestJson))
	response, err := llmHoneypot.client.R().
		SetHeader("Content-Type", "application/json").
		SetBody(requestJson).
		SetResult(&Response{}).
		Post(llmHoneypot.Host)
	if err != nil {
		return "", err
	}

	log.Debug(response)
	return response.Result().(*Response).Message.Content, nil
}

// ExecuteModel builds the prompt, honoring a custom system prompt when one
// is configured, and dispatches it to the selected model backend.
func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
	var err error
	var prompt []Message

	if llmHoneypot.CustomPrompt != "" {
		prompt = append(prompt, Message{
			Role:    SYSTEM.String(),
			Content: llmHoneypot.CustomPrompt,
		})
		prompt = append(prompt, Message{
			Role:    USER.String(),
			Content: command,
		})
	} else {
		prompt, err = buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
	}

	if err != nil {
		return "", err
	}

	switch llmHoneypot.Model {
	case LLAMA3:
		return llmHoneypot.ollamaCaller(prompt)
	case GPT4O:
		return llmHoneypot.openAICaller(prompt)
	default:
		return "", errors.New("no model selected")
	}
}