Compare commits


18 Commits

SHA1 Message Date
cd284877cf fix typo README.md 2024-07-21 20:15:09 +02:00
2088163b54 Feature: add support for llama, refactor yaml service interface (#115)
* refactor and add llama LLM support

* update readme

* improve code coverage
2024-07-21 20:11:18 +02:00
0af1a05ae9 Bump golang.org/x/crypto from 0.24.0 to 0.25.0 (#113)
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.24.0 to 0.25.0.
- [Commits](https://github.com/golang/crypto/compare/v0.24.0...v0.25.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-07-08 09:01:31 +02:00
5086f5ba08 Update README.md
Signed-off-by: Mario Candela <mario.candela.personal@gmail.com>
2024-06-26 23:22:56 +02:00
e1f82db2d0 Update README.md
Signed-off-by: Mario Candela <mario.candela.personal@gmail.com>
2024-06-23 20:04:43 +02:00
59f40a166b Feat: Improve LLM SSH Honeypot (#112)
* add LLM Honeypot HTTP Server

* improve unit test code coverage

* integrate LLM plugin into http honeypot strategy

* improve code coverage

* fix typos

* improve OpenAI plugin with gpt-4, adapt new API and map new object

* improve LLM SSH honeypot, fix updated README.md
2024-06-23 16:00:31 +02:00
93d7804ba3 Feat: add LLM Honeypot HTTP Server (#110)
* add LLM Honeypot HTTP Server

* improve unit test code coverage

* integrate LLM plugin into http honeypot strategy

* improve code coverage

* fix typos

* improve OpenAI plugin with gpt-4, adapt new API and map new object
2024-06-23 10:55:06 +02:00
24b4153e77 Bump golang.org/x/crypto from 0.23.0 to 0.24.0 (#109)
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.23.0 to 0.24.0.
- [Commits](https://github.com/golang/crypto/compare/v0.23.0...v0.24.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-05 09:08:16 +02:00
1d90c83678 Bump github.com/go-resty/resty/v2 from 2.13.0 to 2.13.1 (#108) 2024-05-13 07:46:43 +02:00
67829655f4 Bump github.com/go-resty/resty/v2 from 2.12.0 to 2.13.0 (#106)
Bumps [github.com/go-resty/resty/v2](https://github.com/go-resty/resty) from 2.12.0 to 2.13.0.
- [Release notes](https://github.com/go-resty/resty/releases)
- [Commits](https://github.com/go-resty/resty/compare/v2.12.0...v2.13.0)

---
updated-dependencies:
- dependency-name: github.com/go-resty/resty/v2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-10 09:03:59 +02:00
9ad21e138b Bump github.com/prometheus/client_golang from 1.19.0 to 1.19.1 (#107)
Bumps [github.com/prometheus/client_golang](https://github.com/prometheus/client_golang) from 1.19.0 to 1.19.1.
- [Release notes](https://github.com/prometheus/client_golang/releases)
- [Changelog](https://github.com/prometheus/client_golang/blob/main/CHANGELOG.md)
- [Commits](https://github.com/prometheus/client_golang/compare/v1.19.0...v1.19.1)

---
updated-dependencies:
- dependency-name: github.com/prometheus/client_golang
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-10 09:03:36 +02:00
8ab11e6ac2 Bump github.com/rabbitmq/amqp091-go from 1.9.0 to 1.10.0 (#105)
Bumps [github.com/rabbitmq/amqp091-go](https://github.com/rabbitmq/amqp091-go) from 1.9.0 to 1.10.0.
- [Release notes](https://github.com/rabbitmq/amqp091-go/releases)
- [Changelog](https://github.com/rabbitmq/amqp091-go/blob/main/CHANGELOG.md)
- [Commits](https://github.com/rabbitmq/amqp091-go/compare/v1.9.0...v1.10.0)

---
updated-dependencies:
- dependency-name: github.com/rabbitmq/amqp091-go
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-09 17:05:51 +02:00
965942609d Bump golang.org/x/crypto from 0.22.0 to 0.23.0 (#104)
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.22.0 to 0.23.0.
- [Commits](https://github.com/golang/crypto/compare/v0.22.0...v0.23.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-05-07 09:10:44 +02:00
b8d77983ee Bump golang.org/x/net from 0.22.0 to 0.23.0 (#102)
Bumps [golang.org/x/net](https://github.com/golang/net) from 0.22.0 to 0.23.0.
- [Commits](https://github.com/golang/net/compare/v0.22.0...v0.23.0)

---
updated-dependencies:
- dependency-name: golang.org/x/net
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-04-19 15:46:12 +02:00
b332f85230 Bump golang.org/x/crypto from 0.21.0 to 0.22.0 (#101)
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.21.0 to 0.22.0.
- [Commits](https://github.com/golang/crypto/compare/v0.21.0...v0.22.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-04-05 07:46:16 +02:00
b1de020de8 Feat: Improve OpenAI plugin: model and prompt (#100)
* Change OpenAI model and prompt.

---------

Co-authored-by: mariocandela <mario.candela.personal@gmail.com>
2024-04-03 08:20:52 +02:00
05b49051db Bump github.com/gliderlabs/ssh from 0.3.6 to 0.3.7 (#99)
Bumps [github.com/gliderlabs/ssh](https://github.com/gliderlabs/ssh) from 0.3.6 to 0.3.7.
- [Release notes](https://github.com/gliderlabs/ssh/releases)
- [Commits](https://github.com/gliderlabs/ssh/compare/v0.3.6...v0.3.7)

---
updated-dependencies:
- dependency-name: github.com/gliderlabs/ssh
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-03-19 09:19:22 +01:00
3555ea9d3b Bump github.com/go-resty/resty/v2 from 2.11.0 to 2.12.0 (#98)
Bumps [github.com/go-resty/resty/v2](https://github.com/go-resty/resty) from 2.11.0 to 2.12.0.
- [Release notes](https://github.com/go-resty/resty/releases)
- [Commits](https://github.com/go-resty/resty/compare/v2.11.0...v2.12.0)

---
updated-dependencies:
- dependency-name: github.com/go-resty/resty/v2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-03-18 09:07:45 +01:00
14 changed files with 679 additions and 336 deletions


@ -8,13 +8,14 @@
## Overview
Beelzebub is an advanced honeypot framework designed to provide a highly secure environment for detecting and analyzing cyber attacks. It offers a low code approach for easy implementation and utilizes virtualization techniques powered by OpenAI Generative Pre-trained Transformer.
Beelzebub is an advanced honeypot framework designed to provide a highly secure environment for detecting and analyzing cyber attacks. It offers a low code approach for easy implementation and uses AI to mimic the behavior of a high-interaction honeypot.
<img src="https://beelzebub.netlify.app/go-beelzebub.png" alt="Beelzebub Logo" width="200"/>
## OpenAI GPT Integration
## LLM Honeypot
[![asciicast](https://asciinema.org/a/665295.svg)](https://asciinema.org/a/665295)
Learn how to integrate Beelzebub with OpenAI GPT-3 by referring to our comprehensive guide on Medium: [Medium Article](https://medium.com/@mario.candela.personal/how-to-build-a-highly-effective-honeypot-with-beelzebub-and-chatgpt-a2f0f05b3e1)
## Telegram Bot for Real-Time Attacks
@ -103,7 +104,8 @@ $ make test.dependencies.down
Beelzebub offers a wide range of features to enhance your honeypot environment:
- OpenAI Generative Pre-trained Transformer act as Linux virtualization
- Support for Ollama
- Support for OpenAI
- SSH Honeypot
- HTTP Honeypot
- TCP Honeypot
@ -209,22 +211,44 @@ commands:
#### Example SSH Honeypot
###### Honeypot with GPT-3 on Port 2222
###### Honeypot LLM Honeypots
Example with OpenAI GPT-4:
```yaml
apiVersion: "v1"
protocol: "ssh"
address: ":2222"
description: "SSH interactive ChatGPT"
description: "SSH interactive OpenAI GPT-4"
commands:
- regex: "^(.+)$"
plugin: "OpenAIGPTLinuxTerminal"
plugin: "LLMHoneypot"
serverVersion: "OpenSSH"
serverName: "ubuntu"
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
deadlineTimeoutSeconds: 60
plugin:
openAPIChatGPTSecretKey: "Your OpenAI Secret Key"
llmModel: "gpt4-o"
openAISecretKey: "sk-proj-123456"
```
Example with Ollama Llama3:
```yaml
apiVersion: "v1"
protocol: "ssh"
address: ":2222"
description: "SSH Ollama Llama3"
commands:
- regex: "^(.+)$"
plugin: "LLMHoneypot"
serverVersion: "OpenSSH"
serverName: "ubuntu"
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
deadlineTimeoutSeconds: 60
plugin:
llmModel: "llama3"
host: "http://example.com/api/chat" #default http://localhost:11434/api/chat
```
###### SSH Honeypot on Port 22
@ -261,8 +285,6 @@ passwordRegex: "^(root|qwerty|Smoker666)$"
deadlineTimeoutSeconds: 60
```
[![asciicast](https://asciinema.org/a/604522.svg)](https://asciinema.org/a/604522)
## Roadmap
Our future plans for Beelzebub include developing it into a robust PaaS platform.


@ -18,41 +18,9 @@ commands:
- "Server: Apache/2.4.53 (Debian)"
- "X-Powered-By: PHP/7.4.29"
statusCode: 200
- regex: "^(/wp-login.php|/wp-admin)$"
handler:
<html>
<header>
<title>Wordpress 6 test page</title>
</header>
<body>
<form action="" method="post">
<label for="uname"><b>Username</b></label>
<input type="text" placeholder="Enter Username" name="uname" required>
<label for="psw"><b>Password</b></label>
<input type="password" placeholder="Enter Password" name="psw" required>
<button type="submit">Login</button>
</form>
</body>
</html>
headers:
- "Content-Type: text/html"
- "Server: Apache/2.4.53 (Debian)"
- "X-Powered-By: PHP/7.4.29"
statusCode: 200
- regex: "^.*$"
handler:
<html>
<header>
<title>404</title>
</header>
<body>
<h1>Not found!</h1>
</body>
</html>
headers:
- "Content-Type: text/html"
- "Server: Apache/2.4.53 (Debian)"
- "X-Powered-By: PHP/7.4.29"
statusCode: 404
plugin: "LLMHoneypot"
statusCode: 200
plugin:
llmModel: "gpt4-o"
openAISecretKey: "sk-proj-123456"
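Pieced together from the keys in this hunk and the SSH examples in the README diff above, a complete HTTP LLM honeypot service definition would look roughly like the following sketch (the address, regex, and secret key are illustrative placeholders, not part of the diff):

```yaml
apiVersion: "v1"
protocol: "http"
address: ":8080"
description: "HTTP LLM honeypot"
commands:
  - regex: "^.*$"            # route every request to the LLM plugin
    plugin: "LLMHoneypot"
    statusCode: 200
plugin:
  llmModel: "gpt4-o"         # or "llama3" for the Ollama backend
  openAISecretKey: "sk-proj-123456"
```

The HTTP strategy change later in this diff builds the model input as "<METHOD> <REQUEST-URI>" and falls back to a plain "404 Not Found!" body if the model lookup or call fails.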


@ -4,10 +4,10 @@ address: ":2222"
description: "SSH interactive ChatGPT"
commands:
- regex: "^(.+)$"
plugin: "OpenAIGPTLinuxTerminal"
plugin: "LLMHoneypot"
serverVersion: "OpenSSH"
serverName: "ubuntu"
passwordRegex: "^(root|qwerty|Smoker666|123456|jenkins|minecraft|sinus|alex|postgres|Ly123456)$"
deadlineTimeoutSeconds: 60
deadlineTimeoutSeconds: 6000
plugin:
openAPIChatGPTSecretKey: ""
llmModel: "llama3"

go.mod (17 lines changed)

@ -3,16 +3,16 @@ module github.com/mariocandela/beelzebub/v3
go 1.20
require (
github.com/gliderlabs/ssh v0.3.6
github.com/go-resty/resty/v2 v2.11.0
github.com/gliderlabs/ssh v0.3.7
github.com/go-resty/resty/v2 v2.13.1
github.com/google/uuid v1.6.0
github.com/jarcoal/httpmock v1.3.1
github.com/melbahja/goph v1.4.0
github.com/prometheus/client_golang v1.19.0
github.com/rabbitmq/amqp091-go v1.9.0
github.com/prometheus/client_golang v1.19.1
github.com/rabbitmq/amqp091-go v1.10.0
github.com/sirupsen/logrus v1.9.3
github.com/stretchr/testify v1.9.0
golang.org/x/crypto v0.21.0
golang.org/x/crypto v0.25.0
gopkg.in/yaml.v3 v3.0.1
)
@ -22,14 +22,15 @@ require (
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/kr/fs v0.1.0 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pkg/sftp v1.13.5 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.5.0 // indirect
github.com/prometheus/common v0.48.0 // indirect
github.com/prometheus/procfs v0.12.0 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/sys v0.18.0 // indirect
golang.org/x/term v0.18.0 // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/sys v0.22.0 // indirect
golang.org/x/term v0.22.0 // indirect
google.golang.org/protobuf v1.33.0 // indirect
)

go.sum (62 lines changed)

@ -4,13 +4,14 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gliderlabs/ssh v0.3.6 h1:ZzjlDa05TcFRICb3anf/dSPN3ewz1Zx6CMLPWgkm3b8=
github.com/gliderlabs/ssh v0.3.6/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8=
github.com/go-resty/resty/v2 v2.11.0 h1:i7jMfNOJYMp69lq7qozJP+bjgzfAzeOhuGlyDrqxT/8=
github.com/go-resty/resty/v2 v2.11.0/go.mod h1:iiP/OpA0CkcL3IGt1O0+/SIItFUbkkyw5BGXiVdTu+A=
github.com/gliderlabs/ssh v0.3.7 h1:iV3Bqi942d9huXnzEF2Mt+CY9gLu8DNM4Obd+8bODRE=
github.com/gliderlabs/ssh v0.3.7/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8=
github.com/go-resty/resty/v2 v2.13.1 h1:x+LHXBI2nMB1vqndymf26quycC4aggYJ7DECYbiz03g=
github.com/go-resty/resty/v2 v2.13.1/go.mod h1:GznXlLxkq6Nh4sU59rPmUw3VtgpO3aS96ORAI6Q7d+0=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@ -18,11 +19,9 @@ github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInw
github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/maxatome/go-testdeep v1.12.0 h1:Ql7Go8Tg0C1D/uMMX59LAoYK7LffeJQ6X2T04nTH68g=
github.com/melbahja/goph v1.4.0 h1:z0PgDbBFe66lRYl3v5dGb9aFgPy0kotuQ37QOwSQFqs=
github.com/melbahja/goph v1.4.0/go.mod h1:uG+VfK2Dlhk+O32zFrRlc3kYKTlV6+BtvPWd/kK7U68=
@ -32,36 +31,33 @@ github.com/pkg/sftp v1.13.5 h1:a3RLUqkyjYRtBTZJZ1VRrKbN3zhuPLlUc3sphVz81go=
github.com/pkg/sftp v1.13.5/go.mod h1:wHDZ0IZX6JcBYRK1TH9bcVq8G7TLpVHYIGJRFnmPfxg=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU=
github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k=
github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE=
github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho=
github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
github.com/prometheus/common v0.48.0 h1:QO8U2CdOzSn1BBsmXJXduaaW+dY/5QLjfB8svtSzKKE=
github.com/prometheus/common v0.48.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc=
github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
github.com/rabbitmq/amqp091-go v1.9.0 h1:qrQtyzB4H8BQgEuJwhmVQqVHB9O4+MNDJCCAcpc3Aoo=
github.com/rabbitmq/amqp091-go v1.9.0/go.mod h1:+jPrT9iY2eLjRaMSRHUhc3z14E/l85kv/f+6luSD3pc=
github.com/rabbitmq/amqp091-go v1.10.0 h1:STpn5XsHlHGcecLmMFCtg7mqq0RnD+zFr4uzukfVhBw=
github.com/rabbitmq/amqp091-go v1.10.0/go.mod h1:Hy4jKW5kQART1u+JkDTF9YYOQUHXqMuhrgxOEeS7G4o=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -70,9 +66,9 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -86,25 +82,28 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk=
golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
@ -113,7 +112,6 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=


@ -3,6 +3,7 @@ package parser
import (
"fmt"
"github.com/mariocandela/beelzebub/v3/plugins"
"os"
"path/filepath"
"strings"
@ -49,7 +50,20 @@ type Prometheus struct {
}
type Plugin struct {
OpenAPIChatGPTSecretKey string `yaml:"openAPIChatGPTSecretKey"`
OpenAISecretKey string `yaml:"openAISecretKey"`
Host string `yaml:"host"`
LLMModel string `yaml:"llmModel"`
}
func FromString(llmModel string) (plugins.LLMModel, error) {
switch llmModel {
case "llama3":
return plugins.LLAMA3, nil
case "gpt4-o":
return plugins.GPT4O, nil
default:
return -1, fmt.Errorf("model %s not found", llmModel)
}
}
// BeelzebubServiceConfiguration is the struct that contains the configurations of the honeypot service
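As the new FromString helper shows, the llmModel key currently accepts only "llama3" and "gpt4-o"; any other value is rejected with "model %s not found". A minimal service-level plugin block combining the three new keys might therefore look like this (placeholder values; host is optional and, when left empty, the plugin falls back to the per-model default endpoint defined in plugins/llm-integration.go):

```yaml
plugin:
  llmModel: "gpt4-o"                  # accepted values: "gpt4-o", "llama3"
  openAISecretKey: "sk-proj-123456"   # required by the OpenAI caller
  host: "https://api.openai.com/v1/chat/completions"  # optional endpoint override
```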


@ -2,6 +2,7 @@ package parser
import (
"errors"
"github.com/mariocandela/beelzebub/v3/plugins"
"os"
"testing"
@ -53,7 +54,12 @@ commands:
- regex: "wp-admin"
handler: "login"
headers:
- "Content-Type: text/html"`)
- "Content-Type: text/html"
plugin:
openAISecretKey: "qwerty"
llmModel: "llama3"
host: "localhost:1563"
`)
return beelzebubServiceConfiguration, nil
}
@ -112,10 +118,10 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
configurationsParser.gelAllFilesNameByDirNameDependency = mockReadDirValid
beelzebubServicesConfiguration, err := configurationsParser.ReadConfigurationsServices()
assert.Nil(t, err)
firstBeelzebubServiceConfiguration := beelzebubServicesConfiguration[0]
assert.Nil(t, err)
assert.Equal(t, firstBeelzebubServiceConfiguration.Protocol, "http")
assert.Equal(t, firstBeelzebubServiceConfiguration.ApiVersion, "v1")
assert.Equal(t, firstBeelzebubServiceConfiguration.Address, ":8080")
@ -125,6 +131,9 @@ func TestReadConfigurationsServicesValid(t *testing.T) {
assert.Equal(t, firstBeelzebubServiceConfiguration.Commands[0].Handler, "login")
assert.Equal(t, len(firstBeelzebubServiceConfiguration.Commands[0].Headers), 1)
assert.Equal(t, firstBeelzebubServiceConfiguration.Commands[0].Headers[0], "Content-Type: text/html")
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.OpenAISecretKey, "qwerty")
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.LLMModel, "llama3")
assert.Equal(t, firstBeelzebubServiceConfiguration.Plugin.Host, "localhost:1563")
}
func TestGelAllFilesNameByDirName(t *testing.T) {
@ -177,3 +186,16 @@ func TestReadFileBytesByFilePath(t *testing.T) {
assert.Equal(t, "", string(bytes))
}
func TestFromString(t *testing.T) {
model, err := FromString("llama3")
assert.Nil(t, err)
assert.Equal(t, plugins.LLAMA3, model)
model, err = FromString("gpt4-o")
assert.Nil(t, err)
assert.Equal(t, plugins.GPT4O, model)
model, err = FromString("beelzebub-model")
assert.Errorf(t, err, "model beelzebub-model not found")
}


@ -36,7 +36,6 @@ func (beelzebubCloud *beelzebubCloud) SendEvent(event tracer.Event) (bool, error
SetHeader("Content-Type", "application/json").
SetBody(requestJson).
SetHeader("Authorization", beelzebubCloud.AuthToken).
SetResult(&gptResponse{}).
Post(beelzebubCloud.URI)
log.Debug(response)

plugins/llm-integration.go (new file, 217 lines)

@ -0,0 +1,217 @@
package plugins
import (
"encoding/json"
"errors"
"github.com/go-resty/resty/v2"
"github.com/mariocandela/beelzebub/v3/tracer"
log "github.com/sirupsen/logrus"
)
const (
systemPromptVirtualizeLinuxTerminal = "You will act as an Ubuntu Linux terminal. The user will type commands, and you are to reply with what the terminal should show. Your responses must be contained within a single code block. Do not provide explanations or type commands unless explicitly instructed by the user. Your entire response/output is going to consist of a simple text with \n for new line, and you will NOT wrap it within string md markers"
systemPromptVirtualizeHTTPServer = "You will act as an unsecure HTTP Server with multiple vulnerability like aws and git credentials stored into root http directory. The user will send HTTP requests, and you are to reply with what the server should show. Do not provide explanations or type commands unless explicitly instructed by the user."
LLMPluginName = "LLMHoneypot"
openAIGPTEndpoint = "https://api.openai.com/v1/chat/completions"
ollamaEndpoint = "http://localhost:11434/api/chat"
)
type LLMHoneypot struct {
Histories []Message
OpenAIKey string
client *resty.Client
Protocol tracer.Protocol
Model LLMModel
Host string
}
type Choice struct {
Message Message `json:"message"`
Index int `json:"index"`
FinishReason string `json:"finish_reason"`
}
type Response struct {
ID string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
Model string `json:"model"`
Choices []Choice `json:"choices"`
Message Message `json:"message"`
Usage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage"`
}
type Request struct {
Model string `json:"model"`
Messages []Message `json:"messages"`
Stream bool `json:"stream"`
}
type Message struct {
Role string `json:"role"`
Content string `json:"content"`
}
type Role int
const (
SYSTEM Role = iota
USER
ASSISTANT
)
func (role Role) String() string {
return [...]string{"system", "user", "assistant"}[role]
}
type LLMModel int
const (
LLAMA3 LLMModel = iota
GPT4O
)
func InitLLMHoneypot(config LLMHoneypot) *LLMHoneypot {
// Inject the dependencies
config.client = resty.New()
return &config
}
func buildPrompt(histories []Message, protocol tracer.Protocol, command string) ([]Message, error) {
var messages []Message
switch protocol {
case tracer.SSH:
messages = append(messages, Message{
Role: SYSTEM.String(),
Content: systemPromptVirtualizeLinuxTerminal,
})
messages = append(messages, Message{
Role: USER.String(),
Content: "pwd",
})
messages = append(messages, Message{
Role: ASSISTANT.String(),
Content: "/home/user",
})
for _, history := range histories {
messages = append(messages, history)
}
case tracer.HTTP:
messages = append(messages, Message{
Role: SYSTEM.String(),
Content: systemPromptVirtualizeHTTPServer,
})
messages = append(messages, Message{
Role: USER.String(),
Content: "GET /index.html",
})
messages = append(messages, Message{
Role: ASSISTANT.String(),
Content: "<html><body>Hello, World!</body></html>",
})
default:
return nil, errors.New("no prompt for protocol selected")
}
messages = append(messages, Message{
Role: USER.String(),
Content: command,
})
return messages, nil
}
func (llmHoneypot *LLMHoneypot) openAICaller(messages []Message) (string, error) {
var err error
requestJson, err := json.Marshal(Request{
Model: "gpt-4o",
Messages: messages,
Stream: false,
})
if err != nil {
return "", err
}
if llmHoneypot.OpenAIKey == "" {
return "", errors.New("openAIKey is empty")
}
if llmHoneypot.Host == "" {
llmHoneypot.Host = openAIGPTEndpoint
}
log.Debug(string(requestJson))
response, err := llmHoneypot.client.R().
SetHeader("Content-Type", "application/json").
SetBody(requestJson).
SetAuthToken(llmHoneypot.OpenAIKey).
SetResult(&Response{}).
Post(llmHoneypot.Host)
if err != nil {
return "", err
}
log.Debug(response)
if len(response.Result().(*Response).Choices) == 0 {
return "", errors.New("no choices")
}
return response.Result().(*Response).Choices[0].Message.Content, nil
}
func (llmHoneypot *LLMHoneypot) ollamaCaller(messages []Message) (string, error) {
var err error
requestJson, err := json.Marshal(Request{
Model: "llama3",
Messages: messages,
Stream: false,
})
if err != nil {
return "", err
}
if llmHoneypot.Host == "" {
llmHoneypot.Host = ollamaEndpoint
}
log.Debug(string(requestJson))
response, err := llmHoneypot.client.R().
SetHeader("Content-Type", "application/json").
SetBody(requestJson).
SetResult(&Response{}).
Post(llmHoneypot.Host)
if err != nil {
return "", err
}
log.Debug(response)
return response.Result().(*Response).Message.Content, nil
}
func (llmHoneypot *LLMHoneypot) ExecuteModel(command string) (string, error) {
var err error
prompt, err := buildPrompt(llmHoneypot.Histories, llmHoneypot.Protocol, command)
if err != nil {
return "", err
}
switch llmHoneypot.Model {
case LLAMA3:
return llmHoneypot.ollamaCaller(prompt)
case GPT4O:
return llmHoneypot.openAICaller(prompt)
default:
return "", errors.New("no model selected")
}
}


@ -0,0 +1,287 @@
package plugins
import (
"github.com/go-resty/resty/v2"
"github.com/jarcoal/httpmock"
"github.com/mariocandela/beelzebub/v3/tracer"
"github.com/stretchr/testify/assert"
"net/http"
"testing"
)
const SystemPromptLen = 4
func TestBuildPromptEmptyHistory(t *testing.T) {
//Given
var histories []Message
command := "pwd"
//When
prompt, err := buildPrompt(histories, tracer.SSH, command)
//Then
assert.Nil(t, err)
assert.Equal(t, SystemPromptLen, len(prompt))
}
func TestBuildPromptWithHistory(t *testing.T) {
//Given
var histories = []Message{
{
Role: "cat hello.txt",
Content: "world",
},
}
command := "pwd"
//When
prompt, err := buildPrompt(histories, tracer.SSH, command)
//Then
assert.Nil(t, err)
assert.Equal(t, SystemPromptLen+1, len(prompt))
}
func TestBuildExecuteModelFailValidation(t *testing.T) {
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "",
Protocol: tracer.SSH,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
_, err := openAIGPTVirtualTerminal.ExecuteModel("test")
assert.Equal(t, "openAIKey is empty", err.Error())
}
func TestBuildExecuteModelFailValidationStrategyType(t *testing.T) {
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "",
Protocol: tracer.TCP,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
_, err := openAIGPTVirtualTerminal.ExecuteModel("test")
assert.Equal(t, "no prompt for protocol selected", err.Error())
}
func TestBuildExecuteModelFailValidationModelType(t *testing.T) {
// Given
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
Protocol: tracer.SSH,
Model: 5,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
//When
_, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Errorf(t, err, "no model selected")
}
func TestBuildExecuteModelSSHWithResultsOpenAI(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{
{
Message: Message{
Role: SYSTEM.String(),
Content: "prova.txt",
},
},
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.SSH,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Nil(t, err)
assert.Equal(t, "prova.txt", str)
}
func TestBuildExecuteModelSSHWithResultsLLama(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", ollamaEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Message: Message{
Role: SYSTEM.String(),
Content: "prova.txt",
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
Protocol: tracer.SSH,
Model: LLAMA3,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Nil(t, err)
assert.Equal(t, "prova.txt", str)
}
func TestBuildExecuteModelSSHWithoutResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.SSH,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
_, err := openAIGPTVirtualTerminal.ExecuteModel("ls")
//Then
assert.Equal(t, "no choices", err.Error())
}
func TestBuildExecuteModelHTTPWithResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{
{
Message: Message{
Role: SYSTEM.String(),
Content: "[default]\nregion = us-west-2\noutput = json",
},
},
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.HTTP,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
//Then
assert.Nil(t, err)
assert.Equal(t, "[default]\nregion = us-west-2\noutput = json", str)
}
func TestBuildExecuteModelHTTPWithoutResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &Response{
Choices: []Choice{},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
llmHoneypot := LLMHoneypot{
Histories: make([]Message, 0),
OpenAIKey: "sdjdnklfjndslkjanfk",
Protocol: tracer.HTTP,
Model: GPT4O,
}
openAIGPTVirtualTerminal := InitLLMHoneypot(llmHoneypot)
openAIGPTVirtualTerminal.client = client
//When
_, err := openAIGPTVirtualTerminal.ExecuteModel("GET /.aws/credentials")
//Then
assert.Equal(t, "no choices", err.Error())
}


@ -1,119 +0,0 @@
package plugins
import (
"encoding/json"
"errors"
"fmt"
"strings"
log "github.com/sirupsen/logrus"
"github.com/go-resty/resty/v2"
)
const (
// Reference: https://www.engraved.blog/building-a-virtual-machine-inside/
promptVirtualizeLinuxTerminal = "I want you to act as a Linux terminal. I will type commands and you will reply with what the terminal should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. Do no write explanations. Do not type commands unless I instruct you to do so.\n\nA:pwd\n\nQ:/home/user\n\n"
ChatGPTPluginName = "OpenAIGPTLinuxTerminal"
openAIGPTEndpoint = "https://api.openai.com/v1/completions"
)
type History struct {
Input, Output string
}
type openAIGPTVirtualTerminal struct {
Histories []History
openAIKey string
client *resty.Client
}
type Choice struct {
Text string `json:"text"`
Index int `json:"index"`
Logprobs interface{} `json:"logprobs"`
FinishReason string `json:"finish_reason"`
}
type gptResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
Model string `json:"model"`
Choices []Choice `json:"choices"`
Usage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
} `json:"usage"`
}
type gptRequest struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Temperature int `json:"temperature"`
MaxTokens int `json:"max_tokens"`
TopP int `json:"top_p"`
FrequencyPenalty int `json:"frequency_penalty"`
PresencePenalty int `json:"presence_penalty"`
Stop []string `json:"stop"`
}
func Init(history []History, openAIKey string) *openAIGPTVirtualTerminal {
return &openAIGPTVirtualTerminal{
Histories: history,
openAIKey: openAIKey,
client: resty.New(),
}
}
func buildPrompt(histories []History, command string) string {
var sb strings.Builder
sb.WriteString(promptVirtualizeLinuxTerminal)
for _, history := range histories {
sb.WriteString(fmt.Sprintf("A:%s\n\nQ:%s\n\n", history.Input, history.Output))
}
// Append command to evaluate
sb.WriteString(fmt.Sprintf("A:%s\n\nQ:", command))
return sb.String()
}
func (openAIGPTVirtualTerminal *openAIGPTVirtualTerminal) GetCompletions(command string) (string, error) {
requestJson, err := json.Marshal(gptRequest{
Model: "text-davinci-003",
Prompt: buildPrompt(openAIGPTVirtualTerminal.Histories, command),
Temperature: 0,
MaxTokens: 100,
TopP: 1,
FrequencyPenalty: 0,
PresencePenalty: 0,
Stop: []string{"\n"},
})
if err != nil {
return "", err
}
if openAIGPTVirtualTerminal.openAIKey == "" {
return "", errors.New("openAIKey is empty")
}
response, err := openAIGPTVirtualTerminal.client.R().
SetHeader("Content-Type", "application/json").
SetBody(requestJson).
SetAuthToken(openAIGPTVirtualTerminal.openAIKey).
SetResult(&gptResponse{}).
Post(openAIGPTEndpoint)
if err != nil {
return "", err
}
log.Debug(response)
if len(response.Result().(*gptResponse).Choices) == 0 {
return "", errors.New("no choices")
}
return response.Result().(*gptResponse).Choices[0].Text, nil
}


@ -1,116 +0,0 @@
package plugins
import (
"github.com/go-resty/resty/v2"
"github.com/jarcoal/httpmock"
"github.com/stretchr/testify/assert"
"net/http"
"testing"
)
func TestBuildPromptEmptyHistory(t *testing.T) {
//Given
var histories []History
command := "pwd"
//When
prompt := buildPrompt(histories, command)
//Then
assert.Equal(t,
"I want you to act as a Linux terminal. I will type commands and you will reply with what the terminal should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. Do no write explanations. Do not type commands unless I instruct you to do so.\n\nA:pwd\n\nQ:/home/user\n\nA:pwd\n\nQ:",
prompt)
}
func TestBuildPromptWithHistory(t *testing.T) {
//Given
var histories = []History{
{
Input: "cat hello.txt",
Output: "world",
},
{
Input: "echo 1234",
Output: "1234",
},
}
command := "pwd"
//When
prompt := buildPrompt(histories, command)
//Then
assert.Equal(t,
"I want you to act as a Linux terminal. I will type commands and you will reply with what the terminal should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. Do no write explanations. Do not type commands unless I instruct you to do so.\n\nA:pwd\n\nQ:/home/user\n\nA:cat hello.txt\n\nQ:world\n\nA:echo 1234\n\nQ:1234\n\nA:pwd\n\nQ:",
prompt)
}
func TestBuildGetCompletionsFailValidation(t *testing.T) {
openAIGPTVirtualTerminal := Init(make([]History, 0), "")
_, err := openAIGPTVirtualTerminal.GetCompletions("test")
assert.Equal(t, "openAIKey is empty", err.Error())
}
func TestBuildGetCompletionsWithResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &gptResponse{
Choices: []Choice{
{
Text: "prova.txt",
},
},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
openAIGPTVirtualTerminal := Init(make([]History, 0), "sdjdnklfjndslkjanfk")
openAIGPTVirtualTerminal.client = client
//When
str, err := openAIGPTVirtualTerminal.GetCompletions("ls")
//Then
assert.Nil(t, err)
assert.Equal(t, "prova.txt", str)
}
func TestBuildGetCompletionsWithoutResults(t *testing.T) {
client := resty.New()
httpmock.ActivateNonDefault(client.GetClient())
defer httpmock.DeactivateAndReset()
// Given
httpmock.RegisterResponder("POST", openAIGPTEndpoint,
func(req *http.Request) (*http.Response, error) {
resp, err := httpmock.NewJsonResponse(200, &gptResponse{
Choices: []Choice{},
})
if err != nil {
return httpmock.NewStringResponse(500, ""), nil
}
return resp, nil
},
)
openAIGPTVirtualTerminal := Init(make([]History, 0), "sdjdnklfjndslkjanfk")
openAIGPTVirtualTerminal.client = client
//When
_, err := openAIGPTVirtualTerminal.GetCompletions("ls")
//Then
assert.Equal(t, "no choices", err.Error())
}


@ -3,6 +3,7 @@ package strategies
import (
"fmt"
"github.com/mariocandela/beelzebub/v3/parser"
"github.com/mariocandela/beelzebub/v3/plugins"
"github.com/mariocandela/beelzebub/v3/tracer"
"io"
"net/http"
@ -31,8 +32,40 @@ func (httpStrategy HTTPStrategy) Init(beelzebubServiceConfiguration parser.Beelz
}
if matched {
responseHTTPBody := command.Handler
if command.Plugin == plugins.LLMPluginName {
llmModel, err := parser.FromString(beelzebubServiceConfiguration.Plugin.LLMModel)
if err != nil {
log.Errorf("Error fromString: %s", err.Error())
responseHTTPBody = "404 Not Found!"
}
llmHoneypot := plugins.LLMHoneypot{
Histories: make([]plugins.Message, 0),
OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
Protocol: tracer.HTTP,
Host: beelzebubServiceConfiguration.Plugin.Host,
Model: llmModel,
}
llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)
command := fmt.Sprintf("%s %s", request.Method, request.RequestURI)
if completions, err := llmHoneypotInstance.ExecuteModel(command); err != nil {
log.Errorf("Error ExecuteModel: %s, %s", command, err.Error())
responseHTTPBody = "404 Not Found!"
} else {
responseHTTPBody = completions
}
}
setResponseHeaders(responseWriter, command.Headers, command.StatusCode)
fmt.Fprintf(responseWriter, command.Handler)
fmt.Fprintf(responseWriter, responseHTTPBody)
break
}
}


@ -42,7 +42,7 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
})
term := terminal.NewTerminal(sess, buildPrompt(sess.User(), beelzebubServiceConfiguration.ServerName))
var histories []plugins.History
var histories []plugins.Message
for {
commandInput, err := term.ReadLine()
if err != nil {
@ -62,16 +62,33 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
if matched {
commandOutput := command.Handler
if command.Plugin == plugins.ChatGPTPluginName {
openAIGPTVirtualTerminal := plugins.Init(histories, beelzebubServiceConfiguration.Plugin.OpenAPIChatGPTSecretKey)
if command.Plugin == plugins.LLMPluginName {
if commandOutput, err = openAIGPTVirtualTerminal.GetCompletions(commandInput); err != nil {
log.Errorf("Error GetCompletions: %s, %s", commandInput, err.Error())
llmModel, err := parser.FromString(beelzebubServiceConfiguration.Plugin.LLMModel)
if err != nil {
log.Errorf("Error fromString: %s", err.Error())
commandOutput = "command not found"
}
llmHoneypot := plugins.LLMHoneypot{
Histories: histories,
OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
Protocol: tracer.SSH,
Host: beelzebubServiceConfiguration.Plugin.Host,
Model: llmModel,
}
llmHoneypotInstance := plugins.InitLLMHoneypot(llmHoneypot)
if commandOutput, err = llmHoneypotInstance.ExecuteModel(commandInput); err != nil {
log.Errorf("Error ExecuteModel: %s, %s", commandInput, err.Error())
commandOutput = "command not found"
}
}
histories = append(histories, plugins.History{Input: commandInput, Output: commandOutput})
histories = append(histories, plugins.Message{Role: plugins.USER.String(), Content: commandInput})
histories = append(histories, plugins.Message{Role: plugins.ASSISTANT.String(), Content: commandOutput})
term.Write(append([]byte(commandOutput), '\n'))