Feature: add support for llama, refactor yaml service interface (#115)

* refactor and add llama LLM support

* update readme

* improve code coverage
Mario Candela
2024-07-21 20:11:18 +02:00
committed by GitHub
parent 0af1a05ae9
commit 2088163b54
11 changed files with 511 additions and 275 deletions


@@ -3,6 +3,7 @@ package parser
import (
	"fmt"
	"github.com/mariocandela/beelzebub/v3/plugins"
	"os"
	"path/filepath"
	"strings"
@@ -50,6 +51,19 @@ type Prometheus struct {
type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
	Host            string `yaml:"host"`
	LLMModel        string `yaml:"llmModel"`
}

func FromString(llmModel string) (plugins.LLMModel, error) {
	switch llmModel {
	case "llama3":
		return plugins.LLAMA3, nil
	case "gpt4-o":
		return plugins.GPT4O, nil
	default:
		return -1, fmt.Errorf("model %s not found", llmModel)
	}
}
// BeelzebubServiceConfiguration is the struct that contains the configurations of the honeypot service
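
For reference, a minimal, hypothetical sketch of how calling code might use the new FromString helper to resolve the yaml llmModel value. The github.com/mariocandela/beelzebub/v3/parser import path is inferred from the plugins import shown above and is an assumption, as is the standalone program itself (it is not part of this commit):

package main

import (
	"fmt"

	"github.com/mariocandela/beelzebub/v3/parser"
)

func main() {
	// FromString maps the yaml llmModel string to a plugins.LLMModel
	// constant; "llama3" and "gpt4-o" are the only recognised values,
	// anything else returns the "model ... not found" error.
	model, err := parser.FromString("llama3")
	if err != nil {
		fmt.Println("unsupported llmModel:", err)
		return
	}
	fmt.Printf("resolved llmModel constant: %v\n", model)
}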