mirror of https://github.com/mariocandela/beelzebub.git
synced 2025-07-01 18:47:26 -04:00
Feature: add support for llama, refactor yaml service interface (#115)
* refactor and add llama LLM support
* update readme
* improve code coverage
@@ -3,6 +3,7 @@ package parser

import (
	"fmt"
	"github.com/mariocandela/beelzebub/v3/plugins"
	"os"
	"path/filepath"
	"strings"
@@ -50,6 +51,19 @@ type Prometheus struct {

type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
	Host            string `yaml:"host"`
	LLMModel        string `yaml:"llmModel"`
}

func FromString(llmModel string) (plugins.LLMModel, error) {
	switch llmModel {
	case "llama3":
		return plugins.LLAMA3, nil
	case "gpt4-o":
		return plugins.GPT4O, nil
	default:
		return -1, fmt.Errorf("model %s not found", llmModel)
	}
}

// BeelzebubServiceConfiguration is the struct that contains the configurations of the honeypot service
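For context, the new LLMModel field is read from the service YAML next to the existing plugin settings, and FromString resolves that string to a plugins.LLMModel constant. Below is a minimal, self-contained sketch of how such a fragment could be decoded and validated. It is not part of this commit: the gopkg.in/yaml.v3 dependency, the example values (secret key, Ollama-style host), and the local fromString stand-in for parser.FromString are assumptions for illustration only.

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Mirrors the Plugin struct shown in the diff above.
type Plugin struct {
	OpenAISecretKey string `yaml:"openAISecretKey"`
	Host            string `yaml:"host"`
	LLMModel        string `yaml:"llmModel"`
}

// fromString mirrors the behaviour of parser.FromString: known model
// names map to a constant, anything else yields an error.
func fromString(llmModel string) (int, error) {
	switch llmModel {
	case "llama3":
		return 0, nil // stands in for plugins.LLAMA3
	case "gpt4-o":
		return 1, nil // stands in for plugins.GPT4O
	default:
		return -1, fmt.Errorf("model %s not found", llmModel)
	}
}

func main() {
	// Hypothetical plugin fragment of a beelzebub service configuration.
	raw := []byte(`
openAISecretKey: ""
host: "http://localhost:11434"
llmModel: "llama3"
`)

	var p Plugin
	if err := yaml.Unmarshal(raw, &p); err != nil {
		panic(err)
	}

	model, err := fromString(p.LLMModel)
	if err != nil {
		panic(err)
	}
	fmt.Printf("using model %q (constant %d) on host %q\n", p.LLMModel, model, p.Host)
}

With this mapping, an unrecognised value such as "gpt3" surfaces the "model ... not found" error when the configuration is parsed, rather than failing later at request time.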