Fix: mapping LLMModel for SSH inline, removed old comments on docker-compose.yml (#168)

Fix mapping LLMModel for SSH inline, removed old comments on docker-compose.yml
This commit is contained in:
Mario Candela
2025-02-20 22:41:28 +01:00
committed by GitHub
parent 44ec44ea5c
commit 8963bbc86d
2 changed files with 3 additions and 4 deletions

View File

@@ -3,17 +3,16 @@ version: "3.9"
services: services:
beelzebub: beelzebub:
build: . build: .
#network_mode: host # Not work on Mac OS
container_name: beelzebub container_name: beelzebub
restart: always restart: always
ports: # Remove me, if you use configuration network_mode: host ports:
- "22:22" - "22:22"
- "2222:2222" - "2222:2222"
- "8080:8080" - "8080:8080"
- "8081:8081" - "8081:8081"
- "80:80" - "80:80"
- "3306:3306" - "3306:3306"
- "2112:2112" # Prometheus openmetrics - "2112:2112" #Prometheus Open Metrics
environment: environment:
RABBITMQ_URI: ${RABBITMQ_URI} RABBITMQ_URI: ${RABBITMQ_URI}
volumes: volumes:

View File

@@ -57,7 +57,7 @@ func (sshStrategy *SSHStrategy) Init(beelzebubServiceConfiguration parser.Beelze
OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey, OpenAIKey: beelzebubServiceConfiguration.Plugin.OpenAISecretKey,
Protocol: tracer.SSH, Protocol: tracer.SSH,
Host: beelzebubServiceConfiguration.Plugin.Host, Host: beelzebubServiceConfiguration.Plugin.Host,
Model: beelzebubServiceConfiguration.Plugin.LLMProvider, Model: beelzebubServiceConfiguration.Plugin.LLMModel,
Provider: llmProvider, Provider: llmProvider,
CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt, CustomPrompt: beelzebubServiceConfiguration.Plugin.Prompt,
} }