Mirror of https://github.com/splunk/DECEIVE.git, synced 2025-07-02 00:57:26 -04:00
Compare commits: temperatur...main
4 commits

| Author | SHA1 | Date |
|---|---|---|
| | 847e7bce48 | |
| | 8c0c3eb81f | |
| | 681ab58750 | |
| | e738379fc4 | |
```diff
@@ -1,2 +1 @@
-You are a video game developer's system. Include realistic video game source
-and asset files.
+You are a video game developer's system. Include realistic video game source and asset files.
```
```diff
@@ -211,7 +211,7 @@ async def handle_client(process: asyncssh.SSHServerProcess, server: MySSHServer)
     # Handle interactive session
     llm_response = await with_message_history.ainvoke(
         {
-            "messages": [HumanMessage(content="ignore this message")],
+            "messages": [HumanMessage(content="")],
             "username": process.get_extra_info('username'),
             "interactive": True
         },
```
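For context on the hunk above: the `with_message_history` name and the `HumanMessage` import suggest a LangChain `RunnableWithMessageHistory` wrapping the model chain, with the running session transcript carried in the `messages` key. Below is a minimal sketch of that pattern under those assumptions; the prompt text, session store, and `demo` wrapper are illustrative and not taken from the repository.

```python
# Minimal sketch, assuming LangChain's RunnableWithMessageHistory; names are illustrative.
import asyncio

from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

# One in-memory history per session; a honeypot would key this per SSH connection.
_histories: dict[str, InMemoryChatMessageHistory] = {}

def get_session_history(session_id: str) -> InMemoryChatMessageHistory:
    return _histories.setdefault(session_id, InMemoryChatMessageHistory())

prompt = ChatPromptTemplate.from_messages([
    ("system", "Emulate a shell for user {username}. Interactive session: {interactive}"),
    MessagesPlaceholder(variable_name="messages"),
])

chain = prompt | ChatOpenAI(model="gpt-4o-mini", temperature=0.2)

# With no separate history key, prior messages are merged into the "messages" input.
with_message_history = RunnableWithMessageHistory(
    chain, get_session_history, input_messages_key="messages"
)

async def demo() -> None:
    llm_response = await with_message_history.ainvoke(
        {
            # An empty HumanMessage lets the accumulated history drive the reply,
            # mirroring the change in this commit.
            "messages": [HumanMessage(content="")],
            "username": "guest",
            "interactive": True,
        },
        config={"configurable": {"session_id": "demo-session"}},
    )
    print(llm_response.content)

asyncio.run(demo())
```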
```diff
@@ -308,10 +308,10 @@ def get_user_accounts() -> dict:
 def choose_llm(llm_provider: Optional[str] = None, model_name: Optional[str] = None):
     llm_provider_name = llm_provider or config['llm'].get("llm_provider", "openai")
     llm_provider_name = llm_provider_name.lower()
-    model_name = model_name or config['llm'].get("model_name", "gpt-3.5-turbo")
+    model_name = model_name or config['llm'].get("model_name", "gpt-4o-mini")

-    # Get temperature parameter from config, default to 0.7 if not specified
-    temperature = config['llm'].getfloat("temperature", 0.7)
+    # Get temperature parameter from config, default to 0.2 if not specified
+    temperature = config['llm'].getfloat("temperature", 0.2)

     if llm_provider_name == 'openai':
         llm_model = ChatOpenAI(
```
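The `.get(...)` and `.getfloat(...)` calls in this hunk imply that `config` is a `configparser.ConfigParser`, so the hard-coded second arguments act as fallbacks whenever the `[llm]` section omits a key. A small standalone sketch of that behavior, with an illustrative config body (the repository's actual config file is not shown here):

```python
# Sketch of how the fallbacks in choose_llm behave, assuming config is a
# configparser.ConfigParser (implied by the .getfloat call); values are illustrative.
import configparser

config = configparser.ConfigParser()
config.read_string("""
[llm]
llm_provider = openai
model_name = gpt-4o-mini
# temperature intentionally omitted
""")

provider = config['llm'].get("llm_provider", "openai")      # -> "openai"
model = config['llm'].get("model_name", "gpt-4o-mini")      # -> "gpt-4o-mini"
temperature = config['llm'].getfloat("temperature", 0.2)    # key missing -> 0.2

print(provider, model, temperature)
```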