4 Commits

Author SHA1 Message Date
847e7bce48 No longer send an "ignore this message" at start time. 2025-06-13 10:43:52 -04:00
8c0c3eb81f format cleanup on the default prompt 2025-05-30 14:13:41 -04:00
681ab58750 Changed default LLM to 'gpt-4o-mini'
Only used if the model isn't specified in the config or on the command line
2025-04-25 09:28:08 -04:00
e738379fc4 Updated default temperature to 0.2
This is only used if no temperature is specified on the command line or in the config file.
2025-04-25 09:17:21 -04:00
2 changed files with 5 additions and 6 deletions

View File

@ -1,2 +1 @@
-You are a video game developer's system. Include realistic video game source
-and asset files.
+You are a video game developer's system. Include realistic video game source and asset files.

View File

@ -211,7 +211,7 @@ async def handle_client(process: asyncssh.SSHServerProcess, server: MySSHServer)
     # Handle interactive session
     llm_response = await with_message_history.ainvoke(
         {
-            "messages": [HumanMessage(content="ignore this message")],
+            "messages": [HumanMessage(content="")],
             "username": process.get_extra_info('username'),
             "interactive": True
         },
@ -308,10 +308,10 @@ def get_user_accounts() -> dict:
 def choose_llm(llm_provider: Optional[str] = None, model_name: Optional[str] = None):
     llm_provider_name = llm_provider or config['llm'].get("llm_provider", "openai")
     llm_provider_name = llm_provider_name.lower()
-    model_name = model_name or config['llm'].get("model_name", "gpt-3.5-turbo")
-    # Get temperature parameter from config, default to 0.7 if not specified
-    temperature = config['llm'].getfloat("temperature", 0.7)
+    model_name = model_name or config['llm'].get("model_name", "gpt-4o-mini")
+    # Get temperature parameter from config, default to 0.2 if not specified
+    temperature = config['llm'].getfloat("temperature", 0.2)
     if llm_provider_name == 'openai':
         llm_model = ChatOpenAI(