Changed default LLM to 'gpt-4o-mini'

This default is only used if the model isn't specified in the config file or on the command line.
This commit is contained in:
David J. Bianco
2025-04-25 09:28:08 -04:00
parent e738379fc4
commit 681ab58750

View File

@ -308,7 +308,7 @@ def get_user_accounts() -> dict:
def choose_llm(llm_provider: Optional[str] = None, model_name: Optional[str] = None):
    llm_provider_name = llm_provider or config['llm'].get("llm_provider", "openai")
    llm_provider_name = llm_provider_name.lower()
-   model_name = model_name or config['llm'].get("model_name", "gpt-3.5-turbo")
+   model_name = model_name or config['llm'].get("model_name", "gpt-4o-mini")
    # Get temperature parameter from config, default to 0.2 if not specified
    temperature = config['llm'].getfloat("temperature", 0.2)