diff --git a/SSH/config.ini.TEMPLATE b/SSH/config.ini.TEMPLATE
index b5b3b99..a1b666b 100644
--- a/SSH/config.ini.TEMPLATE
+++ b/SSH/config.ini.TEMPLATE
@@ -28,6 +28,10 @@ server_version_string = SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.3
 llm_provider = openai
 model_name = gpt-4o
 
+##### ollama llama3.3
+#llm_provider = ollama
+#model_name = llama3.3
+
 ##### Any model via Amazon Bedrock
 # Valid AWS model names can be found here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html
 #llm_provider = AWS
diff --git a/SSH/ssh_server.py b/SSH/ssh_server.py
index f2c3a25..342fdcb 100755
--- a/SSH/ssh_server.py
+++ b/SSH/ssh_server.py
@@ -18,6 +18,7 @@ from operator import itemgetter
 from langchain_openai import ChatOpenAI
 from langchain_aws import ChatBedrock, ChatBedrockConverse
 from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain_ollama import ChatOllama
 from langchain_core.messages import HumanMessage, SystemMessage, trim_messages
 from langchain_core.chat_history import BaseChatMessageHistory, InMemoryChatMessageHistory
 from langchain_core.runnables.history import RunnableWithMessageHistory
@@ -313,6 +314,10 @@ def choose_llm():
         llm_model = ChatOpenAI(
             model=model_name
         )
+    elif llm_provider_name == 'ollama':
+        llm_model = ChatOllama(
+            model=model_name
+        )
     elif llm_provider_name == 'aws':
         llm_model = ChatBedrockConverse(
             model=model_name,
diff --git a/requirements.txt b/requirements.txt
index 018a664..e52919f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,7 +7,8 @@ langchain_community
 langchain_openai
 # For Google's Gemini models
 langchain_google_genai
-
+# For Ollama models
+langchain_ollama
 # For AWS
 langchain_aws
 transformers