mirror of https://github.com/splunk/DECEIVE.git
synced 2025-07-01 16:47:28 -04:00
@@ -28,6 +28,10 @@ server_version_string = SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.3
 llm_provider = openai
 model_name = gpt-4o
 
+##### ollama llama3
+#llm_provider = ollama
+#model_name = llama3.3
+
 ##### Any model via Amazon Bedrock
 # Valid AWS model names can be found here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html
 #llm_provider = AWS
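
The hunk above adds a commented-out ollama/llama3 option alongside the existing OpenAI and Bedrock settings. Below is a minimal sketch of how these keys could be consumed, assuming the honeypot reads its settings with Python's configparser; the "[llm]" section name and the "config.ini" file name are assumptions for illustration, not taken from the diff.

# Illustrative only: read the provider settings shown in the hunk above.
# The section name "llm" and the file name "config.ini" are assumptions.
import configparser

config = configparser.ConfigParser()
config.read("config.ini")

llm_provider = config["llm"].get("llm_provider", "openai")  # e.g. "openai", "ollama", or "AWS"
model_name = config["llm"].get("model_name", "gpt-4o")      # e.g. "llama3.3" when using ollama

print(f"Using provider {llm_provider} with model {model_name}")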
@@ -18,6 +18,7 @@ from operator import itemgetter
 from langchain_openai import ChatOpenAI
 from langchain_aws import ChatBedrock, ChatBedrockConverse
 from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain_ollama import ChatOllama
 from langchain_core.messages import HumanMessage, SystemMessage, trim_messages
 from langchain_core.chat_history import BaseChatMessageHistory, InMemoryChatMessageHistory
 from langchain_core.runnables.history import RunnableWithMessageHistory
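
For context on the new import, here is a small standalone sketch (not code from this repository) of how ChatOllama is typically invoked together with the langchain_core message types already imported above. It assumes a local Ollama server is running and the llama3.3 model has been pulled.

# Standalone sketch, not DECEIVE code: invoke the newly imported ChatOllama
# with the SystemMessage/HumanMessage types used elsewhere in this file.
# Assumes `ollama serve` is running locally and `ollama pull llama3.3` was done.
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_ollama import ChatOllama

llm = ChatOllama(model="llama3.3")
response = llm.invoke([
    SystemMessage(content="You are emulating a Linux shell; reply only with command output."),
    HumanMessage(content="uname -a"),
])
print(response.content)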
@@ -313,6 +314,10 @@ def choose_llm():
         llm_model = ChatOpenAI(
             model=model_name
         )
+    elif llm_provider_name == 'ollama':
+        llm_model = ChatOllama(
+            model=model_name
+        )
     elif llm_provider_name == 'aws':
         llm_model = ChatBedrockConverse(
             model=model_name,
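
Taken together, the hunk above slots an ollama branch into the existing provider dispatch. A condensed sketch of that pattern follows; it is simplified for illustration and omits the extra options the repository's full choose_llm() passes to the other providers.

# Condensed sketch of the provider-dispatch pattern extended by the hunk above.
# Simplified for illustration; not the repository's full choose_llm().
from langchain_aws import ChatBedrockConverse
from langchain_ollama import ChatOllama
from langchain_openai import ChatOpenAI

def choose_llm(llm_provider_name: str, model_name: str):
    llm_provider_name = llm_provider_name.lower()
    if llm_provider_name == 'openai':
        llm_model = ChatOpenAI(model=model_name)
    elif llm_provider_name == 'ollama':
        llm_model = ChatOllama(model=model_name)
    elif llm_provider_name == 'aws':
        llm_model = ChatBedrockConverse(model=model_name)
    else:
        raise ValueError(f"Unsupported LLM provider: {llm_provider_name}")
    return llm_model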
@@ -7,7 +7,8 @@ langchain_community
 langchain_openai
 # For Google's Gemini models
 langchain_google_genai
+# For Ollama models
+langchain_ollama
 # For AWS
 langchain_aws
 transformers
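
Since each provider's LangChain integration is a separate dependency, a startup sanity check can catch a missing package early. The sketch below is illustrative only, is not part of the repository, and covers just the providers named in the hunks above.

# Illustrative startup check (not from the repo): verify that the package
# backing the configured llm_provider is actually installed.
import importlib

PROVIDER_PACKAGES = {
    "openai": "langchain_openai",
    "ollama": "langchain_ollama",
    "aws": "langchain_aws",
}

def ensure_provider_installed(provider: str) -> None:
    package = PROVIDER_PACKAGES.get(provider.lower())
    if package is None:
        raise ValueError(f"Unsupported LLM provider: {provider}")
    try:
        importlib.import_module(package)
    except ImportError as exc:
        raise SystemExit(f"Missing dependency '{package}'; install it via requirements.txt") from exc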