Mirror of https://github.com/splunk/DECEIVE.git (synced 2025-07-01 08:37:28 -04:00)
```diff
@@ -1,10 +1,9 @@
 import streamlit as st
 import json
 import base64
 import pandas as pd
 
 def is_base64(s):
-    """Check if a string is Base64-encoded and decode it if valid."""
+    """Check if a string is Base64-encoded."""
     try:
         decoded = base64.b64decode(s, validate=True)
         return decoded.decode("utf-8") if decoded else None
```
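The hunk ends before the function's error handling. Below is a minimal self-contained sketch of how such a helper could look in full; the except branch and the example strings are assumptions, since the diff does not show them:

```python
import base64

def is_base64(s: str):
    """Return decoded text if s is valid Base64-encoded UTF-8, otherwise None (sketch)."""
    try:
        decoded = base64.b64decode(s, validate=True)
        # Treat empty results as "not Base64" so blank fields pass through unchanged.
        return decoded.decode("utf-8") if decoded else None
    except (ValueError, UnicodeDecodeError):
        # Invalid Base64 characters/padding, or bytes that are not UTF-8 text.
        return None

print(is_base64("bHMgLWxh"))    # "ls -la"
print(is_base64("plain text"))  # None
```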
```diff
@@ -17,6 +16,7 @@ def process_logs(file):
     for line in file:
         try:
             log_entry = json.loads(line.decode("utf-8").strip())  # Decode and parse JSON
             # Scan each key in the log entry to detect Base64-encoded values
             for key, value in log_entry.items():
                 if isinstance(value, str):
                     decoded_value = is_base64(value)
```
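Only the top of the loop is visible here; what happens to decoded_value and how entries are collected is cut off. The following sketch shows the overall shape, reusing the is_base64 helper above. Replacing values in place and skipping malformed lines are assumptions (the real app reports JSON errors via st.error, as the next hunk shows):

```python
import json

def process_logs(file):
    """Parse a JSON-lines log file, decoding Base64 string values along the way (sketch)."""
    logs = []
    for line in file:
        text = line.decode("utf-8").strip() if isinstance(line, bytes) else line.strip()
        if not text:
            continue
        try:
            log_entry = json.loads(text)
        except json.JSONDecodeError:
            continue  # assumption: skip here; the app itself surfaces the error in the UI
        for key, value in log_entry.items():
            if isinstance(value, str):
                decoded_value = is_base64(value)
                if decoded_value is not None:
                    log_entry[key] = decoded_value  # assumption: replace with decoded text
        logs.append(log_entry)
    return logs
```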
```diff
@@ -27,25 +27,8 @@ def process_logs(file):
             st.error(f"Error decoding JSON: {e}")
     return logs
 
-def filter_logs(logs, query):
-    """Filters logs based on the search query."""
-    if not query:
-        return logs  # Return all logs if no search query
-
-    return [log for log in logs if any(query.lower() in str(value).lower() for value in log.values())]
-
-def group_by_task_name(logs):
-    """Groups logs by session (task name)."""
-    grouped_logs = {}
-    for log in logs:
-        task_name = log.get("task_name", "No Task Name")
-        if task_name not in grouped_logs:
-            grouped_logs[task_name] = []
-        grouped_logs[task_name].append(log)
-    return grouped_logs
-
 # Streamlit UI
-st.title("SSH Log Viewer with Search, Base64 Decoding & Session Grouping 🔍")
+st.title("SSH Log Viewer with Base64 Decoding 🔍")
 
 uploaded_file = st.file_uploader("Upload SSH Log JSON file", type=["log", "json"])
```
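The removed group_by_task_name helper is ordinary dictionary bucketing; here is an equivalent sketch with collections.defaultdict, included only to document what the deleted code did (the sample events are made up):

```python
from collections import defaultdict

def group_by_task_name(logs):
    """Group log entries by their "task_name" field, like the removed helper."""
    grouped = defaultdict(list)
    for log in logs:
        grouped[log.get("task_name", "No Task Name")].append(log)
    return dict(grouped)

# Hypothetical events: two sessions' worth of log entries.
events = [
    {"task_name": "session-1", "details": "login"},
    {"task_name": "session-1", "details": "ls -la"},
    {"task_name": "session-2", "details": "login"},
]
print(group_by_task_name(events))  # {'session-1': [... 2 entries ...], 'session-2': [... 1 entry ...]}
```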
```diff
@@ -54,25 +37,15 @@ if uploaded_file is not None:
 
     logs = process_logs(uploaded_file)  # Process file and decode Base64
 
-    # Search Feature
-    search_query = st.text_input("Search logs", placeholder="Enter search keyword (e.g., 'authentication', 'session')")
-
-    filtered_logs = filter_logs(logs, search_query)
-
-    # Group logs by session
-    grouped_logs = group_by_task_name(filtered_logs)
     # Display logs
     st.subheader("Formatted JSON Logs")
     st.json(logs, expanded=False)  # Show JSON in a collapsible section
 
-    # Dropdown for selecting a session
-    session_options = list(grouped_logs.keys())
-    selected_session = st.selectbox("Select a session", session_options)
-
-    # Display selected session logs in a table
-    if selected_session:
-        session_logs = grouped_logs[selected_session]
-        st.subheader(f"Logs for Session: {selected_session}")
-        st.dataframe(pd.DataFrame(session_logs))  # Show logs in a table
-    else:
-        st.warning("No logs found for the selected session.")
+    # Table view
+    if logs:
+        st.subheader("Log Table View")
+        st.dataframe(logs)  # Display structured logs in a table
 
 else:
     st.info("Please upload a JSON log file to view the logs.")
```
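For context, here is a hypothetical line in the JSON-lines shape the viewer appears to expect. The field names task_name, details, and interactive come from elsewhere in this diff; the values are purely illustrative:

```python
import base64
import json

# One illustrative log record: "details" holds the Base64-encoded user input,
# which the viewer's is_base64() pass turns back into readable text.
sample = {
    "task_name": "session-1",
    "interactive": False,
    "details": base64.b64encode(b"uname -a").decode("utf-8"),
}
print(json.dumps(sample))
```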
```diff
@@ -194,7 +194,7 @@ async def handle_client(process: asyncssh.SSHServerProcess, server: MySSHServer)
     if process.command:
         # Handle non-interactive command execution
         command = process.command
-        logger.info("User input", extra={"details": command.encode('utf-8').decode('utf-8'), "interactive": False})
+        logger.info("User input", extra={"details": b64encode(command.encode('utf-8')).decode('utf-8'), "interactive": False})
         llm_response = await with_message_history.ainvoke(
             {
                 "messages": [HumanMessage(content=command)],
```
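The changed line Base64-encodes the attacker's command before it reaches the JSON log, so arbitrary input becomes one safe, copy-pasteable token. A small round-trip sketch (the sample command is made up):

```python
from base64 import b64decode, b64encode

command = "cat /etc/passwd && id"  # hypothetical attacker input
logged = b64encode(command.encode("utf-8")).decode("utf-8")  # what ends up in "details"
print(logged)                                       # "Y2F0IC9ldGMvcGFzc3dkICYmIGlk"
print(b64decode(logged).decode("utf-8") == command) # True: the viewer recovers it exactly
```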