Mirror of https://github.com/jayofelony/pwnagotchi.git (synced 2025-07-01 18:37:27 -04:00)
Merge pull request #1 from dadav/fix/wifips
Fix/wifips: improvement in the plugin style and renamed to net-pos
@@ -33,7 +33,7 @@ def on_loaded():
     logging.info("AUTO-BACKUP: Successfully loaded.")


-def on_internet_available(display, config, log):
+def on_internet_available(agent):
     global STATUS

     if READY:
@@ -42,6 +42,8 @@ def on_internet_available(display, config, log):

         files_to_backup = " ".join(OPTIONS['files'])
         try:
+            display = agent.view()
+
             logging.info("AUTO-BACKUP: Backing up ...")
             display.set('status', 'Backing up ...')
             display.update()
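The backup routine above joins OPTIONS['files'] into a single space-separated string before running the backup. A minimal sketch of what that option might contain; only the key name 'files' is taken from the diff, the paths themselves are illustrative, not a recommended set:

# Hypothetical 'files' option for the auto-backup plugin; the paths are
# placeholders chosen for illustration only.
OPTIONS = {
    'files': [
        '/root/brain.nn',
        '/root/handshakes',
        '/etc/pwnagotchi/',
    ],
}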
@@ -23,13 +23,15 @@ def on_loaded():
     READY = True


-def on_internet_available(display, config, log):
+def on_internet_available(agent):
     global STATUS

     if READY:
         if STATUS.newer_then_days(OPTIONS['interval']):
             return

+        display = agent.view()
+
         try:
             display.set('status', 'Updating ...')
             display.update()
@@ -20,7 +20,7 @@ def on_loaded():


 # called in manual mode when there's internet connectivity
-def on_internet_available(ui, config, log):
+def on_internet_available(agent):
     pass


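Across this PR, every on_internet_available callback drops the old (display, config, log) tuple in favour of a single agent argument and pulls what it needs from it. A minimal sketch of the pattern the other plugins in this diff follow; the status message and log line are just illustrations, not part of any plugin here:

# Minimal sketch of the new agent-based callback style used throughout this PR.
import logging


def on_internet_available(agent):
    display = agent.view()              # UI handle, replaces the old `display`/`ui` argument
    config = agent.config()             # parsed config dict, replaces the old `config` argument
    last_session = agent.last_session   # replaces the old `log` argument

    logging.info("handshakes in last session: %d", last_session.handshakes)
    display.set('status', 'internet is back!')   # illustrative message
    display.update()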
@@ -8,9 +8,8 @@ import logging
 import json
 import os

-device = '/dev/ttyUSB0'
-speed = 19200
 running = False
+OPTIONS = dict()


 def on_loaded():
@@ -27,8 +26,8 @@ def on_ready(agent):
         except:
             pass

-        agent.run('set gps.device %s' % device)
-        agent.run('set gps.speed %d' % speed)
+        agent.run('set gps.device %s' % OPTIONS['device'])
+        agent.run('set gps.speed %d' % OPTIONS['speed'])
         agent.run('gps on')
         running = True
     else:
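With this change the gps plugin no longer hard-codes the serial device and baud rate; it reads them from its options. A minimal sketch of what those options might look like, reusing the values that were previously hard-coded at module level:

# Hypothetical options for the gps plugin; the values mirror the defaults
# removed in the hunk above ('/dev/ttyUSB0', 19200).
OPTIONS = {
    'device': '/dev/ttyUSB0',   # serial device passed to `set gps.device`
    'speed': 19200,             # baud rate passed to `set gps.speed`
}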
pwnagotchi/plugins/default/grid.py (new file, 163 lines)
@@ -0,0 +1,163 @@
__author__ = 'evilsocket@gmail.com'
__version__ = '1.0.0'
__name__ = 'grid'
__license__ = 'GPL3'
__description__ = 'This plugin signals the unit cryptographic identity and list of pwned networks to api.pwnagotchi.ai'

import os
import logging
import requests
import glob
import subprocess

import pwnagotchi
import pwnagotchi.utils as utils
from pwnagotchi.utils import WifiInfo, extract_from_pcap

OPTIONS = dict()
AUTH = utils.StatusFile('/root/.api-enrollment.json', data_format='json')
REPORT = utils.StatusFile('/root/.api-report.json', data_format='json')


def on_loaded():
    logging.info("grid plugin loaded.")


def get_api_token(last_session, keys):
    global AUTH

    if AUTH.newer_then_minutes(25) and AUTH.data is not None and 'token' in AUTH.data:
        return AUTH.data['token']

    if AUTH.data is None:
        logging.info("grid: enrolling unit ...")
    else:
        logging.info("grid: refreshing token ...")

    identity = "%s@%s" % (pwnagotchi.name(), keys.fingerprint)
    # sign the identity string to prove we own both keys
    _, signature_b64 = keys.sign(identity)

    api_address = 'https://api.pwnagotchi.ai/api/v1/unit/enroll'
    enrollment = {
        'identity': identity,
        'public_key': keys.pub_key_pem_b64,
        'signature': signature_b64,
        'data': {
            'duration': last_session.duration,
            'epochs': last_session.epochs,
            'train_epochs': last_session.train_epochs,
            'avg_reward': last_session.avg_reward,
            'min_reward': last_session.min_reward,
            'max_reward': last_session.max_reward,
            'deauthed': last_session.deauthed,
            'associated': last_session.associated,
            'handshakes': last_session.handshakes,
            'peers': last_session.peers,
            'uname': subprocess.getoutput("uname -a")
        }
    }

    r = requests.post(api_address, json=enrollment)
    if r.status_code != 200:
        raise Exception("(status %d) %s" % (r.status_code, r.json()))

    AUTH.update(data=r.json())

    logging.info("grid: done")

    return AUTH.data["token"]


def parse_pcap(filename):
    logging.info("grid: parsing %s ..." % filename)

    net_id = os.path.basename(filename).replace('.pcap', '')

    if '_' in net_id:
        # /root/handshakes/ESSID_BSSID.pcap
        essid, bssid = net_id.split('_')
    else:
        # /root/handshakes/BSSID.pcap
        essid, bssid = '', net_id

    it = iter(bssid)
    bssid = ':'.join([a + b for a, b in zip(it, it)])

    info = {
        WifiInfo.ESSID: essid,
        WifiInfo.BSSID: bssid,
    }

    try:
        info = extract_from_pcap(filename, [WifiInfo.BSSID, WifiInfo.ESSID])
    except Exception as e:
        logging.error("grid: %s" % e)

    return info[WifiInfo.ESSID], info[WifiInfo.BSSID]


def api_report_ap(last_session, keys, token, essid, bssid):
    while True:
        token = AUTH.data['token']
        logging.info("grid: reporting %s (%s)" % (essid, bssid))
        try:
            api_address = 'https://api.pwnagotchi.ai/api/v1/unit/report/ap'
            headers = {'Authorization': 'access_token %s' % token}
            report = {
                'essid': essid,
                'bssid': bssid,
            }
            r = requests.post(api_address, headers=headers, json=report)
            if r.status_code != 200:
                if r.status_code == 401:
                    logging.warning("token expired")
                    token = get_api_token(last_session, keys)
                    continue
                else:
                    raise Exception("(status %d) %s" % (r.status_code, r.text))
            else:
                return True
        except Exception as e:
            logging.error("grid: %s" % e)
            return False


def on_internet_available(agent):
    global REPORT

    try:
        config = agent.config()
        keys = agent.keypair()

        pcap_files = glob.glob(os.path.join(config['bettercap']['handshakes'], "*.pcap"))
        num_networks = len(pcap_files)
        reported = REPORT.data_field_or('reported', default=[])
        num_reported = len(reported)
        num_new = num_networks - num_reported

        if num_new > 0:
            if OPTIONS['report']:
                logging.info("grid: %d new networks to report" % num_new)
                token = get_api_token(agent.last_session, agent.keypair())

                for pcap_file in pcap_files:
                    net_id = os.path.basename(pcap_file).replace('.pcap', '')
                    do_skip = False
                    for skip in OPTIONS['exclude']:
                        skip = skip.lower()
                        net = net_id.lower()
                        if skip in net or skip.replace(':', '') in net:
                            do_skip = True
                            break

                    if net_id not in reported and not do_skip:
                        essid, bssid = parse_pcap(pcap_file)
                        if bssid:
                            if api_report_ap(agent.last_session, keys, token, essid, bssid):
                                reported.append(net_id)
                                REPORT.update(data={'reported': reported})
            else:
                logging.debug("grid: reporting disabled")

    except Exception as e:
        logging.exception("error while enrolling the unit")
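The filename-to-BSSID reconstruction in parse_pcap() pairs consecutive hex characters and joins them with colons. A minimal sketch of that transformation in isolation; the sample value is made up for illustration:

# Pairing trick used in parse_pcap(): consecutive characters are zipped
# into pairs and joined with ':'.
net_id = "deadbeef1337"          # hypothetical BSSID portion of a handshake filename

it = iter(net_id)
bssid = ':'.join([a + b for a, b in zip(it, it)])

print(bssid)                     # -> de:ad:be:ef:13:37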
pwnagotchi/plugins/default/net-pos.py (new file, 144 lines)
@@ -0,0 +1,144 @@
__author__ = 'zenzen san'
__version__ = '1.0.0'
__name__ = 'net-pos'
__license__ = 'GPL3'
__description__ = """Saves a json file with the access points with the strongest signal
whenever a handshake is captured.
When internet is available the files are converted into geo locations
using the Mozilla Location Service."""

import logging
import json
import os
import requests

MOZILLA_API_URL = 'https://location.services.mozilla.com/v1/geolocate?key={api}'
ALREADY_SAVED = None
SKIP = None
READY = False
OPTIONS = {}


def on_loaded():
    global ALREADY_SAVED
    global SKIP
    global READY

    SKIP = list()

    if 'api_key' not in OPTIONS or ('api_key' in OPTIONS and OPTIONS['api_key'] is None):
        logging.error("NET-POS: api_key isn't set. Can't use Mozilla's API.")
        return

    try:
        with open('/root/.net_pos_saved', 'r') as f:
            ALREADY_SAVED = f.read().splitlines()
    except OSError:
        logging.warning('NET-POS: No net-pos file found.')
        ALREADY_SAVED = []

    READY = True
    logging.info("net-pos plugin loaded.")


def _append_saved(path):
    to_save = list()
    if isinstance(path, str):
        to_save.append(path)
    elif isinstance(path, list):
        to_save += path
    else:
        raise TypeError("Expected list or str, got %s" % type(path))

    with open('/root/.net_pos_saved', 'a') as saved_file:
        for x in to_save:
            saved_file.write(x + "\n")


def on_internet_available(agent):
    global SKIP

    if READY:
        config = agent.config()
        display = agent.view()
        handshake_dir = config['bettercap']['handshakes']

        all_files = os.listdir(handshake_dir)
        all_np_files = [os.path.join(handshake_dir, filename)
                        for filename in all_files
                        if filename.endswith('.net-pos.json')]
        new_np_files = set(all_np_files) - set(ALREADY_SAVED) - set(SKIP)

        if new_np_files:
            logging.info("NET-POS: Found %d new net-pos files. Fetching positions ...", len(new_np_files))
            display.set('status', f"Found {len(new_np_files)} new net-pos files. Fetching positions ...")
            display.update(force=True)
            for idx, np_file in enumerate(new_np_files):

                geo_file = np_file.replace('.net-pos.json', '.geo.json')
                if os.path.exists(geo_file):
                    # we already have the position
                    ALREADY_SAVED.append(np_file)
                    _append_saved(np_file)
                    continue

                try:
                    geo_data = _get_geo_data(np_file)  # returns json obj
                except requests.exceptions.RequestException as req_e:
                    logging.error("NET-POS: %s", req_e)
                    SKIP.append(np_file)
                    continue
                except json.JSONDecodeError as js_e:
                    logging.error("NET-POS: %s", js_e)
                    SKIP.append(np_file)
                    continue
                except OSError as os_e:
                    logging.error("NET-POS: %s", os_e)
                    SKIP.append(np_file)
                    continue

                with open(geo_file, 'w+t') as sf:
                    json.dump(geo_data, sf)

                ALREADY_SAVED.append(np_file)
                _append_saved(np_file)

                display.set('status', f"Fetching positions ({idx + 1}/{len(new_np_files)})")
                display.update(force=True)


def on_handshake(agent, filename, access_point, client_station):
    netpos = _get_netpos(agent)
    netpos_filename = filename.replace('.pcap', '.net-pos.json')
    logging.info("NET-POS: Saving net-location to %s", netpos_filename)

    try:
        with open(netpos_filename, 'w+t') as fp:
            json.dump(netpos, fp)
    except OSError as os_e:
        logging.error("NET-POS: %s", os_e)


def _get_netpos(agent):
    aps = agent.get_access_points()
    netpos = {}
    netpos['wifiAccessPoints'] = list()
    # 6 seems a good number to save a wifi network's location
    for access_point in sorted(aps, key=lambda i: i['rssi'], reverse=True)[:6]:
        netpos['wifiAccessPoints'].append({'macAddress': access_point['mac'],
                                           'signalStrength': access_point['rssi']})
    return netpos


def _get_geo_data(path, timeout=30):
    geourl = MOZILLA_API_URL.format(api=OPTIONS['api_key'])

    try:
        with open(path, "r") as json_file:
            data = json.load(json_file)
    except json.JSONDecodeError as js_e:
        raise js_e
    except OSError as os_e:
        raise os_e

    try:
        result = requests.post(geourl,
                               json=data,
                               timeout=timeout)
        return result.json()
    except requests.exceptions.RequestException as req_e:
        raise req_e
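The .net-pos.json payload that _get_netpos() writes is already in the shape the Mozilla Location Service geolocate endpoint expects, so _get_geo_data() can POST it unchanged. A sketch of what one saved file and the corresponding response might look like; MAC addresses, signal values and coordinates are made up, and the response shape is only what the geolocate API is expected to return:

# Hypothetical contents of <handshake>.net-pos.json, as produced by _get_netpos():
net_pos = {
    "wifiAccessPoints": [
        {"macAddress": "de:ad:be:ef:13:37", "signalStrength": -45},
        {"macAddress": "c0:ff:ee:00:11:22", "signalStrength": -67},
    ]
}

# _get_geo_data() POSTs this document to the geolocate endpoint and stores the
# JSON answer in <handshake>.geo.json; the response is expected to look roughly like:
geo_result = {
    "location": {"lat": 51.0, "lng": 13.7},   # illustrative coordinates
    "accuracy": 30.0,
}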
@@ -55,14 +55,17 @@ def _upload_to_ohc(path, timeout=30):
         raise e


-def on_internet_available(display, config, log):
+def on_internet_available(agent):
     """
     Called in manual mode when there's internet connectivity
     """
     if READY:
+        display = agent.view()
+        config = agent.config()
+
         handshake_dir = config['bettercap']['handshakes']
         handshake_filenames = os.listdir(handshake_dir)
-        handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames]
+        handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if filename.endswith('.pcap')]
         handshake_new = set(handshake_paths) - set(ALREADY_UPLOADED)

         if handshake_new:
pwnagotchi/plugins/default/screen_refresh.py (new file, 24 lines)
@@ -0,0 +1,24 @@
__author__ = 'pwnagotcchi [at] rossmarks [dot] uk'
__version__ = '1.0.0'
__name__ = 'screen_refresh'
__license__ = 'GPL3'
__description__ = 'Refresh the e-ink display after X amount of updates'

import logging

OPTIONS = dict()
update_count = 0


def on_loaded():
    logging.info("Screen refresh plugin loaded")


def on_ui_update(ui):
    global update_count
    update_count += 1
    if update_count == OPTIONS['refresh_interval']:
        ui._init_display()
        ui.set('status', "Screen cleaned")
        logging.info("Screen refreshing")
        update_count = 0
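The plugin reads a single option, refresh_interval, which sets how many UI updates pass between full e-ink re-initialisations. A minimal sketch of how OPTIONS might be populated; the value 50 is an illustrative choice, not a recommended default:

# Hypothetical options as the plugin loader might inject them;
# only 'refresh_interval' is actually read by on_ui_update().
OPTIONS = {
    'refresh_interval': 50,   # full display re-init every 50 UI updates (illustrative value)
}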
@@ -14,8 +14,12 @@ def on_loaded():


 # called in manual mode when there's internet connectivity
-def on_internet_available(ui, config, log):
-    if log.is_new() and log.handshakes > 0:
+def on_internet_available(agent):
+    config = agent.config()
+    display = agent.view()
+    last_session = agent.last_session
+
+    if last_session.is_new() and last_session.handshakes > 0:
         try:
             import tweepy
         except ImportError:
@@ -26,20 +30,20 @@ def on_internet_available(ui, config, log):

         picture = '/dev/shm/pwnagotchi.png'

-        ui.on_manual_mode(log)
-        ui.update(force=True)
-        ui.image().save(picture, 'png')
-        ui.set('status', 'Tweeting...')
-        ui.update(force=True)
+        display.on_manual_mode(last_session)
+        display.update(force=True)
+        display.image().save(picture, 'png')
+        display.set('status', 'Tweeting...')
+        display.update(force=True)

         try:
             auth = tweepy.OAuthHandler(OPTIONS['consumer_key'], OPTIONS['consumer_secret'])
             auth.set_access_token(OPTIONS['access_token_key'], OPTIONS['access_token_secret'])
             api = tweepy.API(auth)

-            tweet = Voice(lang=config['main']['lang']).on_log_tweet(log)
+            tweet = Voice(lang=config['main']['lang']).on_last_session_tweet(last_session)
             api.update_with_media(filename=picture, status=tweet)
-            log.save_session_id()
+            last_session.save_session_id()

             logging.info("tweeted: %s" % tweet)
         except Exception as e:
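The tweepy credentials come from four plugin options referenced in the hunk above. A minimal sketch of the expected OPTIONS shape; the key names match the OPTIONS[...] lookups in the diff, the values are placeholders rather than real credentials:

# Hypothetical credential options read by the twitter plugin above.
OPTIONS = {
    'consumer_key': '<consumer-key>',
    'consumer_secret': '<consumer-secret>',
    'access_token_key': '<access-token-key>',
    'access_token_secret': '<access-token-secret>',
}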
pwnagotchi/plugins/default/wigle.py (new file, 278 lines)
@@ -0,0 +1,278 @@
__author__ = '33197631+dadav@users.noreply.github.com'
__version__ = '1.0.0'
__name__ = 'wigle'
__license__ = 'GPL3'
__description__ = 'This plugin automatically uploads collected wifis to wigle.net'

import os
import logging
import json
from io import StringIO
import csv
from datetime import datetime
import requests
from pwnagotchi.mesh.wifi import freq_to_channel
from pwnagotchi.utils import WifiInfo, FieldNotFoundError, extract_from_pcap

READY = False
ALREADY_UPLOADED = None
SKIP = None
OPTIONS = dict()

AKMSUITE_TYPES = {
    0x00: "Reserved",
    0x01: "802.1X",
    0x02: "PSK",
}


def _handle_packet(packet, result):
    """
    Analyze each packet and extract the data from Dot11 layers
    """
    from scapy.all import RadioTap, Dot11Elt, Dot11Beacon, rdpcap, Scapy_Exception, Dot11, Dot11ProbeResp, Dot11AssoReq, \
        Dot11ReassoReq, Dot11EltRSN, Dot11EltVendorSpecific, Dot11EltMicrosoftWPA

    if hasattr(packet, 'cap') and 'privacy' in packet.cap:
        # packet is encrypted
        if 'encryption' not in result:
            result['encryption'] = set()

    if packet.haslayer(Dot11Beacon):
        if packet.haslayer(Dot11Beacon) \
                or packet.haslayer(Dot11ProbeResp) \
                or packet.haslayer(Dot11AssoReq) \
                or packet.haslayer(Dot11ReassoReq):
            if 'bssid' not in result and hasattr(packet[Dot11], 'addr3'):
                result['bssid'] = packet[Dot11].addr3
            if 'essid' not in result and hasattr(packet[Dot11Elt], 'info'):
                result['essid'] = packet[Dot11Elt].info
            if 'channel' not in result and hasattr(packet[Dot11Elt:3], 'info'):
                result['channel'] = int(ord(packet[Dot11Elt:3].info))

    if packet.haslayer(RadioTap):
        if 'rssi' not in result and hasattr(packet[RadioTap], 'dBm_AntSignal'):
            result['rssi'] = packet[RadioTap].dBm_AntSignal
        if 'channel' not in result and hasattr(packet[RadioTap], 'ChannelFrequency'):
            result['channel'] = freq_to_channel(packet[RadioTap].ChannelFrequency)

    # see: https://fossies.org/linux/scapy/scapy/layers/dot11.py
    if packet.haslayer(Dot11EltRSN):
        if hasattr(packet[Dot11EltRSN], 'akm_suites'):
            auth = AKMSUITE_TYPES.get(packet[Dot11EltRSN].akm_suites[0].suite)
            result['encryption'].add(f"WPA2/{auth}")
        else:
            result['encryption'].add("WPA2")

    if packet.haslayer(Dot11EltVendorSpecific)\
            and (packet.haslayer(Dot11EltMicrosoftWPA)
                 or packet.info.startswith(b'\x00P\xf2\x01\x01\x00')):

        if hasattr(packet, 'akm_suites'):
            auth = AKMSUITE_TYPES.get(packet.akm_suites[0].suite)
            result['encryption'].add(f"WPA2/{auth}")
        else:
            result['encryption'].add("WPA2")
    # end see

    return result


def _analyze_pcap(pcap):
    """
    Iterate over the packets and extract data
    """
    from scapy.all import RadioTap, Dot11Elt, Dot11Beacon, rdpcap, Scapy_Exception, Dot11, Dot11ProbeResp, Dot11AssoReq, \
        Dot11ReassoReq, Dot11EltRSN, Dot11EltVendorSpecific, Dot11EltMicrosoftWPA

    result = dict()

    try:
        packets = rdpcap(pcap)
        for packet in packets:
            result = _handle_packet(packet, result)
    except Scapy_Exception as sc_e:
        raise sc_e

    return result


def on_loaded():
    """
    Gets called when the plugin gets loaded
    """
    global READY
    global ALREADY_UPLOADED
    global SKIP

    SKIP = list()

    if 'api_key' not in OPTIONS or ('api_key' in OPTIONS and OPTIONS['api_key'] is None):
        logging.error("WIGLE: api_key isn't set. Can't upload to wigle.net")
        return

    try:
        with open('/root/.wigle_uploads', 'r') as f:
            ALREADY_UPLOADED = f.read().splitlines()
    except OSError:
        logging.warning('WIGLE: No upload-file found.')
        ALREADY_UPLOADED = []

    READY = True


def _extract_gps_data(path):
    """
    Extract data from gps-file

    return json-obj
    """

    try:
        with open(path, 'r') as json_file:
            return json.load(json_file)
    except OSError as os_err:
        raise os_err
    except json.JSONDecodeError as json_err:
        raise json_err


def _format_auth(data):
    out = ""
    for auth in data:
        out = f"{out}[{auth}]"
    return out


def _transform_wigle_entry(gps_data, pcap_data):
    """
    Transform to wigle entry in file
    """
    dummy = StringIO()
    # write kismet header
    dummy.write("WigleWifi-1.4,appRelease=20190201,model=Kismet,release=2019.02.01.{},device=kismet,display=kismet,board=kismet,brand=kismet\n")
    dummy.write("MAC,SSID,AuthMode,FirstSeen,Channel,RSSI,CurrentLatitude,CurrentLongitude,AltitudeMeters,AccuracyMeters,Type")

    writer = csv.writer(dummy, delimiter=",", quoting=csv.QUOTE_NONE)
    writer.writerow([
        pcap_data[WifiInfo.BSSID],
        pcap_data[WifiInfo.ESSID],
        _format_auth(pcap_data[WifiInfo.ENCRYPTION]),
        datetime.strptime(gps_data['Updated'].rsplit('.')[0],
                          "%Y-%m-%dT%H:%M:%S").strftime('%Y-%m-%d %H:%M:%S'),
        pcap_data[WifiInfo.CHANNEL],
        pcap_data[WifiInfo.RSSI],
        gps_data['Latitude'],
        gps_data['Longitude'],
        gps_data['Altitude'],
        0,  # accuracy?
        'WIFI'])
    return dummy.getvalue()


def _send_to_wigle(lines, api_key, timeout=30):
    """
    Uploads the file to wigle-net
    """

    dummy = StringIO()

    for line in lines:
        dummy.write(f"{line}")

    dummy.seek(0)

    headers = {'Authorization': f"Basic {api_key}",
               'Accept': 'application/json'}
    data = {'donate': 'false'}
    payload = {'file': dummy, 'type': 'text/csv'}

    try:
        res = requests.post('https://api.wigle.net/api/v2/file/upload',
                            data=data,
                            headers=headers,
                            files=payload,
                            timeout=timeout)
        json_res = res.json()
        if not json_res['success']:
            raise requests.exceptions.RequestException(json_res['message'])
    except requests.exceptions.RequestException as re_e:
        raise re_e


def on_internet_available(agent):
    """
    Called in manual mode when there's internet connectivity
    """
    from scapy.all import RadioTap, Dot11Elt, Dot11Beacon, rdpcap, Scapy_Exception, Dot11, Dot11ProbeResp, Dot11AssoReq, \
        Dot11ReassoReq, Dot11EltRSN, Dot11EltVendorSpecific, Dot11EltMicrosoftWPA

    global ALREADY_UPLOADED
    global SKIP

    if READY:
        config = agent.config()
        display = agent.view()

        handshake_dir = config['bettercap']['handshakes']
        all_files = os.listdir(handshake_dir)
        all_gps_files = [os.path.join(handshake_dir, filename)
                         for filename in all_files
                         if filename.endswith('.gps.json')]
        new_gps_files = set(all_gps_files) - set(ALREADY_UPLOADED) - set(SKIP)

        if new_gps_files:
            logging.info("WIGLE: Internet connectivity detected. Uploading new handshakes to wigle.net")

            csv_entries = list()
            no_err_entries = list()

            for gps_file in new_gps_files:
                pcap_filename = gps_file.replace('.gps.json', '.pcap')

                if not os.path.exists(pcap_filename):
                    logging.error("WIGLE: Can't find pcap for %s", gps_file)
                    SKIP.append(gps_file)
                    continue

                try:
                    gps_data = _extract_gps_data(gps_file)
                except OSError as os_err:
                    logging.error("WIGLE: %s", os_err)
                    SKIP.append(gps_file)
                    continue
                except json.JSONDecodeError as json_err:
                    logging.error("WIGLE: %s", json_err)
                    SKIP.append(gps_file)
                    continue

                try:
                    pcap_data = extract_from_pcap(pcap_filename, [WifiInfo.BSSID,
                                                                  WifiInfo.ESSID,
                                                                  WifiInfo.ENCRYPTION,
                                                                  WifiInfo.CHANNEL,
                                                                  WifiInfo.RSSI])
                except FieldNotFoundError:
                    logging.error("WIGLE: Could not extract all information. Skipping %s", gps_file)
                    SKIP.append(gps_file)
                    continue
                except Scapy_Exception as sc_e:
                    logging.error("WIGLE: %s", sc_e)
                    SKIP.append(gps_file)
                    continue

                new_entry = _transform_wigle_entry(gps_data, pcap_data)
                csv_entries.append(new_entry)
                no_err_entries.append(gps_file)

            if csv_entries:
                display.set('status', "Uploading gps-data to wigle.net ...")
                display.update(force=True)
                try:
                    _send_to_wigle(csv_entries, OPTIONS['api_key'])
                    ALREADY_UPLOADED += no_err_entries
                    with open('/root/.wigle_uploads', 'a') as up_file:
                        for gps in no_err_entries:
                            up_file.write(gps + "\n")
                    logging.info("WIGLE: Successfully uploaded %d files", len(no_err_entries))
                except requests.exceptions.RequestException as re_e:
                    SKIP += no_err_entries
                    logging.error("WIGLE: Got an exception while uploading %s", re_e)
                except OSError as os_e:
                    SKIP += no_err_entries
                    logging.error("WIGLE: Got the following error: %s", os_e)
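For reference, _transform_wigle_entry() writes the two WigleWifi-1.4 header lines shown above and then a single data row per network via csv.writer. A sketch of what one such row might contain for a made-up network; every value below is illustrative, not taken from a real capture:

# Illustrative data row as written by csv.writer in _transform_wigle_entry();
# BSSID, SSID, timestamp, coordinates and RSSI are made up for this sketch.
example_row = [
    "de:ad:be:ef:13:37",      # MAC  (WifiInfo.BSSID)
    "ExampleNet",             # SSID (WifiInfo.ESSID)
    "[WPA2/PSK]",             # AuthMode, via _format_auth()
    "2019-10-01 12:34:56",    # FirstSeen, reformatted from gps_data['Updated']
    6,                        # Channel
    -45,                      # RSSI
    51.0,                     # CurrentLatitude
    13.7,                     # CurrentLongitude
    115.0,                    # AltitudeMeters
    0,                        # AccuracyMeters (hard-coded to 0 above)
    "WIFI",                   # Type
]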
@@ -54,14 +54,17 @@ def _upload_to_wpasec(path, timeout=30):
         raise e


-def on_internet_available(display, config, log):
+def on_internet_available(agent):
     """
     Called in manual mode when there's internet connectivity
     """
     if READY:
+        config = agent.config()
+        display = agent.view()
+
         handshake_dir = config['bettercap']['handshakes']
         handshake_filenames = os.listdir(handshake_dir)
-        handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames]
+        handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if filename.endswith('.pcap')]
         handshake_new = set(handshake_paths) - set(ALREADY_UPLOADED)

         if handshake_new: