Change repos

Signed-off-by: jayofelony <oudshoorn.jeroen@gmail.com>
This commit is contained in:
jayofelony
2024-02-25 10:42:47 +01:00
parent d20619340c
commit faa48b2752
922 changed files with 0 additions and 156850 deletions

View File

@ -1,147 +0,0 @@
import _thread
import glob
import importlib
import importlib.util
import logging
import os
import threading
import pwnagotchi.grid
default_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "default")
loaded = {}
database = {}
locks = {}
class Plugin:
    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Auto-register every Plugin subclass and pre-create one lock per callback.

        Instantiating the subclass here is what makes simply defining a
        plugin class enough to load it.
        """
        super().__init_subclass__(**kwargs)
        global loaded, locks

        name = cls.__module__.split('.')[0]
        instance = cls()
        logging.debug("loaded plugin %s as %s" % (name, instance))
        loaded[name] = instance

        # one lock per on_* callback so concurrent events serialize per handler
        for attr in instance.__dir__():
            if not attr.startswith('on_'):
                continue
            handler = getattr(instance, attr, None)
            if callable(handler):
                locks["%s::%s" % (name, attr)] = threading.Lock()
def toggle_plugin(name, enable=True):
    """
    Load or unload a plugin

    returns True if changed, otherwise False
    """
    import pwnagotchi
    from pwnagotchi.ui import view
    from pwnagotchi.utils import save_config

    global loaded, database

    # persist the new enabled/disabled state first
    if pwnagotchi.config:
        pwnagotchi.config['main']['plugins'].setdefault(name, dict())['enabled'] = enable
        save_config(pwnagotchi.config, '/etc/pwnagotchi/config.toml')

    # unload path: give the plugin a chance to clean up, then drop it
    if not enable and name in loaded:
        if getattr(loaded[name], 'on_unload', None):
            loaded[name].on_unload(view.ROOT)
        del loaded[name]
        return True

    # load path: only when known and not already loaded
    if enable and name in database and name not in loaded:
        load_from_file(database[name])
        if name in loaded and pwnagotchi.config and name in pwnagotchi.config['main']['plugins']:
            loaded[name].options = pwnagotchi.config['main']['plugins'][name]
        one(name, 'loaded')
        if pwnagotchi.config:
            one(name, 'config_changed', pwnagotchi.config)
        one(name, 'ui_setup', view.ROOT)
        one(name, 'ready', view.ROOT._agent)
        return True

    return False
def on(event_name, *args, **kwargs):
    """Broadcast *event_name* (with its arguments) to every loaded plugin."""
    for plugin_name in loaded:
        one(plugin_name, event_name, *args, **kwargs)
def locked_cb(lock_name, cb, *args, **kwargs):
    """Invoke *cb* while holding the lock registered under *lock_name*.

    Creates the lock on demand so events fired before registration still
    serialize correctly.
    """
    global locks

    if lock_name not in locks:
        locks[lock_name] = threading.Lock()

    with locks[lock_name]:
        # fix: the old code did `cb(*args, *kwargs)`, which unpacked the
        # kwargs dict's KEYS as extra positional arguments and dropped values
        cb(*args, **kwargs)
def one(plugin_name, event_name, *args, **kwargs):
    """Dispatch a single event to a single plugin on a background thread.

    Silently does nothing if the plugin isn't loaded or has no matching
    `on_<event>` callback.
    """
    global loaded
    if plugin_name in loaded:
        plugin = loaded[plugin_name]
        cb_name = 'on_%s' % event_name
        callback = getattr(plugin, cb_name, None)
        if callback is not None and callable(callback):
            try:
                lock_name = "%s::%s" % (plugin_name, cb_name)
                # fix: the old code built `(lock_name, callback, *args, *kwargs)`,
                # splatting kwargs KEYS into the positional tuple; pass the
                # kwargs dict as start_new_thread's third argument instead
                _thread.start_new_thread(locked_cb, (lock_name, callback) + args, kwargs)
            except Exception as e:
                logging.error("error while running %s.%s : %s" % (plugin_name, cb_name, e))
                logging.error(e, exc_info=True)
def load_from_file(filename):
    """Import the python module at *filename* and return (plugin_name, module)."""
    logging.debug("loading %s" % filename)
    # name is the basename with any ".py" removed
    plugin_name = os.path.basename(filename.replace(".py", ""))
    module_spec = importlib.util.spec_from_file_location(plugin_name, filename)
    module = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(module)
    return plugin_name, module
def load_from_path(path, enabled=()):
    """Index every *.py under *path* in `database`; import the enabled ones.

    Returns the global `loaded` registry.
    """
    global loaded, database

    logging.debug("loading plugins from %s - enabled: %s" % (path, enabled))
    for candidate in glob.glob(os.path.join(path, "*.py")):
        name = os.path.basename(candidate.replace(".py", ""))
        database[name] = candidate
        if name not in enabled:
            continue
        try:
            load_from_file(candidate)
        except Exception as e:
            # a broken plugin must not stop the rest from loading
            logging.warning("error while loading %s: %s" % (candidate, e))
            logging.debug(e, exc_info=True)

    return loaded
def load(config):
    """Load all plugins enabled in *config*, wire their options, fire startup events."""
    plugin_opts = config['main']['plugins']
    enabled = [name for name, opts in plugin_opts.items() if 'enabled' in opts and opts['enabled']]

    # load default plugins
    load_from_path(default_path, enabled=enabled)

    # load custom ones
    custom_path = config['main'].get('custom_plugins')
    if custom_path is not None:
        load_from_path(custom_path, enabled=enabled)

    # propagate options
    for name, plugin in loaded.items():
        plugin.options = plugin_opts[name]

    on('loaded')
    on('config_changed', config)

View File

@ -1,392 +0,0 @@
# Handles the commandline stuff
import os
import logging
import glob
import re
import shutil
from fnmatch import fnmatch
from pwnagotchi.utils import download_file, unzip, save_config, parse_version, md5
from pwnagotchi.plugins import default_path
SAVE_DIR = '/usr/local/share/pwnagotchi/available-plugins/'
DEFAULT_INSTALL_PATH = '/usr/local/share/pwnagotchi/installed-plugins/'
def add_parsers(subparsers):
    """
    Adds the plugins subcommand to a given argparse.ArgumentParser
    """
    # pwnagotchi plugins
    parser_plugins = subparsers.add_parser('plugins')
    cmds = parser_plugins.add_subparsers(dest='plugincmd')

    # pwnagotchi plugins search
    search = cmds.add_parser('search', help='Search for pwnagotchi plugins')
    search.add_argument('pattern', type=str, help="Search expression (wildcards allowed)")

    # pwnagotchi plugins list
    lst = cmds.add_parser('list', help='List available pwnagotchi plugins')
    lst.add_argument('-i', '--installed', action='store_true', required=False, help='List also installed plugins')

    # pwnagotchi plugins update
    cmds.add_parser('update', help='Updates the database')

    # pwnagotchi plugins upgrade
    upg = cmds.add_parser('upgrade', help='Upgrades plugins')
    upg.add_argument('pattern', type=str, nargs='?', default='*', help="Filter expression (wildcards allowed)")

    # the remaining subcommands all take a single plugin name
    for cmd, descr in (('enable', 'Enables a plugin'),
                       ('disable', 'Disables a plugin'),
                       ('install', 'Installs a plugin'),
                       ('uninstall', 'Uninstalls a plugin'),
                       ('edit', 'Edit the options')):
        sub = cmds.add_parser(cmd, help=descr)
        sub.add_argument('name', type=str, help='Name of the plugin')

    return subparsers
def used_plugin_cmd(args):
    """
    Checks if the plugins subcommand was used
    """
    # the 'plugincmd' attribute only exists when add_parsers() matched
    return 'plugincmd' in vars(args)
def handle_cmd(args, config):
    """
    Parses the arguments and does the thing the user wants
    """
    cmd = args.plugincmd

    if cmd == 'update':
        return update(config)
    if cmd == 'search':
        # search also looks through what is already installed
        args.installed = True
        return list_plugins(args, config, args.pattern)
    if cmd == 'install':
        return install(args, config)
    if cmd == 'uninstall':
        return uninstall(args, config)
    if cmd == 'list':
        return list_plugins(args, config)
    if cmd == 'enable':
        return enable(args, config)
    if cmd == 'disable':
        return disable(args, config)
    if cmd == 'upgrade':
        return upgrade(args, config, args.pattern)
    if cmd == 'edit':
        return edit(args, config)

    raise NotImplementedError()
def edit(args, config):
    """
    Edit the config of the plugin

    Writes the plugin's config subtree to a temp file, opens it in $EDITOR
    (vim by default), then merges the edited section back and saves the
    whole config. Returns 0 on success, 1 for an unknown plugin, or the
    editor's non-zero exit code.
    """
    plugin = args.name
    editor = os.environ.get('EDITOR', 'vim')  # because vim is the best

    if plugin not in config['main']['plugins']:
        return 1

    # expose only this plugin's subtree to the editor
    plugin_config = {'main': {'plugins': {plugin: config['main']['plugins'][plugin]}}}

    import toml
    from subprocess import call
    from tempfile import NamedTemporaryFile
    from pwnagotchi.utils import DottedTomlEncoder

    new_plugin_config = None
    with NamedTemporaryFile(suffix=".tmp", mode='r+t') as tmp:
        tmp.write(toml.dumps(plugin_config, encoder=DottedTomlEncoder()))
        tmp.flush()
        rc = call([editor, tmp.name])
        if rc != 0:
            # editor aborted; leave the config untouched
            return rc
        tmp.seek(0)
        new_plugin_config = toml.load(tmp)

    config['main']['plugins'][plugin] = new_plugin_config['main']['plugins'][plugin]
    save_config(config, args.user_config)
    return 0
def enable(args, config):
    """
    Enables the given plugin and saves the config to disk
    """
    config['main']['plugins'].setdefault(args.name, dict())['enabled'] = True
    save_config(config, args.user_config)
    return 0
def disable(args, config):
    """
    Disables the given plugin and saves the config to disk
    """
    config['main']['plugins'].setdefault(args.name, dict())['enabled'] = False
    save_config(config, args.user_config)
    return 0
def upgrade(args, config, pattern='*'):
    """
    Upgrades the given plugin

    For every installed plugin matching *pattern* that is also in the
    available set, copies the newer version over the installed file and
    carries over any bundled config file (backing up a locally-modified
    one first). Returns 0.
    """
    available = _get_available()
    installed = _get_installed(config)
    for plugin, filename in installed.items():
        if not fnmatch(plugin, pattern) or plugin not in available:
            continue
        available_version = _extract_version(available[plugin])
        installed_version = _extract_version(filename)
        # skip when either side has no parsable version, or nothing newer
        if installed_version and available_version:
            if available_version <= installed_version:
                continue
        else:
            continue
        logging.info('Upgrade %s from %s to %s', plugin, '.'.join(installed_version), '.'.join(available_version))
        shutil.copyfile(available[plugin], installed[plugin])
        # maybe has config
        for conf in glob.glob(available[plugin].replace('.py', '.y?ml')):
            dst = os.path.join(os.path.dirname(installed[plugin]), os.path.basename(conf))
            if os.path.exists(dst) and md5(dst) != md5(conf):
                # backup
                logging.info('Backing up config: %s', os.path.basename(conf))
                shutil.move(dst, dst + '.bak')
            shutil.copyfile(conf, dst)
    return 0
def list_plugins(args, config, pattern='*'):
    """
    Lists the available and installed plugins

    Prints an ASCII table of plugins matching *pattern*. With
    args.installed, installed plugins are listed too (marked "(^)" when an
    update is available). Returns 0 when at least one plugin matched,
    otherwise 1.
    """
    found = False
    line = "|{name:^{width}}|{version:^9}|{enabled:^10}|{status:^15}|"

    available = _get_available()
    installed = _get_installed(config)

    available_and_installed = set(list(available.keys()) + list(installed.keys()))
    available_not_installed = set(available.keys()) - set(installed.keys())

    # column width follows the longest name among the rows we will print
    max_len_list = available_and_installed if args.installed else available_not_installed
    max_len = max(map(len, max_len_list))
    header = line.format(name='Plugin', width=max_len, version='Version', enabled='Active', status='Status')
    line_length = max(max_len, len('Plugin')) + len(header) - len('Plugin') - 12  # lol

    print('-' * line_length)
    print(header)
    print('-' * line_length)

    if args.installed:
        # only installed (maybe update available?)
        for plugin, filename in sorted(installed.items()):
            if not fnmatch(plugin, pattern):
                continue
            found = True
            installed_version = _extract_version(filename)
            available_version = None
            if plugin in available:
                available_version = _extract_version(available[plugin])
            status = "installed"
            if installed_version and available_version:
                if available_version > installed_version:
                    status = "installed (^)"
            enabled = 'enabled' if (plugin in config['main']['plugins'] and
                                    'enabled' in config['main']['plugins'][plugin] and
                                    config['main']['plugins'][plugin]['enabled']) else 'disabled'
            print(line.format(name=plugin, width=max_len, version='.'.join(installed_version), enabled=enabled, status=status))

    for plugin in sorted(available_not_installed):
        if not fnmatch(plugin, pattern):
            continue
        found = True
        available_version = _extract_version(available[plugin])
        print(line.format(name=plugin, width=max_len, version='.'.join(available_version), enabled='-', status='available'))

    print('-' * line_length)

    if not found:
        logging.info('Maybe try: pwnagotchi plugins update')
        return 1

    return 0
def _extract_version(filename):
"""
Extracts the version from a python file
"""
plugin_content = open(filename, 'rt').read()
m = re.search(r'__version__[\t ]*=[\t ]*[\'\"]([^\"\']+)', plugin_content)
if m:
return parse_version(m.groups()[0])
return None
def _get_available():
    """Map every available plugin name to its file path in the save dir."""
    return {
        os.path.basename(filename.replace(".py", "")): filename
        for filename in glob.glob(os.path.join(SAVE_DIR, "*.py"))
    }
def _get_installed(config):
    """Map every installed plugin name to its file path.

    Scans the default plugin dir plus the configured custom_plugins dir
    (when set).
    """
    installed = dict()
    for directory in (default_path, config['main']['custom_plugins']):
        if not directory:
            continue
        for filename in glob.glob(os.path.join(directory, "*.py")):
            installed[os.path.basename(filename.replace(".py", ""))] = filename
    return installed
def uninstall(args, config):
    """
    Uninstalls a plugin

    Deletes the plugin file; returns 0 on success, 1 when not installed.
    """
    target = args.name
    installed = _get_installed(config)
    if target not in installed:
        logging.error('Plugin %s is not installed.', target)
        return 1
    os.remove(installed[target])
    return 0
def install(args, config):
    """
    Installs the given plugin

    Copies the plugin (and any bundled config file) from the
    available-plugins cache into the custom_plugins directory, creating and
    persisting that directory if it wasn't configured yet.
    Returns 0 on success, 1 on error.
    """
    global DEFAULT_INSTALL_PATH
    plugin_name = args.name
    available = _get_available()
    installed = _get_installed(config)

    if plugin_name not in available:
        logging.error('%s not found.', plugin_name)
        return 1

    if plugin_name in installed:
        logging.error('%s already installed.', plugin_name)
        # fix: bail out instead of falling through and silently overwriting
        # the installed copy after logging an error
        return 1

    # install into custom_plugins path
    install_path = config['main']['custom_plugins']
    if not install_path:
        install_path = DEFAULT_INSTALL_PATH
        config['main']['custom_plugins'] = install_path
        save_config(config, args.user_config)

    os.makedirs(install_path, exist_ok=True)
    shutil.copyfile(available[plugin_name], os.path.join(install_path, os.path.basename(available[plugin_name])))

    # maybe has config
    for conf in glob.glob(available[plugin_name].replace('.py', '.y?ml')):
        dst = os.path.join(install_path, os.path.basename(conf))
        if os.path.exists(dst) and md5(dst) != md5(conf):
            # backup
            logging.info('Backing up config: %s', os.path.basename(conf))
            shutil.move(dst, dst + '.bak')
        shutil.copyfile(conf, dst)
    return 0
def _analyse_dir(path):
    """Map every file directly under *path* to its md5 digest (pwnagotchi.utils.md5).

    NOTE(review): the glob pattern contains no '**', so recursive=True has
    no effect — only direct children are hashed; confirm if recursion was
    intended.
    """
    pattern = path + ('*' if path.endswith('/') else '/*')
    results = dict()
    for entry in glob.glob(pattern, recursive=True):
        if not os.path.isfile(entry):
            continue
        try:
            results[entry] = md5(entry)
        except OSError:
            # unreadable/vanished file: skip it
            continue
    return results
def update(config):
    """
    Updates the database

    Downloads each configured plugin repo zip into SAVE_DIR, extracts it,
    and logs how many files were added or changed. Returns 0 on success,
    1 when no repos are configured or any repo failed.
    """
    global SAVE_DIR

    urls = config['main']['custom_plugin_repos']
    if not urls:
        logging.info('No plugin repositories configured.')
        return 1

    rc = 0
    for idx, REPO_URL in enumerate(urls):
        DEST = os.path.join(SAVE_DIR, 'plugins%d.zip' % idx)
        logging.info('Downloading plugins from %s to %s', REPO_URL, DEST)

        try:
            os.makedirs(SAVE_DIR, exist_ok=True)
            before_update = _analyse_dir(SAVE_DIR)

            # fix: DEST is already an absolute path inside SAVE_DIR; the old
            # os.path.join(SAVE_DIR, DEST) only worked because join discards
            # its first argument when the second one is absolute
            download_file(REPO_URL, DEST)

            logging.info('Unzipping...')
            unzip(DEST, SAVE_DIR, strip_dirs=1)

            after_update = _analyse_dir(SAVE_DIR)

            b_len = len(before_update)
            a_len = len(after_update)

            if a_len > b_len:
                logging.info('Found %d new file(s).', a_len - b_len)

            # count files whose hash changed across the update
            changed = sum(1 for filename, filehash in after_update.items()
                          if filename in before_update and filehash != before_update[filename])
            if changed:
                logging.info('%d file(s) were changed.', changed)

        except Exception as ex:
            logging.error('Error while updating plugins: %s', ex)
            rc = 1
    return rc

View File

@ -1,73 +0,0 @@
import pwnagotchi.plugins as plugins
import logging
import subprocess
import string
import os
'''
Aircrack-ng needed, to install:
> apt-get install aircrack-ng
'''
class AircrackOnly(plugins.Plugin):
    __author__ = 'pwnagotchi [at] rossmarks [dot] uk'
    __version__ = '1.0.1'
    __license__ = 'GPL3'
    __description__ = 'confirm pcap contains handshake/PMKID or delete it'

    def __init__(self):
        # message to flash on the next UI refresh; empty means nothing to show
        self.text_to_set = ""
        self.options = dict()

    def on_ready(self):
        return

    def on_loaded(self):
        """Log startup, default the sad face, and warn when aircrack-ng is missing."""
        logging.info("aircrackonly plugin loaded")
        if 'face' not in self.options:
            self.options['face'] = '(>.<)'
        check = subprocess.run(
            '/usr/bin/dpkg -l aircrack-ng | grep aircrack-ng | awk \'{print $2, $3}\'', shell=True,
            stdout=subprocess.PIPE)
        check = check.stdout.decode('utf-8').strip()
        if check != "aircrack-ng <none>":
            logging.info("aircrackonly: Found " + check)
        else:
            logging.warning("aircrack-ng is not installed!")

    def on_handshake(self, agent, filename, access_point, client_station):
        """Keep the capture only if aircrack-ng finds a handshake or PMKID in it."""
        import shlex  # local import: needed only here for safe shell quoting
        display = agent.view()
        to_delete = 0
        handshake_found = 0
        # fix: quote the filename before interpolating it into a shell=True
        # command — unquoted paths with spaces/metacharacters broke (or could
        # abuse) the pipeline
        quoted = shlex.quote(filename)

        result = subprocess.run(('/usr/bin/aircrack-ng ' + quoted + ' | grep "1 handshake" | awk \'{print $2}\''),
                                shell=True, stdout=subprocess.PIPE)
        result = result.stdout.decode('utf-8').translate({ord(c): None for c in string.whitespace})
        if result:
            handshake_found = 1
            logging.info("[AircrackOnly] contains handshake")

        if handshake_found == 0:
            result = subprocess.run(('/usr/bin/aircrack-ng ' + quoted + ' | grep "PMKID" | awk \'{print $2}\''),
                                    shell=True, stdout=subprocess.PIPE)
            result = result.stdout.decode('utf-8').translate({ord(c): None for c in string.whitespace})
            if result:
                logging.info("[AircrackOnly] contains PMKID")
            else:
                to_delete = 1

        if to_delete == 1:
            os.remove(filename)
            self.text_to_set = "Removed an uncrackable pcap"
            logging.warning("Removed uncrackable pcap " + filename)
            display.update(force=True)

    def on_ui_update(self, ui):
        """Show the configured face and status message once, then clear it."""
        if self.text_to_set:
            ui.set('face', self.options['face'])
            ui.set('status', self.text_to_set)
            self.text_to_set = ""

View File

@ -1,229 +0,0 @@
import os
import re
import logging
import subprocess
import requests
import platform
import shutil
import glob
from threading import Lock
import time
import pwnagotchi
import pwnagotchi.plugins as plugins
from pwnagotchi.utils import StatusFile, parse_version as version_to_tuple
def check(version, repo, native=True):
    """Query GitHub for the latest release of *repo* and compare against *version*.

    Returns an info dict; 'url' is set to a download link only when a newer
    release exists (and, for native updates, an aarch64-compatible zip asset
    is found). NOTE(review): non-arm64 machines never get a url for native
    updates — confirm this is intended.
    """
    logging.debug("checking remote version for %s, local is %s" % (repo, version))
    info = {
        'repo': repo,
        'current': version,
        'available': None,
        'url': None,
        'native': native,
        'arch': platform.machine()
    }
    resp = requests.get("https://api.github.com/repos/%s/releases/latest" % repo)
    latest = resp.json()
    # strip the leading 'v' from tags like v1.2.3 (removes every 'v', in fact)
    info['available'] = latest_ver = latest['tag_name'].replace('v', '')
    is_arm64 = info['arch'].startswith('aarch')

    local = version_to_tuple(info['current'])
    remote = version_to_tuple(latest_ver)
    if remote > local:
        if not native:
            info['url'] = "https://github.com/%s/archive/%s.zip" % (repo, latest['tag_name'])
        else:
            if is_arm64:
                # check if this release is compatible with aarch64
                for asset in latest['assets']:
                    download_url = asset['browser_download_url']
                    if (download_url.endswith('.zip') and
                            (info['arch'] in download_url or (is_arm64 and 'aarch64' in download_url))):
                        info['url'] = download_url
                        break
    return info
def make_path_for(name):
    """Return a fresh, empty /usr/local/src/<name> working directory."""
    path = os.path.join("/usr/local/src/", name)
    if os.path.exists(path):
        # wipe leftovers from any previous update attempt
        logging.debug("[update] deleting %s" % path)
        shutil.rmtree(path, ignore_errors=True, onerror=None)
    os.makedirs(path)
    return path
def download_and_unzip(name, path, display, update):
    """Fetch the release zip described by *update* into *path* and extract it,
    reporting progress on the display.
    """
    target = "%s_%s.zip" % (name, update['available'])
    target_path = os.path.join(path, target)

    logging.info("[update] downloading %s to %s ..." % (update['url'], target_path))
    display.update(force=True, new_data={'status': 'Downloading %s %s ...' % (name, update['available'])})
    # NOTE(review): url/path are interpolated into a shell command protected
    # only by the embedded double quotes; values come from the GitHub API
    os.system('wget -q "%s" -O "%s"' % (update['url'], target_path))

    logging.info("[update] extracting %s to %s ..." % (target_path, path))
    display.update(force=True, new_data={'status': 'Extracting %s %s ...' % (name, update['available'])})
    os.system('unzip "%s" -d "%s"' % (target_path, path))
def verify(name, path, source_path, display, update):
    """Verify *source_path* against the first *.sha256 file found in *path*.

    Returns True when the checksum matches, or when a non-native update
    ships no checksum file at all; False on mismatch or when a native
    update lacks a checksum.
    """
    display.update(force=True, new_data={'status': 'Verifying %s %s ...' % (name, update['available'])})

    checksums = glob.glob("%s/*.sha256" % path)
    if len(checksums) == 0:
        if update['native']:
            # native binaries must ship a checksum
            logging.warning("[update] native update without SHA256 checksum file")
            return False
    else:
        checksum = checksums[0]

        logging.info("[update] verifying %s for %s ..." % (checksum, source_path))

        with open(checksum, 'rt') as fp:
            # checksum file format: "<something>=<hexdigest>"
            expected = fp.read().split('=')[1].strip().lower()

        real = subprocess.getoutput('sha256sum "%s"' % source_path).split(' ')[0].strip().lower()

        if real != expected:
            logging.warning("[update] checksum mismatch for %s: expected=%s got=%s" % (source_path, expected, real))
            return False

    return True
def install(display, update):
    """Download, verify and install the single update described by *update*.

    Native updates replace the binary found via `which` and restart the
    matching service; non-native (pwnagotchi itself) updates are installed
    with pip. Returns True on success, False otherwise.
    """
    name = update['repo'].split('/')[1]

    path = make_path_for(name)
    download_and_unzip(name, path, display, update)
    source_path = os.path.join(path, name)
    if not verify(name, path, source_path, display, update):
        return False

    logging.info("[update] installing %s ..." % name)
    display.update(force=True, new_data={'status': 'Installing %s %s ...' % (name, update['available'])})

    if update['native']:
        dest_path = subprocess.getoutput("which %s" % name)
        if dest_path == "":
            logging.warning("[update] can't find path for %s" % name)
            return False

        logging.info("[update] stopping %s ..." % update['service'])
        os.system("service %s stop" % update['service'])
        shutil.move(source_path, dest_path)
        # NOTE(review): chmod targets /usr/local/bin/<name>, which may differ
        # from the dest_path reported by `which` — confirm intended
        os.chmod("/usr/local/bin/%s" % name, 0o755)
        logging.info("[update] restarting %s ..." % update['service'])
        os.system("service %s start" % update['service'])
    else:
        # archive zips extract to "<name>-<version>" instead of "<name>"
        if not os.path.exists(source_path):
            source_path = "%s-%s" % (source_path, update['available'])

        # setup.py is going to install data files for us
        os.system("cd %s && pip3 install . --break-system-packages" % source_path)

    return True
def parse_version(cmd):
    """Run *cmd* in a shell and return the first x.y.z-looking token of its output.

    Leading 'v' prefixes are stripped. Raises when no version token is found.
    """
    out = subprocess.getoutput(cmd)
    for token in out.split(' '):
        candidate = token.replace('v', '').strip()
        if re.search(r'^\d+\.\d+\.\d+.*$', candidate):
            return candidate
    raise Exception('could not parse version from "%s": output=\n%s' % (cmd, out))
class AutoUpdate(plugins.Plugin):
    __author__ = 'evilsocket@gmail.com'
    __version__ = '1.1.1'
    __name__ = 'auto-update'
    __license__ = 'GPL3'
    __description__ = 'This plugin checks when updates are available and applies them when internet is available.'

    def __init__(self):
        # becomes True once a valid interval option is seen in on_loaded()
        self.ready = False
        # timestamp marker used to rate-limit update checks
        self.status = StatusFile('/root/.auto-update')
        # prevents overlapping checks from concurrent internet events
        self.lock = Lock()
        self.options = dict()

    def on_loaded(self):
        """Validate configuration; the plugin stays inert without an interval."""
        if 'interval' not in self.options or ('interval' in self.options and not self.options['interval']):
            logging.error("[update] main.plugins.auto-update.interval is not set")
            return
        self.ready = True
        logging.info("[update] plugin loaded.")

    def on_internet_available(self, agent):
        """Check bettercap/pwngrid/pwnagotchi for updates; optionally install and reboot."""
        if self.lock.locked():
            return
        with self.lock:
            logging.debug("[update] internet connectivity is available (ready %s)" % self.ready)

            if not self.ready:
                return

            # rate-limit: skip if the last check is recent enough
            if self.status.newer_then_hours(self.options['interval']):
                logging.debug("[update] last check happened less than %d hours ago" % self.options['interval'])
                return

            logging.info("[update] checking for updates ...")

            display = agent.view()
            prev_status = display.get('status')

            try:
                display.update(force=True, new_data={'status': 'Checking for updates ...'})

                to_install = []
                # (repo, local version, is_native_binary, service to restart)
                to_check = [
                    ('jayofelony/bettercap', parse_version('bettercap -version'), True, 'bettercap'),
                    ('jayofelony/pwngrid', parse_version('pwngrid -version'), True, 'pwngrid-peer'),
                    ('jayofelony/pwnagotchi-bookworm', pwnagotchi.__version__, False, 'pwnagotchi')
                ]

                for repo, local_version, is_native, svc_name in to_check:
                    info = check(local_version, repo, is_native)
                    if info['url'] is not None:
                        logging.warning(
                            "update for %s available (local version is '%s'): %s" % (
                                repo, info['current'], info['url']))
                        info['service'] = svc_name
                        to_install.append(info)

                num_updates = len(to_install)
                num_installed = 0

                if num_updates > 0:
                    if self.options['install']:
                        for update in to_install:
                            plugins.on('updating')
                            if install(display, update):
                                num_installed += 1
                    else:
                        # just announce; the restored status line shows the count
                        prev_status = '%d new update%s available!' % (num_updates, 's' if num_updates > 1 else '')

                logging.info("[update] done")

                # record the time of this check for the rate limit
                self.status.update()

                if num_installed > 0:
                    display.update(force=True, new_data={'status': 'Rebooting ...'})
                    time.sleep(3)
                    os.system("service pwnagotchi restart")
            except Exception as e:
                logging.error("[update] %s" % e)

            display.update(force=True, new_data={'status': prev_status if prev_status is not None else ''})

View File

@ -1,585 +0,0 @@
import logging
import os
import subprocess
import time
from threading import Lock
import dbus
import pwnagotchi.plugins as plugins
import pwnagotchi.ui.fonts as fonts
from pwnagotchi.ui.components import LabeledValue
from pwnagotchi.ui.view import BLACK
from pwnagotchi.utils import StatusFile
class BTError(Exception):
    """Raised for bluetooth-related failures in the BT-TETHER plugin."""
    pass
class BTNap:
    """
    This class creates a bluetooth connection to the specified bt-mac

    see https://github.com/bablokb/pi-btnap/blob/master/files/usr/local/sbin/btnap.service.py
    """

    # BlueZ 5 D-Bus interface names used throughout
    IFACE_BASE = 'org.bluez'
    IFACE_DEV = 'org.bluez.Device1'
    IFACE_ADAPTER = 'org.bluez.Adapter1'
    IFACE_PROPS = 'org.freedesktop.DBus.Properties'

    def __init__(self, mac):
        # MAC address of the remote device to tether to
        self._mac = mac

    @staticmethod
    def get_bus():
        """
        Get systembus obj

        The SystemBus instance is cached on the function object so all
        callers share one D-Bus connection.
        """
        bus = getattr(BTNap.get_bus, 'cached_obj', None)
        if not bus:
            bus = BTNap.get_bus.cached_obj = dbus.SystemBus()
        return bus

    @staticmethod
    def get_manager():
        """
        Get manager obj

        Cached org.freedesktop.DBus.ObjectManager interface rooted at '/'.
        """
        manager = getattr(BTNap.get_manager, 'cached_obj', None)
        if not manager:
            manager = BTNap.get_manager.cached_obj = dbus.Interface(
                BTNap.get_bus().get_object(BTNap.IFACE_BASE, '/'),
                'org.freedesktop.DBus.ObjectManager')
        return manager

    @staticmethod
    def prop_get(obj, k, iface=None):
        """
        Get a property of the obj
        """
        if iface is None:
            iface = obj.dbus_interface
        return obj.Get(iface, k, dbus_interface=BTNap.IFACE_PROPS)

    @staticmethod
    def prop_set(obj, k, v, iface=None):
        """
        Set a property of the obj
        """
        if iface is None:
            iface = obj.dbus_interface
        return obj.Set(iface, k, v, dbus_interface=BTNap.IFACE_PROPS)

    @staticmethod
    def find_adapter(pattern=None):
        """
        Find the bt adapter
        """
        return BTNap.find_adapter_in_objects(BTNap.get_manager().GetManagedObjects(), pattern)

    @staticmethod
    def find_adapter_in_objects(objects, pattern=None):
        """
        Finds the obj with a pattern

        Generator: yields an org.bluez.Adapter1 interface for every adapter
        whose address or object path matches *pattern* (all adapters when
        pattern is falsy).
        """
        bus, obj = BTNap.get_bus(), None
        for path, ifaces in objects.items():
            adapter = ifaces.get(BTNap.IFACE_ADAPTER)
            if adapter is None:
                continue
            if not pattern or pattern == adapter['Address'] or path.endswith(pattern):
                obj = bus.get_object(BTNap.IFACE_BASE, path)
                yield dbus.Interface(obj, BTNap.IFACE_ADAPTER)
        # NOTE(review): as a generator, this raise only fires once a caller
        # exhausts the iterator (e.g. via list()) and nothing matched
        if obj is None:
            raise BTError('Bluetooth adapter not found')

    @staticmethod
    def find_device(device_address, adapter_pattern=None):
        """
        Finds the device
        """
        return BTNap.find_device_in_objects(BTNap.get_manager().GetManagedObjects(),
                                            device_address, adapter_pattern)

    @staticmethod
    def find_device_in_objects(objects, device_address, adapter_pattern=None):
        """
        Finds the device in objects

        Returns an org.bluez.Device1 interface for the device whose Address
        matches (case-insensitively); raises BTError when not found.
        """
        bus = BTNap.get_bus()
        path_prefix = ''
        if adapter_pattern:
            if not isinstance(adapter_pattern, str):
                adapter = adapter_pattern
            else:
                # NOTE(review): find_adapter_in_objects is a generator, so a
                # string pattern leaves `adapter` without .object_path — looks
                # like only non-string (adapter object) callers are expected
                adapter = BTNap.find_adapter_in_objects(objects, adapter_pattern)
            path_prefix = adapter.object_path
        for path, ifaces in objects.items():
            device = ifaces.get(BTNap.IFACE_DEV)
            if device is None:
                continue
            if str(device['Address']).lower() == device_address.lower() and path.startswith(path_prefix):
                obj = bus.get_object(BTNap.IFACE_BASE, path)
                return dbus.Interface(obj, BTNap.IFACE_DEV)
        raise BTError('Bluetooth device not found')

    def power(self, on=True):
        """
        Set power of devices to on/off

        Returns the first powered adapter, or None when none was found.
        """
        logging.debug("BT-TETHER: Changing bluetooth device to %s", str(on))
        try:
            devs = list(BTNap.find_adapter())
            devs = dict((BTNap.prop_get(dev, 'Address'), dev) for dev in devs)
        except BTError as bt_err:
            logging.error(bt_err)
            return None
        for dev_addr, dev in devs.items():
            BTNap.prop_set(dev, 'Powered', on)
            logging.debug('Set power of %s (addr %s) to %s', dev.object_path, dev_addr, str(on))
        if devs:
            return list(devs.values())[0]
        return None

    def is_paired(self):
        """
        Check if already connected
        """
        logging.debug("BT-TETHER: Checking if device is paired")
        bt_dev = self.power(True)
        if not bt_dev:
            logging.debug("BT-TETHER: No bluetooth device found.")
            return False
        try:
            dev_remote = BTNap.find_device(self._mac, bt_dev)
            return bool(BTNap.prop_get(dev_remote, 'Paired'))
        except BTError:
            logging.debug("BT-TETHER: Device is not paired.")
        return False

    def wait_for_device(self, timeout=15):
        """
        Wait for device

        returns device if found None if not
        """
        logging.debug("BT-TETHER: Waiting for device")
        bt_dev = self.power(True)
        if not bt_dev:
            logging.debug("BT-TETHER: No bluetooth device found.")
            return None
        try:
            logging.debug("BT-TETHER: Starting discovery ...")
            bt_dev.StartDiscovery()
        except Exception as bt_ex:
            logging.error(bt_ex)
            raise bt_ex
        dev_remote = None
        # could be set to 0, so check if > -1
        while timeout > -1:
            try:
                dev_remote = BTNap.find_device(self._mac, bt_dev)
                logging.debug("BT-TETHER: Using remote device (addr: %s): %s",
                              BTNap.prop_get(dev_remote, 'Address'), dev_remote.object_path)
                break
            except BTError:
                logging.debug("BT-TETHER: Not found yet ...")
            time.sleep(1)
            timeout -= 1
        try:
            logging.debug("BT-TETHER: Stopping Discovery ...")
            bt_dev.StopDiscovery()
        except Exception as bt_ex:
            logging.error(bt_ex)
            raise bt_ex
        return dev_remote

    @staticmethod
    def pair(device):
        """Pair with *device*; True on success or when already paired."""
        logging.debug('BT-TETHER: Trying to pair ...')
        try:
            device.Pair()
            logging.debug('BT-TETHER: Successful paired with device ;)')
            return True
        except dbus.exceptions.DBusException as err:
            if err.get_dbus_name() == 'org.bluez.Error.AlreadyExists':
                logging.debug('BT-TETHER: Already paired ...')
                return True
        except Exception:
            pass
        return False

    @staticmethod
    def nap(device):
        """Connect the NAP (network access point) profile on *device*.

        Returns (network_interface, True) on success, (None, False) on failure.
        """
        logging.debug('BT-TETHER: Trying to nap ...')
        try:
            logging.debug('BT-TETHER: Connecting to profile ...')
            device.ConnectProfile('nap')
        except Exception:  # raises exception, but still works
            pass
        net = dbus.Interface(device, 'org.bluez.Network1')
        try:
            logging.debug('BT-TETHER: Connecting to nap network ...')
            net.Connect('nap')
            return net, True
        except dbus.exceptions.DBusException as err:
            if err.get_dbus_name() == 'org.bluez.Error.AlreadyConnected':
                return net, True
            connected = BTNap.prop_get(net, 'Connected')
            if not connected:
                return None, False
            return net, True
class SystemdUnitWrapper:
    """
    systemd wrapper

    Thin convenience layer over `systemctl` for a single unit. Every method
    returns True when systemctl succeeds, False when it exits non-zero.
    """

    def __init__(self, unit):
        self.unit = unit

    @staticmethod
    def _run_systemctl(command):
        """Run `systemctl <command>` quietly; True unless it exits with a positive code."""
        # fix: subprocess.DEVNULL instead of open("/dev/null", "w") — the old
        # code leaked one file handle per invocation
        process = subprocess.run(f"systemctl {command}", shell=True, stdin=None,
                                 stdout=subprocess.DEVNULL, stderr=None, executable="/bin/bash")
        return process.returncode <= 0

    @staticmethod
    def _action_on_unit(action, unit):
        """Run `systemctl <action> <unit>`."""
        return SystemdUnitWrapper._run_systemctl(f"{action} {unit}")

    @staticmethod
    def daemon_reload():
        """
        Calls systemctl daemon-reload
        """
        return SystemdUnitWrapper._run_systemctl("daemon-reload")

    def is_active(self):
        """
        Checks if unit is active
        """
        return SystemdUnitWrapper._action_on_unit('is-active', self.unit)

    def is_enabled(self):
        """
        Checks if unit is enabled
        """
        return SystemdUnitWrapper._action_on_unit('is-enabled', self.unit)

    def is_failed(self):
        """
        Checks if unit is failed
        """
        return SystemdUnitWrapper._action_on_unit('is-failed', self.unit)

    def enable(self):
        """
        Enables the unit
        """
        return SystemdUnitWrapper._action_on_unit('enable', self.unit)

    def disable(self):
        """
        Disables the unit
        """
        return SystemdUnitWrapper._action_on_unit('disable', self.unit)

    def start(self):
        """
        Starts the unit
        """
        return SystemdUnitWrapper._action_on_unit('start', self.unit)

    def stop(self):
        """
        Stops the unit
        """
        return SystemdUnitWrapper._action_on_unit('stop', self.unit)

    def restart(self):
        """
        Restarts the unit
        """
        return SystemdUnitWrapper._action_on_unit('restart', self.unit)
class IfaceWrapper:
    """
    Small wrapper to check and manage ifaces

    see: https://github.com/rlisagor/pynetlinux/blob/master/pynetlinux/ifconfig.py
    """

    def __init__(self, iface):
        self.iface = iface
        self.path = f"/sys/class/net/{iface}"

    def exists(self):
        """
        Checks if iface exists
        """
        return os.path.exists(self.path)

    def is_up(self):
        """
        Checks if iface is up
        """
        # fix: the old code compared rsplit('\n') — a LIST — against the
        # string 'up', which is always False; strip the newline and compare
        with open(f"{self.path}/operstate", 'r') as fh:
            return fh.read().strip() == 'up'

    def set_addr(self, addr):
        """
        Set the netmask
        """
        # fix: subprocess.DEVNULL instead of a leaked open("/dev/null") handle
        process = subprocess.run(f"ip addr add {addr} dev {self.iface}", shell=True, stdin=None,
                                 stdout=subprocess.DEVNULL, stderr=None, executable="/bin/bash")
        # 2 = address already configured, which is fine for our purposes
        if process.returncode == 2 or process.returncode == 0:
            return True
        return False

    @staticmethod
    def set_route(gateway, device):
        """Make *gateway* via *device* the default route; True on success."""
        process = subprocess.run(f"ip route replace default via {gateway} dev {device}", shell=True, stdin=None,
                                 stdout=subprocess.DEVNULL, stderr=None, executable="/bin/bash")
        return process.returncode <= 0
class Device:
    """
    Configuration and connection state for a single bluetooth-tether device.
    """

    def __init__(self, name, share_internet, mac, ip, netmask, interval, gateway=None, priority=10, scantime=15, search_order=0, max_tries=0, **kwargs):
        """
        :param name: config section name of the device
        :param share_internet: if True, route internet traffic over this device
        :param mac: bluetooth MAC address of the remote device
        :param ip: ip address to assign to the local tether interface
        :param netmask: netmask for that ip
        :param interval: minutes to wait between connection attempts
        :param gateway: optional gateway ip; callers derive x.y.z.1 when None
        :param priority: higher priority wins the default route
        :param scantime: seconds to scan for the remote device
        :param search_order: devices with lower values are tried first
        :param max_tries: give up after this many attempts (0 = unlimited)
        :param kwargs: ignored, lets a whole config section be splatted in
        """
        self.name = name
        # persisted timestamp of the last connection attempt for this device
        # (presumably written under /root — see pwnagotchi.utils.StatusFile)
        self.status = StatusFile(f'/root/.bt-tether-{name}')
        self.status.update()
        self.tries = 0
        # dbus network object once a NAP connection exists, else None
        self.network = None
        self.max_tries = max_tries
        self.search_order = search_order
        self.share_internet = share_internet
        self.ip = ip
        self.netmask = netmask
        self.gateway = gateway
        self.interval = interval
        self.mac = mac
        self.scantime = scantime
        self.priority = priority

    def connected(self):
        """
        Checks if device is connected
        """
        return self.network and BTNap.prop_get(self.network, 'Connected')

    def interface(self):
        """
        Returns the interface name or None
        """
        if not self.connected():
            return None
        return BTNap.prop_get(self.network, 'Interface')
class BTTether(plugins.Plugin):
    __author__ = '33197631+dadav@users.noreply.github.com'
    __version__ = '1.1.0'
    __license__ = 'GPL3'
    __description__ = 'This makes the display reachable over bluetooth'

    def __init__(self):
        self.ready = False
        self.options = dict()
        # name -> Device, built from config in on_loaded()
        self.devices = dict()
        self.lock = Lock()
        # flag polled by the on_loaded() main loop; cleared by on_unload()
        self.running = True
        # short status code shown on the display:
        # '-' idle, 'C' connected, 'NF' not found, 'PE' pairing error,
        # 'BE' bnep/nap error, 'AE' address error
        self.status = '-'

    def on_loaded(self):
        """
        Parse configured devices, make sure bluetooth is running, then loop
        forever (until on_unload clears self.running) trying to keep the
        configured devices tethered.

        NOTE(review): this blocks the plugin-loading thread for the lifetime
        of the plugin — confirm the plugin manager runs on_loaded callbacks
        on their own threads.
        """
        # new config
        if 'devices' in self.options:
            for device, options in self.options['devices'].items():
                if 'enabled' in options and options['enabled']:
                    for device_opt in ['enabled', 'priority', 'scantime', 'search_order',
                                       'max_tries', 'share_internet', 'mac', 'ip',
                                       'netmask', 'interval']:
                        if device_opt not in options or options[device_opt] is None:
                            logging.error("BT-TETHER: Please specify the %s for device %s.",
                                          device_opt, device)
                            break
                    else:
                        # for/else: only runs when every required option was present
                        if options['enabled']:
                            self.devices[device] = Device(name=device, **options)
        # legacy
        if 'mac' in self.options:
            for opt in ['share_internet', 'mac', 'ip', 'netmask', 'interval']:
                if opt not in self.options or self.options[opt] is None:
                    logging.error("BT-TETHER: Please specify the %s in your config.toml.", opt)
                    return
            self.devices['legacy'] = Device(name='legacy', **self.options)
        if not self.devices:
            logging.error("BT-TETHER: No valid devices found")
            return
        # ensure bluetooth is running
        bt_unit = SystemdUnitWrapper('bluetooth.service')
        if not bt_unit.is_active():
            if not bt_unit.start():
                logging.error("BT-TETHER: Can't start bluetooth.service")
                return
        logging.info("BT-TETHER: Successfully loaded ...")
        while self.running:
            time.sleep(1)
            devices_to_try = list()
            connected_priorities = list()
            any_device_connected = False  # if this is true, last status on screen should be C
            for _, device in self.devices.items():
                if device.connected():
                    connected_priorities.append(device.priority)
                    any_device_connected = True
                    continue
                # only retry when under the attempt limit and the retry
                # interval (minutes) has elapsed since the last attempt
                if not device.max_tries or (device.max_tries > device.tries):
                    if not device.status.newer_then_minutes(device.interval):
                        devices_to_try.append(device)
                        device.status.update()
                        device.tries += 1
            sorted_devices = sorted(devices_to_try, key=lambda x: x.search_order)
            for device in sorted_devices:
                bt = BTNap(device.mac)
                try:
                    logging.debug('BT-TETHER: Search %d secs for %s ...', device.scantime, device.name)
                    dev_remote = bt.wait_for_device(timeout=device.scantime)
                    if dev_remote is None:
                        logging.debug('BT-TETHER: Could not find %s, try again in %d minutes.', device.name, device.interval)
                        self.status = 'NF'
                        continue
                except Exception as bt_ex:
                    logging.error(bt_ex)
                    self.status = 'NF'
                    continue
                paired = bt.is_paired()
                if not paired:
                    if BTNap.pair(dev_remote):
                        logging.debug('BT-TETHER: Paired with %s.', device.name)
                    else:
                        logging.debug('BT-TETHER: Pairing with %s failed ...', device.name)
                        self.status = 'PE'
                        continue
                else:
                    logging.debug('BT-TETHER: Already paired.')
                logging.debug('BT-TETHER: Try to create nap connection with %s ...', device.name)
                device.network, success = BTNap.nap(dev_remote)
                interface = None
                if success:
                    try:
                        interface = device.interface()
                    except Exception:
                        logging.debug('BT-TETHER: Could not establish nap connection with %s', device.name)
                        continue
                    if interface is None:
                        self.status = 'BE'
                        logging.debug('BT-TETHER: Could not establish nap connection with %s', device.name)
                        continue
                    logging.debug('BT-TETHER: Created interface (%s)', interface)
                    self.status = 'C'
                    any_device_connected = True
                    device.tries = 0  # reset tries
                else:
                    logging.debug('BT-TETHER: Could not establish nap connection with %s', device.name)
                    self.status = 'NF'
                    continue
                addr = f"{device.ip}/{device.netmask}"
                if device.gateway:
                    gateway = device.gateway
                else:
                    # default to .1 on the device's own subnet
                    gateway = ".".join(device.ip.split('.')[:-1] + ['1'])
                wrapped_interface = IfaceWrapper(interface)
                logging.debug('BT-TETHER: Add ip to %s', interface)
                if not wrapped_interface.set_addr(addr):
                    self.status = 'AE'
                    logging.debug("BT-TETHER: Could not add ip to %s", interface)
                    continue
                if device.share_internet:
                    # only steal the default route from lower-priority devices
                    if not connected_priorities or device.priority > max(connected_priorities):
                        logging.debug('BT-TETHER: Set default route to %s via %s', gateway, interface)
                        IfaceWrapper.set_route(gateway, interface)
                        connected_priorities.append(device.priority)
                    logging.debug('BT-TETHER: Change resolv.conf if necessary ...')
                    with open('/etc/resolv.conf', 'r+') as resolv:
                        nameserver = resolv.read()
                        if 'nameserver 9.9.9.9' not in nameserver:
                            logging.debug('BT-TETHER: Added nameserver')
                            resolv.seek(0)
                            resolv.write(nameserver + 'nameserver 9.9.9.9\n')
            if any_device_connected:
                self.status = 'C'

    def on_unload(self, ui):
        # stop the on_loaded() loop, then drop our UI element
        self.running = False
        with ui._lock:
            ui.remove_element('bluetooth')

    def on_ui_setup(self, ui):
        # register the 'BT' status widget on the display
        with ui._lock:
            ui.add_element('bluetooth', LabeledValue(color=BLACK, label='BT', value='-',
                                                     position=(ui.width() / 2 - 10, 0),
                                                     label_font=fonts.Bold, text_font=fonts.Medium))

    def on_ui_update(self, ui):
        # push the latest short status code to the display
        ui.set('bluetooth', self.status)

View File

@ -1,157 +0,0 @@
import logging
import pwnagotchi.plugins as plugins
from pwnagotchi.ui.components import LabeledValue
from pwnagotchi.ui.view import BLACK
import pwnagotchi.ui.fonts as fonts
class Example(plugins.Plugin):
    __author__ = 'evilsocket@gmail.com'
    __version__ = '1.0.0'
    __license__ = 'GPL3'
    __description__ = 'An example plugin for pwnagotchi that implements all the available callbacks.'

    def __init__(self):
        logging.debug("example plugin created")

    # called when http://<host>:<port>/plugins/<plugin>/ is called
    # must return a html page
    # IMPORTANT: If you use "POST"s, add a csrf-token (via csrf_token() and render_template_string)
    def on_webhook(self, path, request):
        pass

    # called when the plugin is loaded
    def on_loaded(self):
        # fix: the original format string had no %s placeholder, so the
        # "%" operator raised TypeError ("not all arguments converted
        # during string formatting") instead of logging the options.
        logging.warning("WARNING: this plugin should be disabled! options = %s" % self.options)

    # called before the plugin is unloaded
    def on_unload(self, ui):
        pass

    # called when there's internet connectivity
    def on_internet_available(self, agent):
        pass

    # called to setup the ui elements
    def on_ui_setup(self, ui):
        # add custom UI elements
        ui.add_element('ups', LabeledValue(color=BLACK, label='UPS', value='0%/0V', position=(ui.width() / 2 - 25, 0),
                                           label_font=fonts.Bold, text_font=fonts.Medium))

    # called when the ui is updated
    def on_ui_update(self, ui):
        # update those elements
        some_voltage = 0.1
        some_capacity = 100.0
        ui.set('ups', "%4.2fV/%2i%%" % (some_voltage, some_capacity))

    # called when the hardware display setup is done, display is an hardware specific object
    def on_display_setup(self, display):
        pass

    # called when everything is ready and the main loop is about to start
    def on_ready(self, agent):
        logging.info("unit is ready")
        # you can run custom bettercap commands if you want
        # agent.run('ble.recon on')
        # or set a custom state
        # agent.set_bored()

    # called when the AI finished loading
    def on_ai_ready(self, agent):
        pass

    # called when the AI finds a new set of parameters
    def on_ai_policy(self, agent, policy):
        pass

    # called when the AI starts training for a given number of epochs
    def on_ai_training_start(self, agent, epochs):
        pass

    # called after the AI completed a training epoch
    def on_ai_training_step(self, agent, _locals, _globals):
        pass

    # called when the AI has done training
    def on_ai_training_end(self, agent):
        pass

    # called when the AI got the best reward so far
    def on_ai_best_reward(self, agent, reward):
        pass

    # called when the AI got the worst reward so far
    def on_ai_worst_reward(self, agent, reward):
        pass

    # called when a non overlapping wifi channel is found to be free
    def on_free_channel(self, agent, channel):
        pass

    # called when the status is set to bored
    def on_bored(self, agent):
        pass

    # called when the status is set to sad
    def on_sad(self, agent):
        pass

    # called when the status is set to excited
    def on_excited(self, agent):
        pass

    # called when the status is set to lonely
    def on_lonely(self, agent):
        pass

    # called when the agent is rebooting the board
    def on_rebooting(self, agent):
        pass

    # called when the agent is waiting for t seconds
    def on_wait(self, agent, t):
        pass

    # called when the agent is sleeping for t seconds
    def on_sleep(self, agent, t):
        pass

    # called when the agent refreshed its access points list
    def on_wifi_update(self, agent, access_points):
        pass

    # called when the agent refreshed an unfiltered access point list
    # this list contains all access points that were detected BEFORE filtering
    def on_unfiltered_ap_list(self, agent, access_points):
        pass

    # called when the agent is sending an association frame
    def on_association(self, agent, access_point):
        pass

    # called when the agent is deauthenticating a client station from an AP
    def on_deauthentication(self, agent, access_point, client_station):
        pass

    # called when the agent is tuning on a specific channel
    def on_channel_hop(self, agent, channel):
        pass

    # called when a new handshake is captured, access_point and client_station are json objects
    # if the agent could match the BSSIDs to the current list, otherwise they are just the strings of the BSSIDs
    def on_handshake(self, agent, filename, access_point, client_station):
        pass

    # called when an epoch is over (where an epoch is a single loop of the main algorithm)
    def on_epoch(self, agent, epoch, epoch_data):
        pass

    # called when a new peer is detected
    def on_peer_detected(self, agent, peer):
        pass

    # called when a known peer is lost
    def on_peer_lost(self, agent, peer):
        pass

View File

@ -1,371 +0,0 @@
import logging
import re
import subprocess
import time
import random
from io import TextIOWrapper
import pwnagotchi
from pwnagotchi import plugins
import pwnagotchi.ui.faces as faces
from pwnagotchi.bettercap import Client
class FixServices(plugins.Plugin):
    __author__ = 'jayofelony'
    __version__ = '1.0'
    __license__ = 'GPL3'
    __description__ = 'Fix blindness, firmware crashes and brain not being loaded'
    __name__ = 'Fix_Services'
    __help__ = """
    Reload brcmfmac module when blindbug is detected, instead of rebooting. Adapted from WATCHDOG.
    """
    __defaults__ = {
        'enabled': True,
    }

    def __init__(self):
        self.options = dict()
        # symptom patterns matched against recent journal / log output:
        # pattern1: blindness (channel-hop failures), pattern2: firmware
        # crash, pattern3: bettercap lost the monitor interface
        self.pattern1 = re.compile(r'wifi error while hopping to channel')
        self.pattern2 = re.compile(r'Firmware has halted or crashed')
        self.pattern3 = re.compile(r'error 400: could not find interface wlan0mon')
        # guards against overlapping recovery attempts
        self.isReloadingMon = False
        self.connection = None
        # epoch timestamp of the last recovery attempt (0 = never)
        self.LASTTRY = 0
        # number of successful recoveries, for bookkeeping
        self._count = 0

    def on_loaded(self):
        """
        Gets called when the plugin gets loaded
        """
        logging.info("[Fix_Services] plugin loaded.")

    def on_ready(self, agent):
        """
        Sanity-check wlan0mon once the unit is ready; attempt a full
        off-and-on recovery when the interface can't be queried.
        """
        # NOTE(review): last_lines is computed but never used here — confirm
        # whether it was meant to be checked like in on_epoch().
        last_lines = self.get_last_lines('journalctl', ['-n10', '-k'], 10)
        try:
            cmd_output = subprocess.check_output("ip link show wlan0mon", shell=True)
            logging.debug("[Fix_Services ip link show wlan0mon]: %s" % repr(cmd_output))
            if ",UP," in str(cmd_output):
                logging.info("wlan0mon is up.")
            logging.info("[Fix_Services] Logs look good!")
        except Exception as err:
            # `ip link show` failed -> the monitor interface is missing
            logging.error("[Fix_Services ip link show wlan0mon]: %s" % repr(err))
            try:
                self._tryTurningItOffAndOnAgain(agent)
            except Exception as err:
                logging.error("[Fix_Services OffNOn]: %s" % repr(err))

    # bettercap sys_log event
    # search syslog events for the brcmf channel fail, and reset when it shows up
    # apparently this only gets messages from bettercap going to syslog, not from syslog
    def on_bcap_sys_log(self, agent, event):
        if re.search('wifi error while hopping to channel', event['data']['Message']):
            logging.info("[Fix_Services]SYSLOG MATCH: %s" % event['data']['Message'])
            logging.info("[Fix_Services]**** restarting wifi.recon")
            try:
                result = agent.run("wifi.recon off; wifi.recon on")
                if result["success"]:
                    logging.info("[Fix_Services] wifi.recon flip: success!")
                    if hasattr(agent, 'view'):
                        display = agent.view()
                        if display:
                            display.update(force=True, new_data={"status": "Wifi recon flipped!", "face": faces.COOL})
                        else:
                            print("Wifi recon flipped")
                else:
                    logging.warning("[Fix_Services] wifi.recon flip: FAILED: %s" % repr(result))
                    # quick flip didn't help, go for the full reset
                    self._tryTurningItOffAndOnAgain(agent)
            except Exception as err:
                logging.error("[Fix_Services]SYSLOG wifi.recon flip fail: %s" % err)
                self._tryTurningItOffAndOnAgain(agent)

    def get_last_lines(self, command, args, n):
        """
        Run ``command args`` and return its last *n* output lines as one
        string, or None on failure.
        """
        try:
            process = subprocess.Popen([command] + args, stdout=subprocess.PIPE)
            output = TextIOWrapper(process.stdout)
            lines = output.readlines()
            last_n_lines = ''.join(lines[-n:])
            return last_n_lines
        except Exception as e:
            print(f"Error occurred: {e}")
            return None

    def on_epoch(self, agent, epoch, epoch_data):
        """
        Once per epoch, scan recent logs for the three failure patterns and
        run the matching recovery action (at most once per 3 minutes).
        """
        last_lines = self.get_last_lines('journalctl', ['-n10', '-k'], 10)
        other_last_lines = self.get_last_lines('journalctl', ['-n10'], 10)
        other_other_last_lines = self.get_last_lines('tail', ['-n10', '/home/pi/logs/pwnagotchi.log'], 10)
        # don't check if we ran a reset recently
        logging.debug("[Fix_Services]**** epoch")
        if time.time() - self.LASTTRY > 180:
            # get last 10 lines
            display = None
            logging.debug("[Fix_Services]**** checking")
            # Look for pattern 1
            if len(self.pattern1.findall(other_last_lines)) >= 5:
                logging.debug("[Fix_Services]**** Should trigger a reload of the wlan0mon device:\n%s" % last_lines)
                if hasattr(agent, 'view'):
                    display = agent.view()
                    display.set('status', 'Wifi channel stuck. Restarting recon.')
                    display.update(force=True)
                logging.info('[Fix_Services] Wifi channel stuck. Restarting recon.')
                try:
                    result = agent.run("wifi.recon off; wifi.recon on")
                    if result["success"]:
                        logging.info("[Fix_Services] wifi.recon flip: success!")
                        if display:
                            display.update(force=True, new_data={"status": "Wifi recon flipped!",
                                                                 "face": faces.COOL})
                        else:
                            print("Wifi recon flipped\nthat was easy!")
                    else:
                        logging.warning("[Fix_Services] wifi.recon flip: FAILED: %s" % repr(result))
                except Exception as err:
                    logging.error("[Fix_Services wifi.recon flip] %s" % repr(err))
            # Look for pattern 2
            elif len(self.pattern2.findall(other_last_lines)) >= 1:
                logging.info("[Fix_Services] Firmware has halted or crashed. Restarting wlan0mon.")
                if hasattr(agent, 'view'):
                    display = agent.view()
                    display.set('status', 'Firmware has halted or crashed. Restarting wlan0mon.')
                    display.update(force=True)
                try:
                    # Run the monstart command to restart wlan0mon
                    cmd_output = subprocess.check_output("monstart", shell=True)
                    logging.debug("[Fix_Services monstart]: %s" % repr(cmd_output))
                except Exception as err:
                    logging.error("[Fix_Services monstart]: %s" % repr(err))
            # Look for pattern 3
            elif len(self.pattern3.findall(other_other_last_lines)) >= 3:
                logging.info("[Fix_Services] wlan0 is down!")
                if hasattr(agent, 'view'):
                    display = agent.view()
                    display.set('status', 'Restarting wlan0 now!')
                    display.update(force=True)
                try:
                    # Run the monstart command to restart wlan0mon
                    cmd_output = subprocess.check_output("monstart", shell=True)
                    logging.debug("[Fix_Services monstart]: %s" % repr(cmd_output))
                except Exception as err:
                    logging.error("[Fix_Services monstart]: %s" % repr(err))
            else:
                print("logs look good")

    def logPrintView(self, level, message, ui=None, displayData=None, force=True):
        """
        Log *message* at *level* and, when a ui is available, push
        *displayData* to the display; otherwise fall back to print().
        """
        try:
            if level == "error":
                logging.error(message)
            elif level == "warning":
                logging.warning(message)
            elif level == "debug":
                logging.debug(message)
            else:
                logging.info(message)
            if ui:
                ui.update(force=force, new_data=displayData)
            elif displayData and "status" in displayData:
                print(displayData["status"])
            else:
                print("[%s] %s" % (level, message))
        except Exception as err:
            logging.error("[logPrintView] ERROR %s" % repr(err))

    def _tryTurningItOffAndOnAgain(self, connection):
        """
        Full recovery: pause wifi.recon, tear down wlan0mon, reload the
        brcmfmac kernel module (up to 3 attempts), recreate wlan0mon and
        restart recon. Reboots the unit if the module never comes back.
        """
        # avoid overlapping restarts, but allow it if it's been a while
        # (in case the last attempt failed before resetting "isReloadingMon")
        if self.isReloadingMon and (time.time() - self.LASTTRY) < 180:
            logging.info("[Fix_Services] Duplicate attempt ignored")
        else:
            self.isReloadingMon = True
            self.LASTTRY = time.time()
            if hasattr(connection, 'view'):
                display = connection.view()
                if display:
                    display.update(force=True, new_data={"status": "I'm blind! Try turning it off and on again",
                                                         "face": faces.BORED})
            else:
                display = None
            # main divergence from WATCHDOG starts here
            #
            # instead of rebooting, and losing all that energy loading up the AI
            # pause wifi.recon, close wlan0mon, reload the brcmfmac kernel module
            # then recreate wlan0mon, ..., and restart wifi.recon
            # Turn it off
            # attempt a sanity check. does wlan0mon exist?
            # is it up?
            try:
                cmd_output = subprocess.check_output("ip link show wlan0mon", shell=True)
                logging.debug("[Fix_Services ip link show wlan0mon]: %s" % repr(cmd_output))
                if ",UP," in str(cmd_output):
                    logging.info("wlan0mon is up. Skip reset?")
                    # not reliable, so don't skip just yet
                    # print("wlan0mon is up. Skipping reset.")
                    # self.isReloadingMon = False
                    # return
            except Exception as err:
                logging.error("[Fix_Services ip link show wlan0mon]: %s" % repr(err))
            try:
                result = connection.run("wifi.recon off")
                if "success" in result:
                    self.logPrintView("info", "[Fix_Services] wifi.recon off: %s!" % repr(result),
                                      display, {"status": "Wifi recon paused!", "face": faces.COOL})
                    time.sleep(2)
                else:
                    self.logPrintView("warning", "[Fix_Services] wifi.recon off: FAILED: %s" % repr(result),
                                      display, {"status": "Recon was busted (probably)",
                                                "face": random.choice((faces.BROKEN, faces.DEBUG))})
            except Exception as err:
                logging.error("[Fix_Services wifi.recon off] error %s" % (repr(err)))
            logging.info("[Fix_Services] recon paused. Now trying wlan0mon reload")
            try:
                cmd_output = subprocess.check_output("monstop", shell=True)
                self.logPrintView("info", "[Fix_Services] wlan0mon down and deleted: %s" % cmd_output,
                                  display, {"status": "wlan0mon d-d-d-down!", "face": faces.BORED})
            except Exception as nope:
                # monstop failing usually means the interface was already gone
                logging.error("[Fix_Services delete wlan0mon] %s" % nope)
                pass
            logging.debug("[Fix_Services] Now trying modprobe -r")
            # Try this sequence 3 times until it is reloaded
            #
            # Future: while "not fixed yet": blah blah blah. if "max_attemts", then reboot like the old days
            #
            tries = 0
            while tries < 3:
                try:
                    # unload the module
                    cmd_output = subprocess.check_output("sudo modprobe -r brcmfmac", shell=True)
                    self.logPrintView("info", "[Fix_Services] unloaded brcmfmac", display,
                                      {"status": "Turning it off #%s" % tries, "face": faces.SMART})
                    time.sleep(1 + tries)
                    # reload the module
                    try:
                        # reload the brcmfmac kernel module
                        cmd_output = subprocess.check_output("sudo modprobe brcmfmac", shell=True)
                        self.logPrintView("info", "[Fix_Services] reloaded brcmfmac")
                        time.sleep(10 + 4 * tries)  # give it some time for wlan device to stabilize, or whatever
                        # success! now make the mon0
                        try:
                            cmd_output = subprocess.check_output("monstart", shell=True)
                            self.logPrintView("info", "[Fix_Services interface add wlan0mon] worked #%s: %s"
                                              % (tries, cmd_output))
                            time.sleep(tries + 5)
                            try:
                                # try accessing mon0 in bettercap
                                result = connection.run("set wifi.interface wlan0mon")
                                if "success" in result:
                                    logging.info("[Fix_Services set wifi.interface wlan0mon] worked!")
                                    self._count = self._count + 1
                                    time.sleep(1)
                                    # stop looping and get back to recon
                                    break
                                else:
                                    logging.debug("[Fix_Services set wifi.interfaceface wlan0mon] failed? %s" % repr(result))
                            except Exception as err:
                                logging.debug("[Fix_Services set wifi.interface wlan0mon] except: %s" % repr(err))
                        except Exception as cerr:  #
                            if not display:
                                print("failed loading wlan0mon attempt #%s: %s" % (tries, repr(cerr)))
                    except Exception as err:  # from modprobe
                        if not display:
                            print("Failed reloading brcmfmac")
                        logging.error("[Fix_Services] Failed reloading brcmfmac %s" % repr(err))
                except Exception as nope:  # from modprobe -r
                    # fails if already unloaded, so probably fine
                    logging.error("[Fix_Services #%s modprobe -r] %s" % (tries, repr(nope)))
                    if not display:
                        print("[Fix_Services #%s modprobe -r] %s" % (tries, repr(nope)))
                    pass
                tries = tries + 1
                if tries < 3:
                    logging.info("[Fix_Services] wlan0mon didn't make it. trying again")
                    if not display:
                        print(" wlan0mon didn't make it. trying again")
                else:
                    logging.info("[Fix_Services] wlan0mon loading failed, no choice but to reboot ..")
                    pwnagotchi.reboot()
            # exited the loop, so hopefully it loaded
            if tries < 3:
                if display:
                    display.update(force=True, new_data={"status": "And back on again...",
                                                         "face": faces.INTENSE})
                else:
                    print("And back on again...")
                logging.info("[Fix_Services] wlan0mon back up")
            else:
                self.LASTTRY = time.time()
            time.sleep(8 + tries * 2)  # give it a bit before restarting recon in bettercap
            self.isReloadingMon = False
            logging.info("[Fix_Services] re-enable recon")
            try:
                result = connection.run("wifi.clear; wifi.recon on")
                if "success" in result:  # and result["success"] is True:
                    if display:
                        display.update(force=True, new_data={"status": "I can see again! (probably)",
                                                             "face": faces.HAPPY})
                    else:
                        print("I can see again")
                    logging.debug("[Fix_Services] wifi.recon on")
                    self.LASTTRY = time.time() + 120  # 2-minute pause until next time.
                else:
                    logging.error("[Fix_Services] wifi.recon did not start up")
                    self.LASTTRY = time.time() - 300  # failed, so try again ASAP
                    self.isReloadingMon = False
            except Exception as err:
                logging.error("[Fix_Services wifi.recon on] %s" % repr(err))
                pwnagotchi.reboot()

    def on_unload(self, ui):
        with ui._lock:
            try:
                logging.info("[Fix_Services] unloaded")
            except Exception as err:
                logging.error("[Fix_Services] unload err %s " % repr(err))
                pass
# run from command line to brute force a reload
if __name__ == "__main__":
    print("Performing brcmfmac reload and restart wlan0mon in 5 seconds...")
    fb = FixServices()
    data = {'Message': "kernel: brcmfmac: brcmf_cfg80211_nexmon_set_channel: Set Channel failed: chspec=1234"}
    event = {'data': data}
    agent = Client('localhost', port=8081, username="pwnagotchi", password="pwnagotchi")
    time.sleep(2)
    print("3 seconds")
    time.sleep(3)
    # NOTE(review): on_epoch's signature is (agent, epoch, epoch_data); `event`
    # is passed where `epoch` is expected — the fabricated syslog event looks
    # intended for on_bcap_sys_log (see commented call below). Confirm intent.
    fb.on_epoch(agent, event, None)
    # fb._tryTurningItOffAndOnAgain(agent)

View File

@ -1,251 +0,0 @@
import logging
import os
import shutil
import time
import pwnagotchi.plugins as plugins
import pwnagotchi
import pydrive2
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
from threading import Lock
from pwnagotchi.utils import StatusFile
import zipfile
class GdriveSync(plugins.Plugin):
    __author__ = '@jayofelony'
    __version__ = '1.2'
    __license__ = 'GPL3'
    __description__ = 'A plugin to backup various pwnagotchi files and folders to Google Drive. Once every hour from loading plugin.'

    def __init__(self):
        self.options = dict()
        # serializes backup runs triggered by concurrent handshake events
        self.lock = Lock()
        self.internet = False
        self.ready = False
        # timestamp of the last successful backup
        self.status = StatusFile('/root/.gdrive-backup')
        # False until a first backup has ever been made on this unit
        self.backup = True
        self.backupfiles = [
            '/root/brain.nn',
            '/root/brain.json',
            '/root/.api-report.json',
            '/root/handshakes',
            '/root/peers',
            '/etc/pwnagotchi'
        ]

    def on_loaded(self):
        """
        Called when the plugin is loaded.

        Authenticates against Google Drive and, on a first run, either uploads
        an initial backup or restores the most recent one from the configured
        backup folder (restarting the unit after a restore).
        """
        # client_secrets.json needs to be not empty
        if os.stat("/root/client_secrets.json").st_size == 0:
            logging.error("[gDriveSync] /root/client_secrets.json is empty. Please RTFM!")
            return
        # backup file, so we know if there has been a backup made at least once before.
        if not os.path.exists("/root/.gdrive-backup"):
            self.backup = False
        try:
            gauth = GoogleAuth(settings_file="/root/settings.yaml")
            gauth.LoadCredentialsFile("/root/credentials.json")
            if gauth.credentials is None:
                # Authenticate if they're not there
                gauth.LocalWebserverAuth()
            elif gauth.access_token_expired:
                # Refresh them if expired
                gauth.Refresh()
            gauth.SaveCredentialsFile("/root/credentials.json")
            gauth.Authorize()
            # Create GoogleDrive instance
            self.drive = GoogleDrive(gauth)
            # if backup file does not exist, we will check for backup folder on gdrive.
            if not self.backup:
                # Use self.options['backup_folder'] as the folder ID where backups are stored
                backup_folder = self.create_folder_if_not_exists(self.options['backup_folder'])
                # Continue with the rest of the code using backup_folder_id
                backup_folder_file_list = self.drive.ListFile({'q': f"'{backup_folder}' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed=false"}).GetList()
                if not backup_folder_file_list:
                    # No backup on gdrive yet: create and upload the first one
                    if self.options['backupfiles'] is not None:
                        self.backupfiles = self.backupfiles + self.options['backupfiles']
                    self.backup_files(self.backupfiles, '/home/pi/backup')
                    # Create a zip archive of the /backup folder
                    zip_file_path = os.path.join('/home/pi', 'backup.zip')
                    with zipfile.ZipFile(zip_file_path, 'w') as zip_ref:
                        for root, dirs, files in os.walk('/home/pi/backup'):
                            for file in files:
                                file_path = os.path.join(root, file)
                                arcname = os.path.relpath(file_path, '/home/pi/backup')
                                zip_ref.write(file_path, arcname=arcname)
                    # Upload the zip archive to Google Drive
                    self.upload_to_gdrive(zip_file_path, self.get_folder_id_by_name(self.drive, self.options['backup_folder']))
                    self.backup = True
                    self.status.update()
                # Specify the local backup path
                local_backup_path = '/home/pi/'
                # Download the zip archive from Google Drive
                zip_file_id = self.get_latest_backup_file_id(self.options['backup_folder'])
                if zip_file_id:
                    zip_file = self.drive.CreateFile({'id': zip_file_id})
                    zip_file.GetContentFile(os.path.join(local_backup_path, 'backup.zip'))
                    logging.info("[gDriveSync] Downloaded backup.zip from Google Drive")
                    # Extract the zip archive to the root directory
                    with zipfile.ZipFile(os.path.join(local_backup_path, 'backup.zip'), 'r') as zip_ref:
                        zip_ref.extractall('/')
                    self.status.update()
                    shutil.rmtree("/home/pi/backup")
                    os.remove("/home/pi/backup.zip")
                    self.ready = True
                    logging.info("[gdrivesync] loaded")
                    # Restart so we can start opwngrid with the backup id
                    pwnagotchi.restart("AUTO")
            # all set, gdriveSync is ready to run
            self.ready = True
            logging.info("[gdrivesync] loaded")
        except Exception as e:
            logging.error(f"Error: {e}")
            self.ready = False

    def get_latest_backup_file_id(self, backup_folder_id):
        """
        Return the id of the newest file in the named gdrive backup folder,
        or None when the folder is empty.
        """
        backup_folder_id = self.get_folder_id_by_name(self.drive, backup_folder_id)
        # Retrieve the latest backup file in the Google Drive folder
        file_list = self.drive.ListFile({'q': f"'{backup_folder_id}' in parents and trashed=false"}).GetList()
        if file_list:
            # Sort the files by creation date in descending order
            latest_backup = max(file_list, key=lambda file: file['createdDate'])
            return latest_backup['id']
        else:
            return None

    def get_folder_id_by_name(self, drive, folder_name, parent_folder_id=None):
        """
        Look up a gdrive folder id by title, optionally under a parent folder.
        Returns None when no such folder exists.
        """
        query = "mimeType='application/vnd.google-apps.folder' and trashed=false"
        if parent_folder_id:
            query += f" and '{parent_folder_id}' in parents"
        file_list = drive.ListFile({'q': query}).GetList()
        for file in file_list:
            if file['title'] == folder_name:
                return file['id']
        return None

    def create_folder_if_not_exists(self, backup_folder_name):
        """
        Return the id of the named backup folder, creating it when missing.
        """
        # First, try to retrieve the existing *BACKUP_FOLDER* folder
        backup_folder_id = self.get_folder_id_by_name(self.drive, backup_folder_name)
        if backup_folder_id is None:
            # If not found, create *BACKUP_FOLDER*
            backup_folder = self.drive.CreateFile(
                {'title': backup_folder_name, 'mimeType': 'application/vnd.google-apps.folder'})
            backup_folder.Upload()
            backup_folder_id = backup_folder['id']
            logging.info(f"[gDriveSync] Created folder '{backup_folder_name}' with ID: {backup_folder_id}")
        return backup_folder_id

    def on_unload(self, ui):
        """
        Called when the plugin is unloaded
        """
        logging.info("[gdrivesync] unloaded")

    def on_internet_available(self, agent):
        """
        Called when internet is available
        """
        self.internet = True

    def on_handshake(self, agent):
        """
        On every handshake, back up the configured files to gdrive at most
        once per `interval` hours.
        """
        display = agent.view()
        # fix: the original used `and`, which only bailed out when BOTH flags
        # were False — a backup requires the plugin to be ready AND internet.
        if not self.ready or not self.internet:
            return
        if self.lock.locked():
            return
        with self.lock:
            if self.status.newer_then_hours(self.options['interval']):
                logging.debug("[update] last check happened less than %d hours ago" % self.options['interval'])
                return
            logging.info("[gdrivesync] new handshake captured, backing up to gdrive")
            if self.options['backupfiles'] is not None:
                self.backupfiles = self.backupfiles + self.options['backupfiles']
            self.backup_files(self.backupfiles, '/home/pi/backup')
            # Create a zip archive of the /backup folder
            zip_file_path = os.path.join('/home/pi', 'backup.zip')
            with zipfile.ZipFile(zip_file_path, 'w') as zip_ref:
                for root, dirs, files in os.walk('/home/pi/backup'):
                    for file in files:
                        file_path = os.path.join(root, file)
                        arcname = os.path.relpath(file_path, '/home/pi/backup')
                        zip_ref.write(file_path, arcname=arcname)
            # Upload the zip archive to Google Drive
            self.upload_to_gdrive(zip_file_path, self.get_folder_id_by_name(self.drive, self.options['backup_folder']))
            display.on_uploading("Google Drive")
            # Cleanup the local zip file
            os.remove(zip_file_path)
            shutil.rmtree("/home/pi/backup")
            self.status.update()
            # fix: the original passed a *set* literal to new_data, which is
            # not the dict the view expects — use a proper status entry.
            display.update(force=True, new_data={'status': 'Backing up to gdrive ...'})

    def backup_files(self, paths, dest_path):
        """
        Copy each existing path in *paths* into *dest_path*, preserving the
        directory structure relative to /.
        """
        for src_path in paths:
            try:
                if os.path.exists(src_path):
                    dest_relative_path = os.path.relpath(src_path, '/')
                    dest = os.path.join(dest_path, dest_relative_path)
                    if os.path.isfile(src_path):
                        # If it's a file, copy it to the destination preserving the directory structure
                        os.makedirs(os.path.dirname(dest), exist_ok=True)
                        # Check if the destination file already exists
                        if os.path.exists(dest):
                            # If it exists, remove it to overwrite
                            os.remove(dest)
                        # fix: the original removed any stale copy but never
                        # actually copied the file, so backups of single
                        # files were always empty.
                        shutil.copy2(src_path, dest)
                    elif os.path.isdir(src_path):
                        # If it's a directory, copy the entire directory to the destination;
                        # drop a stale copy first since copytree refuses to overwrite.
                        if os.path.exists(dest):
                            shutil.rmtree(dest)
                        shutil.copytree(src_path, dest)
            except Exception as e:
                logging.error(f"[gDriveSync] Error during backup_path: {e}")

    def upload_to_gdrive(self, backup_path, gdrive_folder):
        """
        Upload the zip at *backup_path* into the gdrive folder id
        *gdrive_folder*, retrying on rate-limit errors.
        """
        try:
            # Upload zip-file to google drive
            # Create a GoogleDriveFile instance for the zip file
            zip_file = self.drive.CreateFile({'title': 'backup.zip', 'parents': [{'id': gdrive_folder}]})
            # Set the content of the file to the zip file
            zip_file.SetContentFile(backup_path)
            # Upload the file to Google Drive
            zip_file.Upload()
            logging.info("[gDriveSync] Backup uploaded to Google Drive")
        except pydrive2.files.ApiRequestError as api_error:
            self.handle_upload_error(api_error, backup_path, gdrive_folder)
        except Exception as e:
            logging.error(f"[gDriveSync] Error during upload_to_gdrive: {e}")

    def handle_upload_error(self, api_error, backup_path, gdrive_folder):
        """
        Retry an upload after a rate-limit error; log anything else.
        """
        if 'Rate Limit Exceeded' in str(api_error):
            logging.warning("[gDriveSync] Rate limit exceeded. Waiting for some time before retrying...")
            # We set to 100 seconds, because there is a limit 20k requests per 100s per user
            time.sleep(100)  # You can adjust the sleep duration based on your needs
            self.upload_to_gdrive(backup_path, gdrive_folder)
        else:
            logging.error(f"[gDriveSync] API Request Error: {api_error}")

View File

@ -1,43 +0,0 @@
import logging
import RPi.GPIO as GPIO
import subprocess
import pwnagotchi.plugins as plugins
class GPIOButtons(plugins.Plugin):
    __author__ = 'ratmandu@gmail.com'
    __version__ = '1.0.0'
    __license__ = 'GPL3'
    __description__ = 'GPIO Button support plugin'

    def __init__(self):
        self.running = False
        # BCM pin number -> shell command to run on button press
        self.ports = {}
        self.commands = None

    def runCommand(self, channel):
        """
        GPIO falling-edge callback: run the shell command configured for the
        pin (*channel*) that fired.
        """
        command = self.ports[channel]
        logging.info(f"Button Pressed! Running command: {command}")
        process = subprocess.Popen(command, shell=True, stdin=None, stdout=open("/dev/null", "w"), stderr=None,
                                   executable="/bin/bash")
        process.wait()

    def on_loaded(self):
        """
        Register a falling-edge handler (600ms debounce) for each configured
        GPIO pin.
        """
        logging.info("GPIO Button plugin loaded.")
        # get list of GPIOs
        gpios = self.options['gpios']
        # set gpio numbering
        GPIO.setmode(GPIO.BCM)
        for gpio, command in gpios.items():
            gpio = int(gpio)
            self.ports[gpio] = command
            GPIO.setup(gpio, GPIO.IN, GPIO.PUD_UP)
            GPIO.add_event_detect(gpio, GPIO.FALLING, callback=self.runCommand, bouncetime=600)
            # set pimoroni display hat mini LED off/dim
            # NOTE(review): these three fixed pins are re-configured on every
            # loop iteration — confirm whether this belongs outside the loop.
            GPIO.setup(17, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            GPIO.setup(22, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            GPIO.setup(27, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            logging.info("Added command: %s to GPIO #%d", command, gpio)

View File

@ -1,164 +0,0 @@
import json
import logging
import os
import pwnagotchi.plugins as plugins
import pwnagotchi.ui.fonts as fonts
from pwnagotchi.ui.components import LabeledValue
from pwnagotchi.ui.view import BLACK
class GPS(plugins.Plugin):
    """Enable bettercap's GPS module and save a fix next to each handshake."""
    __author__ = "evilsocket@gmail.com"
    __version__ = "1.0.1"
    __license__ = "GPL3"
    __description__ = "Save GPS coordinates whenever an handshake is captured."

    # Vertical distance (px) between the lat/long/alt rows on screen.
    LINE_SPACING = 10
    # Gap between a label and its value inside a LabeledValue.
    LABEL_SPACING = 0

    def __init__(self):
        # True once bettercap's gps module has been enabled successfully.
        self.running = False
        # Last fix dict from bettercap (keys: Latitude, Longitude, Altitude).
        self.coordinates = None
        # Filled in by the plugin loader from the config file.
        self.options = dict()

    def on_loaded(self):
        logging.info(f"gps plugin loaded for {self.options['device']}")

    def on_ready(self, agent):
        """Point bettercap at the configured GPS device and switch it on.

        The device may be a serial path (checked with os.path.exists) or a
        host:port style endpoint (detected by the ':' in the string).
        """
        if os.path.exists(self.options["device"]) or ":" in self.options["device"]:
            logging.info(
                f"enabling bettercap's gps module for {self.options['device']}"
            )
            try:
                # restart cleanly: turn the module off first, ignore failures
                agent.run("gps off")
            except Exception:
                logging.info(f"bettercap gps module was already off")
                pass
            agent.run(f"set gps.device {self.options['device']}")
            agent.run(f"set gps.baudrate {self.options['speed']}")
            agent.run("gps on")
            logging.info(f"bettercap gps module enabled on {self.options['device']}")
            self.running = True
        else:
            logging.warning("no GPS detected")

    def on_handshake(self, agent, filename, access_point, client_station):
        """Write the current fix to <handshake>.gps.json if one is available."""
        if self.running:
            info = agent.session()
            self.coordinates = info["gps"]
            gps_filename = filename.replace(".pcap", ".gps.json")
            if self.coordinates and all([
                # avoid 0.000... measurements
                self.coordinates["Latitude"], self.coordinates["Longitude"]
            ]):
                logging.info(f"saving GPS to {gps_filename} ({self.coordinates})")
                with open(gps_filename, "w+t") as fp:
                    json.dump(self.coordinates, fp)
            else:
                logging.info("not saving GPS. Couldn't find location.")

    def on_ui_setup(self, ui):
        """Add lat/long/alt widgets, positioned per config or per display type."""
        try:
            # Configure line_spacing
            line_spacing = int(self.options['linespacing'])
        except Exception:
            # Set default value
            line_spacing = self.LINE_SPACING
        try:
            # Configure position from a "x,y" config string
            pos = self.options['position'].split(',')
            pos = [int(x.strip()) for x in pos]
            lat_pos = (pos[0] + 5, pos[1])
            lon_pos = (pos[0], pos[1] + line_spacing)
            alt_pos = (pos[0] + 5, pos[1] + (2 * line_spacing))
        except Exception:
            # Set default value based on display type
            if ui.is_waveshare_v2():
                lat_pos = (127, 74)
                lon_pos = (122, 84)
                alt_pos = (127, 94)
            elif ui.is_waveshare_v1():
                lat_pos = (130, 70)
                lon_pos = (125, 80)
                alt_pos = (130, 90)
            elif ui.is_inky():
                lat_pos = (127, 60)
                lon_pos = (122, 70)
                alt_pos = (127, 80)
            elif ui.is_waveshare144lcd():
                # guessed values, add tested ones if you can
                lat_pos = (67, 73)
                lon_pos = (62, 83)
                alt_pos = (67, 93)
            elif ui.is_dfrobot_v2():
                lat_pos = (127, 74)
                lon_pos = (122, 84)
                alt_pos = (127, 94)
            elif ui.is_waveshare27inch():
                lat_pos = (6, 120)
                lon_pos = (1, 135)
                alt_pos = (6, 150)
            else:
                # guessed values, add tested ones if you can
                lat_pos = (127, 51)
                lon_pos = (122, 61)
                alt_pos = (127, 71)
        ui.add_element(
            "latitude",
            LabeledValue(
                color=BLACK,
                label="lat:",
                value="-",
                position=lat_pos,
                label_font=fonts.Small,
                text_font=fonts.Small,
                label_spacing=self.LABEL_SPACING,
            ),
        )
        ui.add_element(
            "longitude",
            LabeledValue(
                color=BLACK,
                label="long:",
                value="-",
                position=lon_pos,
                label_font=fonts.Small,
                text_font=fonts.Small,
                label_spacing=self.LABEL_SPACING,
            ),
        )
        ui.add_element(
            "altitude",
            LabeledValue(
                color=BLACK,
                label="alt:",
                value="-",
                position=alt_pos,
                label_font=fonts.Small,
                text_font=fonts.Small,
                label_spacing=self.LABEL_SPACING,
            ),
        )

    def on_unload(self, ui):
        # Remove our widgets under the UI lock so a concurrent render can't
        # touch half-removed elements.
        with ui._lock:
            ui.remove_element('latitude')
            ui.remove_element('longitude')
            ui.remove_element('altitude')

    def on_ui_update(self, ui):
        with ui._lock:
            if self.coordinates and all([
                # avoid 0.000... measurements
                self.coordinates["Latitude"], self.coordinates["Longitude"]
            ]):
                # last char is sometimes not completely drawn ¯\_(ツ)_/¯
                # using an ending-whitespace as workaround on each line
                ui.set("latitude", f"{self.coordinates['Latitude']:.4f} ")
                ui.set("longitude", f"{self.coordinates['Longitude']:.4f} ")
                ui.set("altitude", f"{self.coordinates['Altitude']:.1f}m ")

View File

@ -1,151 +0,0 @@
import os
import logging
import time
import glob
import re
import pwnagotchi.grid as grid
import pwnagotchi.plugins
import pwnagotchi.plugins as plugins
from pwnagotchi.utils import StatusFile, WifiInfo, extract_from_pcap
from threading import Lock
def parse_pcap(filename):
    """Return ``(essid, bssid)`` for a handshake pcap file.

    The filename encodes the network as ``ESSID_BSSID.pcap`` or plain
    ``BSSID.pcap``; when possible the pcap contents are parsed for
    authoritative values. Returns ``('', '')`` when the filename does not
    yield a valid 12-hex-digit BSSID.
    """
    logging.info("grid: parsing %s ..." % filename)
    net_id = os.path.basename(filename).replace('.pcap', '')
    if '_' in net_id:
        # /root/handshakes/ESSID_BSSID.pcap
        # rsplit with maxsplit=1: the ESSID itself may contain underscores,
        # while the BSSID is always the final component. A plain split('_')
        # raised ValueError ("too many values to unpack") for such names.
        essid, bssid = net_id.rsplit('_', 1)
    else:
        # /root/handshakes/BSSID.pcap
        essid, bssid = '', net_id
    mac_re = re.compile('[0-9a-fA-F]{12}')
    if not mac_re.match(bssid):
        return '', ''
    # reformat "aabbccddeeff" as "aa:bb:cc:dd:ee:ff"
    it = iter(bssid)
    bssid = ':'.join([a + b for a, b in zip(it, it)])
    # filename-derived fallback, used when pcap extraction fails below
    info = {
        WifiInfo.ESSID: essid,
        WifiInfo.BSSID: bssid,
    }
    try:
        info = extract_from_pcap(filename, [WifiInfo.BSSID, WifiInfo.ESSID])
    except Exception as e:
        logging.error("grid: %s" % e)
    return info[WifiInfo.ESSID], info[WifiInfo.BSSID]
class Grid(plugins.Plugin):
    """Report pwned networks and the unit identity to the pwngrid service."""
    __author__ = 'evilsocket@gmail.com'
    __version__ = '1.0.1'
    __license__ = 'GPL3'
    __description__ = 'This plugin signals the unit cryptographic identity and list of pwned networks and list of pwned ' \
                      'networks to api.pwnagotchi.ai '

    def __init__(self):
        self.options = dict()
        # Persists the list of already-reported network ids across restarts.
        self.report = StatusFile('/root/.api-report.json', data_format='json')
        self.unread_messages = 0
        self.total_messages = 0
        # Serializes on_internet_available runs (skipped if already held).
        self.lock = Lock()

    def is_excluded(self, what, agent):
        """True if *what* (ESSID/BSSID/net id) matches the main whitelist.

        Matches case-insensitively, with and without ':' separators, so the
        same entry covers both ESSID and BSSID forms.
        """
        config = agent.config()
        for skip in config['main']['whitelist']:
            skip = skip.lower()
            what = what.lower()
            if skip in what or skip.replace(':', '') in what:
                return True
        return False

    def on_loaded(self):
        logging.info("grid plugin loaded.")

    def set_reported(self, reported, net_id):
        """Mark *net_id* as reported and persist the updated list."""
        if net_id not in reported:
            reported.append(net_id)
        self.report.update(data={'reported': reported})

    def check_inbox(self, agent):
        """Poll the pwngrid inbox and surface unread counts to the UI."""
        logging.debug("checking mailbox ...")
        messages = grid.inbox()
        self.total_messages = len(messages)
        self.unread_messages = len([m for m in messages if m['seen_at'] is None])
        if self.unread_messages:
            # notify other plugins and the display about unread mail
            plugins.on('unread_inbox', self.unread_messages)
            logging.debug("[grid] unread:%d total:%d" % (self.unread_messages, self.total_messages))
            agent.view().on_unread_messages(self.unread_messages, self.total_messages)

    def check_handshakes(self, agent):
        """Report any not-yet-reported handshake pcaps to pwngrid."""
        logging.debug("checking pcaps")
        config = agent.config()
        pcap_files = glob.glob(os.path.join(agent.config()['bettercap']['handshakes'], "*.pcap"))
        num_networks = len(pcap_files)
        reported = self.report.data_field_or('reported', default=[])
        num_reported = len(reported)
        num_new = num_networks - num_reported
        if num_new > 0:
            if self.options['report']:
                logging.info("grid: %d new networks to report" % num_new)
                logging.debug("self.options: %s" % self.options)
                logging.debug("  exclude: %s" % config['main']['whitelist'])
                for pcap_file in pcap_files:
                    net_id = os.path.basename(pcap_file).replace('.pcap', '')
                    if net_id not in reported:
                        # whitelisted networks are marked reported so they are
                        # not re-examined on every pass
                        if self.is_excluded(net_id, agent):
                            logging.debug("skipping %s due to exclusion filter" % pcap_file)
                            self.set_reported(reported, net_id)
                            continue
                        essid, bssid = parse_pcap(pcap_file)
                        if bssid:
                            if self.is_excluded(essid, agent) or self.is_excluded(bssid, agent):
                                logging.debug("not reporting %s due to exclusion filter" % pcap_file)
                                self.set_reported(reported, net_id)
                            else:
                                if grid.report_ap(essid, bssid):
                                    self.set_reported(reported, net_id)
                                # small delay to stay well under API rate limits
                                time.sleep(1.5)
                        else:
                            logging.warning("no bssid found?!")
            else:
                logging.debug("grid: reporting disabled")

    def on_internet_available(self, agent):
        """Push session data, then poll inbox and report new handshakes."""
        logging.debug("internet available")
        # best-effort skip if a previous run is still in flight
        if self.lock.locked():
            return
        with self.lock:
            try:
                grid.update_data(agent.last_session)
            except Exception as e:
                logging.error("error connecting to the pwngrid-peer service: %s" % e)
                logging.debug(e, exc_info=True)
                # without a reachable peer service there is nothing else to do
                return
            try:
                self.check_inbox(agent)
            except Exception as e:
                logging.error("[grid] error while checking inbox: %s" % e)
                logging.debug(e, exc_info=True)
            try:
                self.check_handshakes(agent)
            except Exception as e:
                logging.error("[grid] error while checking pcaps: %s" % e)
                logging.debug(e, exc_info=True)

View File

@ -1,179 +0,0 @@
import logging
import subprocess
import os
import json
import pwnagotchi.plugins as plugins
from threading import Lock
'''
hcxpcapngtool needed, to install:
> git clone https://github.com/ZerBea/hcxtools.git
> cd hcxtools
> apt-get install libcurl4-openssl-dev libssl-dev zlib1g-dev
> make
> sudo make install
'''
class Hashie(plugins.Plugin):
    __author__ = 'Jayofelony'
    __version__ = '1.0.4'
    __license__ = 'GPL3'
    __description__ = '''
Attempt to automatically convert pcaps to a crackable format.
If successful, the files containing the hashes will be saved
in the same folder as the handshakes.
The files are saved in their respective Hashcat format:
- EAPOL hashes are saved as *.22000
- PMKID hashes are saved as *.16800
All PCAP files without enough information to create a hash are
stored in a file that can be read by the webgpsmap plugin.
Why use it?:
- Automatically convert handshakes to crackable formats!
We dont all upload our hashes online ;)
- Repair PMKID handshakes that hcxpcapngtool misses
- If running at time of handshake capture, on_handshake can
be used to improve the chance of the repair succeeding
- Be a completionist! Not enough packets captured to crack a network?
This generates an output file for the webgpsmap plugin, use the
location data to revisit networks you need more packets for!
Additional information:
- Currently requires hcxpcapngtool compiled and installed
- Attempts to repair PMKID hashes when hcxpcapngtool cant find the SSID
- hcxpcapngtool sometimes has trouble extracting the SSID, so we
use the raw 16800 output and attempt to retrieve the SSID via tcpdump
- When access_point data is available (on_handshake), we leverage
the reported AP name and MAC to complete the hash
- The repair is very basic and could certainly be improved!
Todo:
Make it so users dont need hcxpcapngtool (unless it gets added to the base image)
Phase 1: Extract/construct 22000/16800 hashes through tcpdump commands
Phase 2: Extract/construct 22000/16800 hashes entirely in python
Improve the code, a lot
'''

    def __init__(self):
        # Serializes on_ready's batch run against per-handshake conversions.
        self.lock = Lock()
        self.options = dict()

    def on_loaded(self):
        logging.info("[Hashie] Plugin loaded")

    def on_unloaded(self):
        # NOTE(review): pwnagotchi's unload hook is on_unload(ui); verify this
        # callback name is actually invoked by the plugin loader.
        logging.info("[Hashie] Plugin unloaded")

    # called when everything is ready and the main loop is about to start
    def on_ready(self, agent):
        """Batch-convert every pcap that does not yet have hash files."""
        config = agent.config()
        handshake_dir = config['bettercap']['handshakes']
        logging.info('[Hashie] Starting batch conversion of pcap files')
        with self.lock:
            self._process_stale_pcaps(handshake_dir)

    def on_handshake(self, agent, filename, access_point, client_station):
        """Convert a freshly captured pcap to .22000/.16800 hash files."""
        with self.lock:
            handshake_status = []
            # os.path.splitext instead of str.split('.'): ESSIDs and parent
            # directories may legitimately contain dots, which the old
            # split('.')[0] truncated to a wrong path.
            fullpath_no_ext = os.path.splitext(filename)[0]
            name = os.path.splitext(os.path.basename(filename))[0]
            if os.path.isfile(fullpath_no_ext + '.22000'):
                handshake_status.append('Already have {}.22000 (EAPOL)'.format(name))
            elif self._writeEAPOL(filename):
                handshake_status.append('Created {}.22000 (EAPOL) from pcap'.format(name))
            if os.path.isfile(fullpath_no_ext + '.16800'):
                handshake_status.append('Already have {}.16800 (PMKID)'.format(name))
            elif self._writePMKID(filename):
                handshake_status.append('Created {}.16800 (PMKID) from pcap'.format(name))
            if handshake_status:
                logging.info('[Hashie] Good news:\n\t' + '\n\t'.join(handshake_status))

    @staticmethod
    def _run_hcxpcapngtool(output_path, pcap_path):
        """Invoke hcxpcapngtool quietly; success is judged by the output file.

        Uses an argv list rather than a shell string: handshake filenames
        embed attacker-controlled ESSIDs and must never reach a shell.
        """
        try:
            subprocess.run(['hcxpcapngtool', '-o', output_path, pcap_path],
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        except OSError as os_err:
            # Tool missing/not executable: mirror the previous silent-failure
            # behaviour (callers only check whether the output file exists).
            logging.debug('[Hashie] could not run hcxpcapngtool: %s', os_err)

    def _writeEAPOL(self, fullpath):
        """Try to create <pcap>.22000; return True if the file now exists."""
        fullpath_no_ext = os.path.splitext(fullpath)[0]
        filename = os.path.splitext(os.path.basename(fullpath))[0]
        self._run_hcxpcapngtool(fullpath_no_ext + '.22000', fullpath)
        if os.path.isfile(fullpath_no_ext + '.22000'):
            logging.debug('[Hashie] [+] EAPOL Success: {}.22000 created'.format(filename))
            return True
        return False

    def _writePMKID(self, fullpath):
        """Try to create <pcap>.16800; return True if the file now exists."""
        fullpath_no_ext = os.path.splitext(fullpath)[0]
        filename = os.path.splitext(os.path.basename(fullpath))[0]
        self._run_hcxpcapngtool(fullpath_no_ext + '.16800', fullpath)
        if os.path.isfile(fullpath_no_ext + '.16800'):
            logging.debug('[Hashie] [+] PMKID Success: {}.16800 created'.format(filename))
            return True
        return False

    def _process_stale_pcaps(self, handshake_dir):
        """Convert all pcaps in *handshake_dir* that lack hash files."""
        handshakes_list = [os.path.join(handshake_dir, filename) for filename in os.listdir(handshake_dir) if filename.endswith('.pcap')]
        failed_jobs = []
        successful_jobs = []
        lonely_pcaps = []
        for num, handshake in enumerate(handshakes_list):
            fullpath_no_ext = os.path.splitext(handshake)[0]
            pcap_filename = os.path.basename(handshake)
            if not os.path.isfile(fullpath_no_ext + '.22000'):  # if no 22000, try
                if self._writeEAPOL(handshake):
                    successful_jobs.append('22000: ' + pcap_filename)
                else:
                    failed_jobs.append('22000: ' + pcap_filename)
            if not os.path.isfile(fullpath_no_ext + '.16800'):  # if no 16800, try
                if self._writePMKID(handshake):
                    successful_jobs.append('16800: ' + pcap_filename)
                else:
                    failed_jobs.append('16800: ' + pcap_filename)
            if not os.path.isfile(fullpath_no_ext + '.22000'):  # if no 16800 AND no 22000
                lonely_pcaps.append(handshake)
                logging.debug('[hashie] Batch job: added {} to lonely list'.format(pcap_filename))
            if ((num + 1) % 50 == 0) or (num + 1 == len(handshakes_list)):  # report progress every 50, or when done
                logging.info('[Hashie] Batch job: {}/{} done ({} fails)'.format(num + 1, len(handshakes_list), len(lonely_pcaps)))
        if successful_jobs:
            logging.info('[Hashie] Batch job: {} new handshake files created'.format(len(successful_jobs)))
        if lonely_pcaps:
            logging.info('[Hashie] Batch job: {} networks without enough packets to create a hash'.format(len(lonely_pcaps)))
            self._getLocations(lonely_pcaps)

    def _getLocations(self, lonely_pcaps):
        """Export the lonely-pcap list for webgpsmap and count GPS/GEO hits."""
        # export a file for webgpsmap to load
        with open('/root/.incompletePcaps', 'w') as is_incomplete:
            count = 0
            for pcap_file in lonely_pcaps:
                filename = os.path.basename(pcap_file)  # keep extension
                fullpath_no_ext = os.path.splitext(pcap_file)[0]
                is_incomplete.write(filename + '\n')
                if os.path.isfile(fullpath_no_ext + '.gps.json') or os.path.isfile(fullpath_no_ext + '.geo.json'):
                    count += 1
        if count != 0:
            logging.info('[Hashie] Used {} GPS/GEO files to find lonely networks, '
                         'go check webgpsmap! ;)'.format(str(count)))
        else:
            # (dropped a stray no-op .format() on this placeholder-free message)
            logging.info('[Hashie] Could not find any GPS/GEO files '
                         'for the lonely networks')

    def _getLocationsCSV(self, lonely_pcaps):
        """Manually export lonely-network locations to /root/locations.csv."""
        # in case we need this later, export locations manually to CSV file, needs try/catch format/etc.
        locations = []
        for pcap_file in lonely_pcaps:
            filename = os.path.splitext(os.path.basename(pcap_file))[0]
            fullpath_no_ext = os.path.splitext(pcap_file)[0]
            if os.path.isfile(fullpath_no_ext + '.gps.json'):
                with open(fullpath_no_ext + '.gps.json', 'r') as temp_file_a:
                    data = json.load(temp_file_a)
                    locations.append(filename + ',' + str(data['Latitude']) + ',' + str(data['Longitude']) + ',50')
            elif os.path.isfile(fullpath_no_ext + '.geo.json'):
                with open(fullpath_no_ext + '.geo.json', 'r') as temp_file_b:
                    data = json.load(temp_file_b)
                    locations.append(
                        filename + ',' + str(data['location']['lat']) + ',' + str(data['location']['lng']) + ',' + str(data['accuracy']))
        if locations:
            with open('/root/locations.csv', 'w') as temp_file_d:
                for loc in locations:
                    temp_file_d.write(loc + '\n')
            logging.info('[Hashie] Used {} GPS/GEO files to find lonely networks, '
                         'load /root/locations.csv into a mapping app and go say hi!'.format(len(locations)))

View File

@ -1,273 +0,0 @@
import os
import logging
import threading
from itertools import islice
from time import sleep
from datetime import datetime,timedelta
from pwnagotchi import plugins
from pwnagotchi.utils import StatusFile
from flask import render_template_string
from flask import jsonify
from flask import abort
from flask import Response
TEMPLATE = """
{% extends "base.html" %}
{% set active_page = "plugins" %}
{% block title %}
Logtail
{% endblock %}
{% block styles %}
{{ super() }}
<style>
* {
box-sizing: border-box;
}
#filter {
width: 100%;
font-size: 16px;
padding: 12px 20px 12px 40px;
border: 1px solid #ddd;
margin-bottom: 12px;
}
table {
border-collapse: collapse;
width: 100%;
border: 1px solid #ddd;
}
th, td {
text-align: left;
padding: 12px;
width: 1px;
white-space: nowrap;
}
td:nth-child(2) {
text-align: center;
}
thead, tr:hover {
background-color: #f1f1f1;
}
tr {
border-bottom: 1px solid #ddd;
}
div.sticky {
position: -webkit-sticky;
position: sticky;
top: 0;
display: table;
width: 100%;
}
div.sticky > * {
display: table-cell;
}
div.sticky > span {
width: 1%;
}
div.sticky > input {
width: 100%;
}
tr.default {
color: black;
}
tr.info {
color: black;
}
tr.warning {
color: darkorange;
}
tr.error {
color: crimson;
}
tr.debug {
color: blueviolet;
}
.ui-mobile .ui-page-active {
overflow: visible;
overflow-x: visible;
}
</style>
{% endblock %}
{% block script %}
var table = document.getElementById('table');
var filter = document.getElementById('filter');
var filterVal = filter.value.toUpperCase();
var xhr = new XMLHttpRequest();
xhr.open('GET', '{{ url_for('plugins') }}/logtail/stream');
xhr.send();
var position = 0;
var data;
var time;
var level;
var msg;
var colorClass;
function handleNewData() {
var messages = xhr.responseText.split('\\n');
filterVal = filter.value.toUpperCase();
messages.slice(position, -1).forEach(function(value) {
if (value.charAt(0) != '[') {
msg = value;
time = '';
level = '';
} else {
data = value.split(']');
time = data.shift() + ']';
level = data.shift() + ']';
msg = data.join(']');
switch(level) {
case ' [INFO]':
colorClass = 'info';
break;
case ' [WARNING]':
colorClass = 'warning';
break;
case ' [ERROR]':
colorClass = 'error';
break;
case ' [DEBUG]':
colorClass = 'debug';
break;
default:
colorClass = 'default';
break;
}
}
var tr = document.createElement('tr');
var td1 = document.createElement('td');
var td2 = document.createElement('td');
var td3 = document.createElement('td');
td1.textContent = time;
td2.textContent = level;
td3.textContent = msg;
tr.appendChild(td1);
tr.appendChild(td2);
tr.appendChild(td3);
tr.className = colorClass;
if (filterVal.length > 0 && value.toUpperCase().indexOf(filterVal) == -1) {
tr.style.display = "none";
}
table.appendChild(tr);
});
position = messages.length - 1;
}
var scrollingElement = (document.scrollingElement || document.body)
function scrollToBottom () {
scrollingElement.scrollTop = scrollingElement.scrollHeight;
}
var timer;
var scrollElm = document.getElementById('autoscroll');
timer = setInterval(function() {
handleNewData();
if (scrollElm.checked) {
scrollToBottom();
}
if (xhr.readyState == XMLHttpRequest.DONE) {
clearInterval(timer);
}
}, 1000);
var typingTimer;
var doneTypingInterval = 1000;
filter.onkeyup = function() {
clearTimeout(typingTimer);
typingTimer = setTimeout(doneTyping, doneTypingInterval);
}
filter.onkeydown = function() {
clearTimeout(typingTimer);
}
function doneTyping() {
document.body.style.cursor = 'progress';
var tr, tds, td, i, txtValue;
filterVal = filter.value.toUpperCase();
tr = table.getElementsByTagName("tr");
for (i = 1; i < tr.length; i++) {
txtValue = tr[i].textContent || tr[i].innerText;
if (txtValue.toUpperCase().indexOf(filterVal) > -1) {
tr[i].style.display = "table-row";
} else {
tr[i].style.display = "none";
}
}
document.body.style.cursor = 'default';
}
{% endblock %}
{% block content %}
<div class="sticky">
<input type="text" id="filter" placeholder="Search for ..." title="Type in a filter">
<span><input checked type="checkbox" id="autoscroll"></span>
<span><label for="autoscroll"> Autoscroll to bottom</label><br></span>
</div>
<table id="table">
<thead>
<th>
Time
</th>
<th>
Level
</th>
<th>
Message
</th>
</thead>
</table>
{% endblock %}
"""
class Logtail(plugins.Plugin):
    """Serve the pwnagotchi log file as a live, filterable web page."""
    __author__ = '33197631+dadav@users.noreply.github.com'
    __version__ = '0.1.0'
    __license__ = 'GPL3'
    __description__ = 'This plugin tails the logfile.'

    def __init__(self):
        self.lock = threading.Lock()
        self.options = dict()
        # webhook refuses requests until the config has been received
        self.ready = False

    def on_config_changed(self, config):
        # keep a reference to the global config: the stream handler needs
        # main.log.path from it
        self.config = config
        self.ready = True

    def on_loaded(self):
        """
        Gets called when the plugin gets loaded
        """
        logging.info("Logtail plugin loaded.")

    def on_webhook(self, path, request):
        """Serve the UI page at '/', and a plain-text log stream at 'stream'."""
        if not self.ready:
            return "Plugin not ready"
        if not path or path == "/":
            return render_template_string(TEMPLATE)
        if path == 'stream':
            def generate():
                with open(self.config['main']['log']['path']) as f:
                    # first chunk: the last max-lines (default 4096) lines
                    yield ''.join(f.readlines()[-self.options.get('max-lines', 4096):])
                    # then tail the file forever
                    # NOTE(review): readline() returns '' at EOF without
                    # blocking, so this busy-loops between new log lines —
                    # confirm the intended pacing.
                    while True:
                        yield f.readline()
            return Response(generate(), mimetype='text/plain')
        abort(404)

View File

@ -1,209 +0,0 @@
# memtemp shows memory infos and cpu temperature
#
# mem usage, cpu load, cpu temp, cpu frequency
#
###############################################################
#
# Updated 18-10-2019 by spees <speeskonijn@gmail.com>
# - Changed the place where the data was displayed on screen
# - Made the data a bit more compact and easier to read
# - removed the label so we wont waste screen space
# - Updated version to 1.0.1
#
# 20-10-2019 by spees <speeskonijn@gmail.com>
# - Refactored to use the already existing functions
# - Now only shows memory usage in percentage
# - Added CPU load
# - Added horizontal and vertical orientation
#
# 19-09-2020 by crahan <crahan@n00.be>
# - Added CPU frequency
# - Made field types and order configurable (max 3 fields)
# - Made line spacing and position configurable
# - Updated code to dynamically generate UI elements
# - Changed horizontal UI elements to Text
# - Updated to version 1.0.2
###############################################################
from pwnagotchi.ui.components import LabeledValue, Text
from pwnagotchi.ui.view import BLACK
import pwnagotchi.ui.fonts as fonts
import pwnagotchi.plugins as plugins
import pwnagotchi
import logging
class MemTemp(plugins.Plugin):
    __author__ = 'https://github.com/xenDE'
    __version__ = '1.0.2'
    __license__ = 'GPL3'
    __description__ = 'A plugin that will display memory/cpu usage and temperature'

    # field key (config value) -> method name that produces its display string
    ALLOWED_FIELDS = {
        'mem': 'mem_usage',
        'cpu': 'cpu_load',
        'cpus': 'cpu_load_since',
        'temp': 'cpu_temp',
        'freq': 'cpu_freq'
    }
    DEFAULT_FIELDS = ['mem', 'cpu', 'temp']
    # fallback vertical spacing (px) when 'linespacing' is not configured
    LINE_SPACING = 10
    LABEL_SPACING = 0
    # fixed width (chars) each field is right-padded to
    FIELD_WIDTH = 4

    def on_loaded(self):
        # prime the /proc/stat snapshot that cpu_load_since() diffs against
        self._last_cpu_load = self._cpu_stat()
        logging.info("memtemp plugin loaded.")

    def mem_usage(self):
        """Memory usage as a whole-percent string, e.g. '42%'."""
        return f"{int(pwnagotchi.mem_usage() * 100)}%"

    def cpu_load(self):
        """Instantaneous CPU load as a whole-percent string."""
        return f"{int(pwnagotchi.cpu_load() * 100)}%"

    def _cpu_stat(self):
        """
        Returns the splitted first line of the /proc/stat file
        """
        with open('/proc/stat', 'rt') as fp:
            return list(map(int,fp.readline().split()[1:]))

    def cpu_load_since(self):
        """
        Returns the % load, since last time called
        """
        parts0 = self._cpu_stat()
        parts1 = self._last_cpu_load
        self._last_cpu_load = parts0
        # NOTE(review): this diff is last-minus-current, so every entry is
        # negative; the signs cancel in the ratio below, but parts0/parts1
        # look swapped — confirm intent.
        parts_diff = [p1 - p0 for (p0, p1) in zip(parts0, parts1)]
        user, nice, sys, idle, iowait, irq, softirq, steal, _guest, _guest_nice = parts_diff
        idle_sum = idle + iowait
        non_idle_sum = user + nice + sys + irq + softirq + steal
        total = idle_sum + non_idle_sum
        return f"{int(non_idle_sum / total * 100)}%"

    def cpu_temp(self):
        """CPU temperature in the configured scale ('fahrenheit'/'kelvin'/celsius)."""
        if self.options['scale'] == "fahrenheit":
            temp = (pwnagotchi.temperature() * 9 / 5) + 32
            symbol = "f"
        elif self.options['scale'] == "kelvin":
            temp = pwnagotchi.temperature() + 273.15
            symbol = "k"
        else:
            # default to celsius
            temp = pwnagotchi.temperature()
            symbol = "c"
        return f"{temp}{symbol}"

    def cpu_freq(self):
        """Current CPU frequency in GHz, e.g. '1.4G'."""
        with open('/sys/devices/system/cpu/cpu0/cpufreq/scaling_cur_freq', 'rt') as fp:
            return f"{round(float(fp.readline())/1000000, 1)}G"

    def pad_text(self, data):
        # right-align into a FIELD_WIDTH-char column
        return " " * (self.FIELD_WIDTH - len(data)) + data

    def on_ui_setup(self, ui):
        """Create the UI elements for the configured fields and orientation."""
        try:
            # Configure field list
            self.fields = self.options['fields'].split(',')
            self.fields = [x.strip() for x in self.fields if x.strip() in self.ALLOWED_FIELDS.keys()]
            self.fields = self.fields[:3]  # limit to the first 3 fields
        except Exception:
            # Set default value
            self.fields = self.DEFAULT_FIELDS
        try:
            # Configure line_spacing
            line_spacing = int(self.options['linespacing'])
        except Exception:
            # Set default value
            line_spacing = self.LINE_SPACING
        try:
            # Configure position from a "x,y" config string
            pos = self.options['position'].split(',')
            pos = [int(x.strip()) for x in pos]
            if self.options['orientation'] == "vertical":
                v_pos = (pos[0], pos[1])
            else:
                h_pos = (pos[0], pos[1])
        except Exception:
            # Set default position based on screen type
            if ui.is_waveshare_v2():
                h_pos = (178, 84)
                v_pos = (197, 74)
            elif ui.is_waveshare_v1():
                h_pos = (170, 80)
                v_pos = (165, 61)
            elif ui.is_waveshare144lcd():
                h_pos = (53, 77)
                v_pos = (73, 67)
            elif ui.is_inky():
                h_pos = (140, 68)
                v_pos = (160, 54)
            elif ui.is_waveshare2in7():
                h_pos = (192, 138)
                v_pos = (211, 122)
            else:
                h_pos = (155, 76)
                v_pos = (175, 61)
        if self.options['orientation'] == "vertical":
            # Dynamically create the required LabeledValue objects
            for idx, field in enumerate(self.fields):
                v_pos_x = v_pos[0]
                # shift up when fewer than 3 fields are shown
                v_pos_y = v_pos[1] + ((len(self.fields) - 3) * -1 * line_spacing)
                ui.add_element(
                    f"memtemp_{field}",
                    LabeledValue(
                        color=BLACK,
                        label=f"{self.pad_text(field)}:",
                        value="-",
                        position=(v_pos_x, v_pos_y + (idx * line_spacing)),
                        label_font=fonts.Small,
                        text_font=fonts.Small,
                        label_spacing=self.LABEL_SPACING,
                    )
                )
        else:
            # default to horizontal: one header row plus one data row
            h_pos_x = h_pos[0] + ((len(self.fields) - 3) * -1 * 25)
            h_pos_y = h_pos[1]
            ui.add_element(
                'memtemp_header',
                Text(
                    color=BLACK,
                    value=" ".join([self.pad_text(x) for x in self.fields]),
                    position=(h_pos_x, h_pos_y),
                    font=fonts.Small,
                )
            )
            ui.add_element(
                'memtemp_data',
                Text(
                    color=BLACK,
                    value=" ".join([self.pad_text("-") for x in self.fields]),
                    position=(h_pos_x, h_pos_y + line_spacing),
                    font=fonts.Small,
                )
            )

    def on_unload(self, ui):
        with ui._lock:
            if self.options['orientation'] == "vertical":
                for idx, field in enumerate(self.fields):
                    ui.remove_element(f"memtemp_{field}")
            else:
                # default to horizontal
                ui.remove_element('memtemp_header')
                ui.remove_element('memtemp_data')

    def on_ui_update(self, ui):
        with ui._lock:
            if self.options['orientation'] == "vertical":
                for idx, field in enumerate(self.fields):
                    ui.set(f"memtemp_{field}", getattr(self, self.ALLOWED_FIELDS[field])())
            else:
                # default to horizontal
                data = " ".join([self.pad_text(getattr(self, self.ALLOWED_FIELDS[x])()) for x in self.fields])
                ui.set('memtemp_data', data)

View File

@ -1,150 +0,0 @@
import logging
import json
import os
import threading
import requests
import time
import pwnagotchi.plugins as plugins
from pwnagotchi.utils import StatusFile
class NetPos(plugins.Plugin):
    __author__ = 'zenzen san'
    __version__ = '2.0.3'
    __license__ = 'GPL3'
    __description__ = """Saves a json file with the access points with more signal
whenever a handshake is captured.
When internet is available the files are converted in geo locations
using Mozilla LocationService """

    API_URL = 'https://location.services.mozilla.com/v1/geolocate?key={api}'

    def __init__(self):
        # persists the list of net-pos files already converted to geo files
        self.report = StatusFile('/root/.net_pos_saved', data_format='json')
        # files that errored this session; retried only after a restart
        self.skip = list()
        self.ready = False
        self.lock = threading.Lock()
        self.options = dict()

    def on_loaded(self):
        if 'api_key' not in self.options or ('api_key' in self.options and not self.options['api_key']):
            logging.error("NET-POS: api_key isn't set. Can't use mozilla's api.")
            return
        if 'api_url' in self.options:
            self.API_URL = self.options['api_url']
        self.ready = True
        logging.info("net-pos plugin loaded.")
        logging.debug(f"net-pos: use api_url: {self.API_URL}")

    def _append_saved(self, path):
        """Append one path (str) or several (list) to the saved-files log."""
        to_save = list()
        if isinstance(path, str):
            to_save.append(path)
        elif isinstance(path, list):
            to_save += path
        else:
            raise TypeError("Expected list or str, got %s" % type(path))
        with open('/root/.net_pos_saved', 'a') as saved_file:
            for x in to_save:
                saved_file.write(x + "\n")

    def on_internet_available(self, agent):
        """Convert any pending .net-pos.json files into .geo.json locations."""
        if self.lock.locked():
            return
        with self.lock:
            if self.ready:
                config = agent.config()
                display = agent.view()
                reported = self.report.data_field_or('reported', default=list())
                handshake_dir = config['bettercap']['handshakes']
                all_files = os.listdir(handshake_dir)
                all_np_files = [os.path.join(handshake_dir, filename)
                                for filename in all_files
                                if filename.endswith('.net-pos.json')]
                new_np_files = set(all_np_files) - set(reported) - set(self.skip)
                if new_np_files:
                    logging.debug("NET-POS: Found %d new net-pos files. Fetching positions ...", len(new_np_files))
                    display.set('status', f"Found {len(new_np_files)} new net-pos files. Fetching positions ...")
                    display.update(force=True)
                    for idx, np_file in enumerate(new_np_files):
                        geo_file = np_file.replace('.net-pos.json', '.geo.json')
                        if os.path.exists(geo_file):
                            # got already the position
                            reported.append(np_file)
                            self.report.update(data={'reported': reported})
                            continue
                        try:
                            geo_data = self._get_geo_data(np_file)  # returns json obj
                        except requests.exceptions.RequestException as req_e:
                            logging.error("NET-POS: %s - RequestException: %s", np_file, req_e)
                            # append (not +=): += on a list extends it with the
                            # path's individual characters, so the skip check
                            # never matched and failures were retried forever
                            self.skip.append(np_file)
                            continue
                        except json.JSONDecodeError as js_e:
                            logging.error("NET-POS: %s - JSONDecodeError: %s, removing it...", np_file, js_e)
                            os.remove(np_file)
                            continue
                        except OSError as os_e:
                            logging.error("NET-POS: %s - OSError: %s", np_file, os_e)
                            self.skip.append(np_file)
                            continue
                        with open(geo_file, 'w+t') as sf:
                            json.dump(geo_data, sf)
                        reported.append(np_file)
                        self.report.update(data={'reported': reported})
                        display.set('status', f"Fetching positions ({idx + 1}/{len(new_np_files)})")
                        display.update(force=True)

    def on_handshake(self, agent, filename, access_point, client_station):
        """Save the strongest visible APs next to the handshake pcap."""
        netpos = self._get_netpos(agent)
        if not netpos['wifiAccessPoints']:
            return
        netpos["ts"] = int("%.0f" % time.time())
        netpos_filename = filename.replace('.pcap', '.net-pos.json')
        logging.debug("NET-POS: Saving net-location to %s", netpos_filename)
        try:
            with open(netpos_filename, 'w+t') as net_pos_file:
                json.dump(netpos, net_pos_file)
        except OSError as os_e:
            logging.error("NET-POS: %s", os_e)

    def _get_netpos(self, agent):
        """Build the MLS request payload from the strongest visible APs."""
        aps = agent.get_access_points()
        netpos = dict()
        netpos['wifiAccessPoints'] = list()
        # 6 seems a good number to save a wifi networks location
        for access_point in sorted(aps, key=lambda i: i['rssi'], reverse=True)[:6]:
            netpos['wifiAccessPoints'].append({'macAddress': access_point['mac'],
                                               'signalStrength': access_point['rssi']})
        return netpos

    def _get_geo_data(self, path, timeout=30):
        """POST the saved net-pos payload to the location API.

        Raises OSError / json.JSONDecodeError (bad file) or
        requests.exceptions.RequestException (network failure).
        """
        geourl = self.API_URL.format(api=self.options['api_key'])
        with open(path, "r") as json_file:
            data = json.load(json_file)
        result = requests.post(geourl,
                               json=data,
                               timeout=timeout)
        return_geo = result.json()
        # .get(): older net-pos files may predate the "ts" field; the bare
        # data["ts"] raised a KeyError that no caller caught
        if data.get("ts"):
            return_geo["ts"] = data["ts"]
        return return_geo

View File

@ -1,147 +0,0 @@
import os
import csv
import logging
import re
import requests
from datetime import datetime
from threading import Lock
from pwnagotchi.utils import StatusFile, remove_whitelisted
import pwnagotchi.plugins as plugins
from json.decoder import JSONDecodeError
class OnlineHashCrack(plugins.Plugin):
    __author__ = '33197631+dadav@users.noreply.github.com'
    __version__ = '2.1.0'
    __license__ = 'GPL3'
    __description__ = 'This plugin automatically uploads handshakes to https://onlinehashcrack.com'

    def __init__(self):
        self.ready = False
        try:
            self.report = StatusFile('/root/.ohc_uploads', data_format='json')
        except JSONDecodeError:
            # corrupt status file: discard it and start with a fresh one
            os.remove('/root/.ohc_uploads')
            self.report = StatusFile('/root/.ohc_uploads', data_format='json')
        self.skip = list()  # handshakes that errored out this session
        self.lock = Lock()
        self.options = dict()

    def on_loaded(self):
        """
        Gets called when the plugin gets loaded
        """
        if 'email' not in self.options or ('email' in self.options and not self.options['email']):
            logging.error("OHC: Email isn't set. Can't upload to onlinehashcrack.com")
            return
        self.ready = True
        logging.info("OHC: OnlineHashCrack plugin loaded.")

    def _upload_to_ohc(self, path, timeout=30):
        """
        Uploads the file at *path* to onlinehashcrack.com.

        Raises requests.exceptions.RequestException on network failure and
        OSError if the file can't be opened.
        """
        with open(path, 'rb') as file_to_upload:
            data = {'email': self.options['email']}
            payload = {'file': file_to_upload}
            try:
                result = requests.post('https://api.onlinehashcrack.com',
                                       data=data,
                                       files=payload,
                                       timeout=timeout)
                if 'already been sent' in result.text:
                    logging.debug(f"{path} was already uploaded.")
            except requests.exceptions.RequestException as e:
                logging.debug(f"OHC: Got an exception while uploading {path} -> {e}")
                raise e

    def _download_cracked(self, save_file, timeout=120):
        """
        Downloads the cracked passwords and saves them to *save_file*.

        Raises requests.exceptions.RequestException or OSError on failure.
        """
        try:
            s = requests.Session()
            # NOTE(review): the response of this GET is unused; presumably it
            # establishes the session cookies needed by the export endpoint
            # below -- confirm against the OHC dashboard behaviour.
            s.get(self.options['dashboard'], timeout=timeout)
            result = s.get('https://www.onlinehashcrack.com/wpa-exportcsv', timeout=timeout)
            result.raise_for_status()
            with open(save_file, 'wb') as output_file:
                output_file.write(result.content)
        except requests.exceptions.RequestException as req_e:
            raise req_e
        except OSError as os_e:
            raise os_e

    def on_webhook(self, path, request):
        """Redirect the browser to the user's OHC dashboard task list."""
        import requests
        from flask import redirect
        s = requests.Session()
        s.get('https://www.onlinehashcrack.com/dashboard')
        r = s.post('https://www.onlinehashcrack.com/dashboard', data={'emailTasks': self.options['email'], 'submit': ''})
        return redirect(r.url, code=302)

    def on_internet_available(self, agent):
        """
        Called in manual mode when there's internet connectivity
        """
        if not self.ready or self.lock.locked():
            return
        with self.lock:
            display = agent.view()
            config = agent.config()
            reported = self.report.data_field_or('reported', default=list())
            handshake_dir = config['bettercap']['handshakes']
            handshake_filenames = os.listdir(handshake_dir)
            handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if
                               filename.endswith('.pcap')]
            # pull out whitelisted APs
            handshake_paths = remove_whitelisted(handshake_paths, config['main']['whitelist'])
            handshake_new = set(handshake_paths) - set(reported) - set(self.skip)
            if handshake_new:
                logging.info("OHC: Internet connectivity detected. Uploading new handshakes to onlinehashcrack.com")
                for idx, handshake in enumerate(handshake_new):
                    display.on_uploading(f"onlinehashcrack.com ({idx + 1}/{len(handshake_new)})")
                    try:
                        self._upload_to_ohc(handshake)
                        if handshake not in reported:
                            reported.append(handshake)
                            self.report.update(data={'reported': reported})
                            logging.debug(f"OHC: Successfully uploaded {handshake}")
                    except requests.exceptions.RequestException as req_e:
                        self.skip.append(handshake)
                        logging.debug("OHC: %s", req_e)
                        continue
                    except OSError as os_e:
                        self.skip.append(handshake)
                        logging.debug("OHC: %s", os_e)
                        continue
                display.on_normal()
            if 'dashboard' in self.options and self.options['dashboard']:
                cracked_file = os.path.join(handshake_dir, 'onlinehashcrack.cracked')
                if os.path.exists(cracked_file):
                    last_check = datetime.fromtimestamp(os.path.getmtime(cracked_file))
                    # fix: the original used timedelta.seconds, which ignores
                    # the days component -- a file last checked 25 hours ago
                    # would look "recent" again; total_seconds() is correct.
                    # (fromtimestamp never returns None, so that guard is gone)
                    if (datetime.now() - last_check).total_seconds() / (60 * 60) < 1:
                        return
                try:
                    self._download_cracked(cracked_file)
                    logging.info("OHC: Downloaded cracked passwords.")
                except requests.exceptions.RequestException as req_e:
                    logging.debug("OHC: %s", req_e)
                except OSError as os_e:
                    logging.debug("OHC: %s", os_e)
                if 'single_files' in self.options and self.options['single_files']:
                    # split the export CSV into one .pcap.cracked file per AP
                    with open(cracked_file, 'r') as cracked_list:
                        for row in csv.DictReader(cracked_list):
                            if row['password']:
                                filename = re.sub(r'[^a-zA-Z0-9]', '', row['ESSID']) + '_' + row['BSSID'].replace(':', '')
                                if os.path.exists(os.path.join(handshake_dir, filename + '.pcap')):
                                    with open(os.path.join(handshake_dir, filename + '.pcap.cracked'), 'w') as f:
                                        f.write(row['password'])

View File

@ -1,263 +0,0 @@
import os
import logging
import threading
from time import sleep
from datetime import datetime,timedelta
from pwnagotchi import plugins
from pwnagotchi.utils import StatusFile
from flask import render_template_string
from flask import jsonify
TEMPLATE = """
{% extends "base.html" %}
{% set active_page = "plugins" %}
{% block title %}
Session stats
{% endblock %}
{% block styles %}
{{ super() }}
<link rel="stylesheet" href="/css/jquery.jqplot.min.css"/>
<link rel="stylesheet" href="/css/jquery.jqplot.css"/>
<style>
div.chart {
height: 400px;
width: 100%;
}
div#session {
width: 100%;
}
</style>
{% endblock %}
{% block scripts %}
{{ super() }}
<script type="text/javascript" src="/js/jquery.jqplot.min.js"></script>
<script type="text/javascript" src="/js/jquery.jqplot.js"></script>
<script type="text/javascript" src="/js/plugins/jqplot.mobile.js"></script>
<script type="text/javascript" src="/js/plugins/jqplot.json2.js"></script>
<script type="text/javascript" src="/js/plugins/jqplot.dateAxisRenderer.js"></script>
<script type="text/javascript" src="/js/plugins/jqplot.highlighter.js"></script>
<script type="text/javascript" src="/js/plugins/jqplot.cursor.js"></script>
<script type="text/javascript" src="/js/plugins/jqplot.enhancedLegendRenderer.js"></script>
{% endblock %}
{% block script %}
$(document).ready(function(){
var ajaxDataRenderer = function(url, plot, options) {
var ret = null;
$.ajax({
async: false,
url: url,
dataType:"json",
success: function(data) {
ret = data;
}
});
return ret;
};
function loadFiles(url, elm) {
var data = ajaxDataRenderer(url);
var x = document.getElementById(elm);
$.each(data['files'], function( index, value ) {
var option = document.createElement("option");
option.text = value;
x.add(option);
});
}
function loadData(url, elm, title, fill) {
var data = ajaxDataRenderer(url);
var plot_os = $.jqplot(elm, data.values,{
title: title,
stackSeries: fill,
seriesDefaults: {
showMarker: !fill,
fill: fill,
fillAndStroke: fill
},
legend: {
show: true,
renderer: $.jqplot.EnhancedLegendRenderer,
placement: 'outsideGrid',
labels: data.labels,
location: 's',
rendererOptions: {
numberRows: '2',
},
rowSpacing: '0px'
},
axes:{
xaxis:{
renderer:$.jqplot.DateAxisRenderer,
tickOptions:{formatString:'%H:%M:%S'}
},
yaxis:{
tickOptions:{formatString:'%.2f'}
}
},
highlighter: {
show: true,
sizeAdjust: 7.5
},
cursor:{
show: true,
tooltipLocation:'sw'
}
}).replot({
axes:{
xaxis:{
renderer:$.jqplot.DateAxisRenderer,
tickOptions:{formatString:'%H:%M:%S'}
},
yaxis:{
tickOptions:{formatString:'%.2f'}
}
}
});
}
function loadSessionFiles() {
loadFiles('/plugins/session-stats/session', 'session');
$("#session").change(function() {
loadSessionData();
});
}
function loadSessionData() {
var x = document.getElementById("session");
var session = x.options[x.selectedIndex].text;
loadData('/plugins/session-stats/os' + '?session=' + session, 'chart_os', 'OS', false)
loadData('/plugins/session-stats/temp' + '?session=' + session, 'chart_temp', 'Temp', false)
loadData('/plugins/session-stats/wifi' + '?session=' + session, 'chart_wifi', 'Wifi', true)
loadData('/plugins/session-stats/duration' + '?session=' + session, 'chart_duration', 'Sleeping', true)
loadData('/plugins/session-stats/reward' + '?session=' + session, 'chart_reward', 'Reward', false)
loadData('/plugins/session-stats/epoch' + '?session=' + session, 'chart_epoch', 'Epochs', false)
}
loadSessionFiles();
loadSessionData();
setInterval(loadSessionData, 60000);
});
{% endblock %}
{% block content %}
<select id="session">
<option selected>Current</option>
</select>
<div id="chart_os" class="chart"></div>
<div id="chart_temp" class="chart"></div>
<div id="chart_wifi" class="chart"></div>
<div id="chart_duration" class="chart"></div>
<div id="chart_reward" class="chart"></div>
<div id="chart_epoch" class="chart"></div>
{% endblock %}
"""
class GhettoClock:
    """A self-advancing clock.

    A daemon thread bumps the tracked timestamp by exactly one second per
    tick, so now() keeps moving even if the system clock is changed
    underneath us (common on devices without an RTC).
    """

    def __init__(self):
        self.lock = threading.Lock()
        self._track = datetime.now()
        ticker = threading.Thread(target=self.counter, daemon=True)
        self._counter_thread = ticker
        ticker.start()

    def counter(self):
        """Background loop: advance the tracked time one second at a time."""
        while True:
            with self.lock:
                self._track = self._track + timedelta(seconds=1)
            sleep(1)

    def now(self):
        """Return the current tracked datetime (thread-safe)."""
        with self.lock:
            return self._track
class SessionStats(plugins.Plugin):
    __author__ = '33197631+dadav@users.noreply.github.com'
    __version__ = '0.1.0'
    __license__ = 'GPL3'
    __description__ = 'This plugin displays stats of the current session.'

    def __init__(self):
        self.lock = threading.Lock()  # guards self.stats and session file writes
        self.options = dict()
        self.stats = dict()  # "HH:MM:SS" -> epoch_data dict
        self.clock = GhettoClock()

    def on_loaded(self):
        """
        Gets called when the plugin gets loaded
        """
        # this has to happen in "loaded" because the options are not yet
        # available in the __init__
        os.makedirs(self.options['save_directory'], exist_ok=True)
        self.session_name = "stats_{}.json".format(self.clock.now().strftime("%Y_%m_%d_%H_%M"))
        self.session = StatusFile(os.path.join(self.options['save_directory'],
                                               self.session_name),
                                  data_format='json')
        logging.info("Session-stats plugin loaded.")

    def on_epoch(self, agent, epoch, epoch_data):
        """
        Save the epoch_data to self.stats
        """
        with self.lock:
            self.stats[self.clock.now().strftime("%H:%M:%S")] = epoch_data
            self.session.update(data={'data': self.stats})

    @staticmethod
    def extract_key_values(data, subkeys):
        """Build jqplot input: one [timestamp, value] series per key in *subkeys*."""
        result = dict()
        result['values'] = list()
        result['labels'] = subkeys
        for plot_key in subkeys:
            v = [[ts, d[plot_key]] for ts, d in data.items()]
            result['values'].append(v)
        return result

    def on_webhook(self, path, request):
        """Serve the stats page ("/") and the per-chart JSON endpoints."""
        if not path or path == "/":
            return render_template_string(TEMPLATE)
        # NOTE(review): session_param flows into os.path.join below without
        # sanitization -- a crafted value could traverse outside
        # save_directory; restrict it to a known filename.
        session_param = request.args.get('session')
        # fix: default to an empty key list so an unknown sub-path returns an
        # empty chart instead of raising NameError (HTTP 500)
        extract_keys = []
        if path == "os":
            extract_keys = ['cpu_load', 'mem_usage', ]
        elif path == "temp":
            extract_keys = ['temperature']
        elif path == "wifi":
            extract_keys = [
                'missed_interactions',
                'num_hops',
                'num_peers',
                'tot_bond',
                'avg_bond',
                'num_deauths',
                'num_associations',
                'num_handshakes',
            ]
        elif path == "duration":
            extract_keys = [
                'duration_secs',
                'slept_for_secs',
            ]
        elif path == "reward":
            extract_keys = [
                'reward',
            ]
        elif path == "epoch":
            extract_keys = [
                'active_for_epochs',
            ]
        elif path == "session":
            return jsonify({'files': os.listdir(self.options['save_directory'])})
        with self.lock:
            data = self.stats
            if session_param and session_param != 'Current':
                file_stats = StatusFile(os.path.join(self.options['save_directory'], session_param), data_format='json')
                data = file_stats.data_field_or('data', default=dict())
            return jsonify(SessionStats.extract_key_values(data, extract_keys))

View File

@ -1,147 +0,0 @@
import os
import logging
from threading import Lock
from functools import partial
from pwnagotchi import plugins
from pwnagotchi import reboot
def systemd_dropin(name, content):
    """Install a systemd drop-in override for *name* and reload the daemon.

    Writes *content* to /etc/systemd/system/<name>.d/switcher.conf, appending
    a '.service' suffix to *name* if it is missing.
    """
    if not name.endswith('.service'):
        name = '%s.service' % name
    override_dir = "/etc/systemd/system/%s.d/" % name
    os.makedirs(override_dir, exist_ok=True)
    conf_path = os.path.join(override_dir, "switcher.conf")
    with open(conf_path, "wt") as conf_file:
        conf_file.write(content)
    systemctl("daemon-reload")
def systemctl(command, unit=None):
    """Invoke `/bin/systemctl <command> [<unit>]` through the shell."""
    cmdline = "/bin/systemctl %s" % command
    if unit:
        cmdline = "%s %s" % (cmdline, unit)
    os.system(cmdline)
def run_task(name, options):
    # Execute one configured switcher task: dump its commands into a shell
    # script, wrap that script in a transient oneshot systemd service, then
    # either start it right away or (reboot mode) arm it together with a
    # reboot timer so it runs on the next boot.  The service removes itself
    # and the script once it has run.
    task_service_name = "switcher-%s-task.service" % name
    # save all the commands to a shell script
    script_dir = '/usr/local/bin/'
    script_path = os.path.join(script_dir, 'switcher-%s.sh' % name)
    os.makedirs(script_dir, exist_ok=True)
    with open(script_path, 'wt') as script_file:
        script_file.write('#!/bin/bash\n')
        for cmd in options['commands']:
            script_file.write('%s\n' % cmd)
    os.system("chmod a+x %s" % script_path)
    # here we create the service which runs the tasks
    # (ExecStart lines are prefixed with '-' so a failing step doesn't abort
    # the cleanup steps that follow)
    with open('/etc/systemd/system/%s' % task_service_name, 'wt') as task_service:
        task_service.write("""
[Unit]
Description=Executes the tasks of the pwnagotchi switcher plugin
After=pwnagotchi.service bettercap.service
[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=-/usr/local/bin/switcher-%s.sh
ExecStart=-/bin/rm /etc/systemd/system/%s
ExecStart=-/bin/rm /usr/local/bin/switcher-%s.sh
[Install]
WantedBy=multi-user.target
""" % (name, task_service_name, name))
    if 'reboot' in options and options['reboot']:
        # create a indication file!
        # if this file is set, we want the switcher-tasks to run
        open('/root/.switcher', 'a').close()
        # add condition: pwnagotchi/bettercap must NOT start while the
        # indicator file exists, so the task runs without interference
        systemd_dropin("pwnagotchi.service", """
[Unit]
ConditionPathExists=!/root/.switcher""")
        systemd_dropin("bettercap.service", """
[Unit]
ConditionPathExists=!/root/.switcher""")
        # the task service also cleans up the indicator and the reboot timer
        systemd_dropin(task_service_name, """
[Service]
ExecStart=-/bin/rm /root/.switcher
ExecStart=-/bin/rm /etc/systemd/system/switcher-reboot.timer""")
        # reboot again once the stopwatch (minutes) has elapsed
        with open('/etc/systemd/system/switcher-reboot.timer', 'wt') as reboot_timer:
            reboot_timer.write("""
[Unit]
Description=Reboot when time is up
ConditionPathExists=/root/.switcher
[Timer]
OnBootSec=%sm
Unit=reboot.target
[Install]
WantedBy=timers.target
""" % options['stopwatch'])
        systemctl("daemon-reload")
        systemctl("enable", "switcher-reboot.timer")
        systemctl("enable", task_service_name)
        reboot()
        return
    # non-reboot mode: just run the task service immediately
    systemctl("daemon-reload")
    systemctl("start", task_service_name)
class Switcher(plugins.Plugin):
    __author__ = '33197631+dadav@users.noreply.github.com'
    __version__ = '0.0.1'
    __name__ = 'switcher'
    __license__ = 'GPL3'
    __description__ = 'This plugin is a generic task scheduler.'

    def __init__(self):
        self.ready = False
        self.lock = Lock()
        # robustness: default the task table so a stray trigger() before
        # on_loaded() can't raise AttributeError
        self.tasks = dict()

    def trigger(self, name, *args, **kwargs):
        """Run the configured task for hook *name*, if present and enabled."""
        with self.lock:
            # fix: lstrip('on_') strips the *character set* {o, n, _}, not the
            # prefix -- a hook called e.g. 'night' would have been mangled to
            # 'ight'; strip the literal 'on_' prefix instead
            function_name = name[len('on_'):] if name.startswith('on_') else name
            if function_name in self.tasks:
                task = self.tasks[function_name]
                # tasks are disabled unless explicitly enabled in the config
                if not task.get('enabled'):
                    return
                run_task(function_name, task)

    def on_loaded(self):
        """Read the task table from the options and hook every plugin event."""
        if 'tasks' in self.options and self.options['tasks']:
            self.tasks = self.options['tasks']
        else:
            logging.debug('[switcher] No tasks found...')
            return
        logging.info("[switcher] is loaded.")
        # create hooks: every pwnagotchi event gets routed through trigger(),
        # which then looks up a task of the same name
        logging.debug("[switcher] creating hooks...")
        methods = ['webhook', 'internet_available', 'ui_setup', 'ui_update',
                   'unload', 'display_setup', 'ready', 'ai_ready', 'ai_policy',
                   'ai_training_start', 'ai_training_step', 'ai_training_end',
                   'ai_best_reward', 'ai_worst_reward', 'free_channel',
                   'bored', 'sad', 'excited', 'lonely', 'rebooting', 'wait',
                   'sleep', 'wifi_update', 'unfiltered_ap_list', 'association',
                   'deauthentication', 'channel_hop', 'handshake', 'epoch',
                   'peer_detected', 'peer_lost', 'config_changed']
        for m in methods:
            setattr(Switcher, 'on_%s' % m, partial(self.trigger, m))
        logging.debug("[switcher] triggers are ready to fire...")

View File

@ -1,92 +0,0 @@
# Based on UPS Lite v1.1 from https://github.com/xenDE
# Made specifically to address the problems caused by the hardware changes in 1.3. Oh yeah I also removed the auto-shutdown feature because it's kind of broken.
#
# To setup, see page six of this manual to see how to enable i2c:
# https://github.com/linshuqin329/UPS-Lite/blob/master/UPS-Lite_V1.3_CW2015/Instructions%20for%20UPS-Lite%20V1.3.pdf
#
# Follow page seven, install the dependencies (python-smbus) and copy this script over for later use:
# https://github.com/linshuqin329/UPS-Lite/blob/master/UPS-Lite_V1.3_CW2015/UPS_Lite_V1.3_CW2015.py
#
# Now, install this plugin by copying this to the 'available-plugins' folder in your pwnagotchi, install and enable the plugin with the commands:
# sudo pwnagotchi plugins install upslite_plugin_1_3
# sudo pwnagotchi plugins enable upslite_plugin_1_3
#
# Now restart raspberry pi. Once back up ensure upslite_plugin_1_3 plugin is turned on in the WebUI. If there is still '0%' on your battery meter
# run the script we saved earlier and ensure that the pwnagotchi is plugged in both at the battery and the raspberry pi. The script should start trying to
# read the battery, and should be successful once there's a USB cable running power to the battery supply.
import logging
import struct
import RPi.GPIO as GPIO
import pwnagotchi
import pwnagotchi.plugins as plugins
import pwnagotchi.ui.fonts as fonts
from pwnagotchi.ui.components import LabeledValue
from pwnagotchi.ui.view import BLACK
CW2015_ADDRESS = 0X62
CW2015_REG_VCELL = 0X02
CW2015_REG_SOC = 0X04
CW2015_REG_MODE = 0X0A
# TODO: add enable switch in config.yml an cleanup all to the best place
class UPS:
    """Best-effort reader for the UPS Lite v1.3 battery board (CW2015 gauge).

    All reads go over I2C bus 1; every accessor swallows read errors and
    returns a neutral value, since the gauge is unreadable while unpowered.
    """

    def __init__(self):
        # only import when the module is loaded and enabled
        import smbus
        # 0 = /dev/i2c-0 (port I2C0), 1 = /dev/i2c-1 (port I2C1)
        self._bus = smbus.SMBus(1)

    def voltage(self):
        """Return the cell voltage in volts, or 0.0 if the gauge can't be read."""
        try:
            read = self._bus.read_word_data(CW2015_ADDRESS, CW2015_REG_VCELL)
            # swap byte order: the register is read big-endian
            swapped = struct.unpack("<H", struct.pack(">H", read))[0]
            return swapped * 1.25 / 1000 / 16
        except Exception:
            # fix: narrowed from a bare `except:`, which also trapped
            # KeyboardInterrupt/SystemExit; keep the best-effort fallback
            return 0.0

    def capacity(self):
        """Return the state of charge in percent, or 0.0 on read error."""
        try:
            # fix: removed unused local `address = 0x36` left over from an
            # older gauge variant
            read = self._bus.read_word_data(CW2015_ADDRESS, CW2015_REG_SOC)
            swapped = struct.unpack("<H", struct.pack(">H", read))[0]
            return swapped / 256
        except Exception:
            return 0.0

    def charging(self):
        """Return '+' while GPIO4 reads high (external power present), else '-'."""
        try:
            GPIO.setmode(GPIO.BCM)
            GPIO.setup(4, GPIO.IN)
            return '+' if GPIO.input(4) == GPIO.HIGH else '-'
        except Exception:
            return '-'
class UPSLite(plugins.Plugin):
    __author__ = 'marbasec'
    __version__ = '1.3.0'
    __license__ = 'GPL3'
    __description__ = 'A plugin that will add a voltage indicator for the UPS Lite v1.3'

    def __init__(self):
        # the UPS reader is created in on_loaded so the smbus import inside
        # UPS.__init__ only happens once the plugin is actually enabled
        self.ups = None

    def on_loaded(self):
        # instantiate the I2C reader now that the plugin is enabled
        self.ups = UPS()

    def on_ui_setup(self, ui):
        # add a 'UPS' labeled value to the display, roughly centered in the
        # top bar (half the screen width plus a small offset)
        ui.add_element('ups', LabeledValue(color=BLACK, label='UPS', value='0%', position=(ui.width() / 2 + 15, 0),
                       label_font=fonts.Bold, text_font=fonts.Medium))

    def on_unload(self, ui):
        # remove the indicator under the UI lock so we don't race a redraw
        with ui._lock:
            ui.remove_element('ups')

    def on_ui_update(self, ui):
        # refresh the indicator: capacity percentage plus '+'/'-' charge flag
        capacity = self.ups.capacity()
        charging = self.ups.charging()
        ui.set('ups', "%2i%s" % (capacity, charging))

View File

@ -1,556 +0,0 @@
import logging
import json
import toml
import _thread
import pwnagotchi
from pwnagotchi import restart, plugins
from pwnagotchi.utils import save_config, merge_config
from flask import abort
from flask import render_template_string
INDEX = """
{% extends "base.html" %}
{% set active_page = "plugins" %}
{% block title %}
Webcfg
{% endblock %}
{% block meta %}
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=0" />
{% endblock %}
{% block styles %}
{{ super() }}
<style>
#divTop {
position: -webkit-sticky;
position: sticky;
top: 0px;
width: 100%;
font-size: 16px;
padding: 5px;
border: 1px solid #ddd;
margin-bottom: 5px;
}
#searchText {
width: 100%;
}
table {
table-layout: auto;
width: 100%;
}
table, th, td {
border: 1px solid black;
border-collapse: collapse;
}
th, td {
padding: 15px;
text-align: left;
}
table tr:nth-child(even) {
background-color: #eee;
}
table tr:nth-child(odd) {
background-color: #fff;
}
table th {
background-color: black;
color: white;
}
.remove {
background-color: #f44336;
color: white;
border: 2px solid #f44336;
padding: 4px 8px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: 12px;
margin: 4px 2px;
-webkit-transition-duration: 0.4s; /* Safari */
transition-duration: 0.4s;
cursor: pointer;
}
.remove:hover {
background-color: white;
color: black;
}
#btnSave {
position: -webkit-sticky;
position: sticky;
bottom: 0px;
width: 100%;
background-color: #0061b0;
border: none;
color: white;
padding: 15px 32px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: 16px;
cursor: pointer;
float: right;
}
#divTop {
display: table;
width: 100%;
}
#divTop > * {
display: table-cell;
}
#divTop > span {
width: 1%;
}
#divTop > input {
width: 100%;
}
@media screen and (max-width:700px) {
table, tr, td {
padding:0;
border:1px solid black;
}
table {
border:none;
}
tr:first-child, thead, th {
display:none;
border:none;
}
tr {
float: left;
width: 100%;
margin-bottom: 2em;
}
table tr:nth-child(odd) {
background-color: #eee;
}
td {
float: left;
width: 100%;
padding:1em;
}
td::before {
content:attr(data-label);
word-wrap: break-word;
background: #eee;
border-right:2px solid black;
width: 20%;
float:left;
padding:1em;
font-weight: bold;
margin:-1em 1em -1em -1em;
}
.del_btn_wrapper {
content:attr(data-label);
word-wrap: break-word;
background: #eee;
border-right:2px solid black;
width: 20%;
float:left;
padding:1em;
font-weight: bold;
margin:-1em 1em -1em -1em;
}
}
</style>
{% endblock %}
{% block content %}
<div id="divTop">
<input type="text" id="searchText" placeholder="Search for options ..." title="Type an option name">
<span><select id="selAddType"><option value="text">Text</option><option value="number">Number</option></select></span>
<span><button id="btnAdd" type="button" onclick="addOption()">+</button></span>
</div>
<div id="divSaveTop">
<button id="btnSave" type="button" onclick="saveConfig()">Save and restart</button>
<button id="btnSave" type="button" onclick="saveConfigNoRestart()">Merge and Save (CAUTION)</button>
</div>
<div id="content"></div>
{% endblock %}
{% block script %}
function addOption() {
var input, table, tr, td, divDelBtn, btnDel, selType, selTypeVal;
input = document.getElementById("searchText");
inputVal = input.value;
selType = document.getElementById("selAddType");
selTypeVal = selType.options[selType.selectedIndex].value;
table = document.getElementById("tableOptions");
if (table) {
tr = table.insertRow();
// del button
divDelBtn = document.createElement("div");
divDelBtn.className = "del_btn_wrapper";
td = document.createElement("td");
td.setAttribute("data-label", "");
btnDel = document.createElement("Button");
btnDel.innerHTML = "X";
btnDel.onclick = function(){ delRow(this);};
btnDel.className = "remove";
divDelBtn.appendChild(btnDel);
td.appendChild(divDelBtn);
tr.appendChild(td);
// option
td = document.createElement("td");
td.setAttribute("data-label", "Option");
td.innerHTML = inputVal;
tr.appendChild(td);
// value
td = document.createElement("td");
td.setAttribute("data-label", "Value");
input = document.createElement("input");
input.type = selTypeVal;
input.value = "";
td.appendChild(input);
tr.appendChild(td);
input.value = "";
}
}
function saveConfig(){
// get table
var table = document.getElementById("tableOptions");
if (table) {
var json = tableToJson(table);
sendJSON("webcfg/save-config", json, function(response) {
if (response) {
if (response.status == "200") {
alert("Config got updated");
} else {
alert("Error while updating the config (err-code: " + response.status + ")");
}
}
});
}
}
function saveConfigNoRestart(){
// get table
var table = document.getElementById("tableOptions");
if (table) {
var json = tableToJson(table);
sendJSON("webcfg/merge-save-config", json, function(response) {
if (response) {
if (response.status == "200") {
alert("Config got updated");
} else {
alert("Error while updating the config (err-code: " + response.status + ")");
}
}
});
}
}
var searchInput = document.getElementById("searchText");
searchInput.onkeyup = function() {
var filter, table, tr, td, i, txtValue;
filter = searchInput.value.toUpperCase();
table = document.getElementById("tableOptions");
if (table) {
tr = table.getElementsByTagName("tr");
for (i = 0; i < tr.length; i++) {
td = tr[i].getElementsByTagName("td")[1];
if (td) {
txtValue = td.textContent || td.innerText;
if (txtValue.toUpperCase().indexOf(filter) > -1) {
tr[i].style.display = "";
}else{
tr[i].style.display = "none";
}
}
}
}
}
function sendJSON(url, data, callback) {
var xobj = new XMLHttpRequest();
var csrf = "{{ csrf_token() }}";
xobj.open('POST', url);
xobj.setRequestHeader("Content-Type", "application/json");
xobj.setRequestHeader('x-csrf-token', csrf);
xobj.onreadystatechange = function () {
if (xobj.readyState == 4) {
callback(xobj);
}
};
xobj.send(JSON.stringify(data));
}
function loadJSON(url, callback) {
var xobj = new XMLHttpRequest();
xobj.overrideMimeType("application/json");
xobj.open('GET', url, true);
xobj.onreadystatechange = function () {
if (xobj.readyState == 4 && xobj.status == "200") {
callback(JSON.parse(xobj.responseText));
}
};
xobj.send(null);
}
// https://stackoverflow.com/questions/19098797/fastest-way-to-flatten-un-flatten-nested-json-objects
function unFlattenJson(data) {
"use strict";
if (Object(data) !== data || Array.isArray(data))
return data;
var result = {}, cur, prop, idx, last, temp, inarray;
for(var p in data) {
cur = result, prop = "", last = 0, inarray = false;
do {
idx = p.indexOf(".", last);
temp = p.substring(last, idx !== -1 ? idx : undefined);
inarray = temp.startsWith('#') && !isNaN(parseInt(temp.substring(1)))
cur = cur[prop] || (cur[prop] = (inarray ? [] : {}));
if (inarray){
prop = temp.substring(1);
}else{
prop = temp;
}
last = idx + 1;
} while(idx >= 0);
cur[prop] = data[p];
}
return result[""];
}
function flattenJson(data) {
var result = {};
function recurse (cur, prop) {
if (Object(cur) !== cur) {
result[prop] = cur;
} else if (Array.isArray(cur)) {
for(var i=0, l=cur.length; i<l; i++)
recurse(cur[i], prop ? prop+".#"+i : ""+i);
if (l == 0)
result[prop] = [];
} else {
var isEmpty = true;
for (var p in cur) {
isEmpty = false;
recurse(cur[p], prop ? prop+"."+p : p);
}
if (isEmpty)
result[prop] = {};
}
}
recurse(data, "");
return result;
}
function delRow(btn) {
var tr = btn.parentNode.parentNode.parentNode;
tr.parentNode.removeChild(tr);
}
function jsonToTable(json) {
var table = document.createElement("table");
table.id = "tableOptions";
// create header
var tr = table.insertRow();
var thDel = document.createElement("th");
thDel.innerHTML = "";
var thOpt = document.createElement("th");
thOpt.innerHTML = "Option";
var thVal = document.createElement("th");
thVal.innerHTML = "Value";
tr.appendChild(thDel);
tr.appendChild(thOpt);
tr.appendChild(thVal);
var td, divDelBtn, btnDel;
// iterate over keys
Object.keys(json).forEach(function(key) {
tr = table.insertRow();
// del button
divDelBtn = document.createElement("div");
divDelBtn.className = "del_btn_wrapper";
td = document.createElement("td");
td.setAttribute("data-label", "");
btnDel = document.createElement("Button");
btnDel.innerHTML = "X";
btnDel.onclick = function(){ delRow(this);};
btnDel.className = "remove";
divDelBtn.appendChild(btnDel);
td.appendChild(divDelBtn);
tr.appendChild(td);
// option
td = document.createElement("td");
td.setAttribute("data-label", "Option");
td.innerHTML = key;
tr.appendChild(td);
// value
td = document.createElement("td");
td.setAttribute("data-label", "Value");
if(typeof(json[key])==='boolean'){
input = document.createElement("select");
input.setAttribute("id", "boolSelect");
tvalue = document.createElement("option");
tvalue.setAttribute("value", "true");
ttext = document.createTextNode("True")
tvalue.appendChild(ttext);
fvalue = document.createElement("option");
fvalue.setAttribute("value", "false");
ftext = document.createTextNode("False");
fvalue.appendChild(ftext);
input.appendChild(tvalue);
input.appendChild(fvalue);
input.value = json[key];
document.body.appendChild(input);
td.appendChild(input);
tr.appendChild(td);
} else {
input = document.createElement("input");
if(Array.isArray(json[key])) {
input.type = 'text';
input.value = '[]';
}else{
input.type = typeof(json[key]);
input.value = json[key];
}
td.appendChild(input);
tr.appendChild(td);
}
});
return table;
}
function tableToJson(table) {
var rows = table.getElementsByTagName("tr");
var i, td, key, value;
var json = {};
for (i = 0; i < rows.length; i++) {
td = rows[i].getElementsByTagName("td");
if (td.length == 3) {
// td[0] = del button
key = td[1].textContent || td[1].innerText;
var input = td[2].getElementsByTagName("input");
var select = td[2].getElementsByTagName("select");
if (input && input != undefined && input.length > 0 ) {
if (input[0].type == "text") {
if (input[0].value.startsWith("[") && input[0].value.endsWith("]")) {
json[key] = JSON.parse(input[0].value);
}else{
json[key] = input[0].value;
}
}else if (input[0].type == "number") {
json[key] = Number(input[0].value);
}
} else if(select && select != undefined && select.length > 0) {
var myValue = select[0].options[select[0].selectedIndex].value;
json[key] = myValue === 'true';
}
}
}
return unFlattenJson(json);
}
loadJSON("webcfg/get-config", function(response) {
var flat_json = flattenJson(response);
var table = jsonToTable(flat_json);
var divContent = document.getElementById("content");
divContent.innerHTML = "";
divContent.appendChild(table);
});
{% endblock %}
"""
def serializer(obj):
    """`json.dumps` default hook: encode sets as lists, reject anything else.

    Raises TypeError for any non-set object so json falls back to its own
    error handling.
    """
    if not isinstance(obj, set):
        raise TypeError
    return list(obj)
class WebConfig(plugins.Plugin):
    __author__ = '33197631+dadav@users.noreply.github.com'
    __version__ = '1.0.0'
    __license__ = 'GPL3'
    __description__ = 'This plugin allows the user to make runtime changes.'

    def __init__(self):
        self.ready = False        # set once the config has been received
        self.mode = 'MANU'        # restart mode passed to restart(): MANU or AUTO
        self._agent = None        # last agent seen, used to patch its live config

    def on_config_changed(self, config):
        # keep a reference to the live config; the webhook serves/merges it
        self.config = config
        self.ready = True

    def on_ready(self, agent):
        # remember the agent and derive the restart mode from its current mode
        self._agent = agent
        self.mode = 'MANU' if agent.mode == 'manual' else 'AUTO'

    def on_internet_available(self, agent):
        # same bookkeeping as on_ready -- keeps mode fresh in manual sessions
        self._agent = agent
        self.mode = 'MANU' if agent.mode == 'manual' else 'AUTO'

    def on_loaded(self):
        """
        Gets called when the plugin gets loaded
        """
        logging.info("webcfg: Plugin loaded.")

    def on_webhook(self, path, request):
        """
        Serves the current configuration

        GET  /            -> the editor page (INDEX template)
        GET  get-config   -> the live config as JSON
        POST save-config  -> overwrite config.toml and restart
        POST merge-save-config -> merge into the running config and save,
                                  without restarting
        """
        if not self.ready:
            return "Plugin not ready"
        if request.method == "GET":
            if path == "/" or not path:
                return render_template_string(INDEX)
            elif path == "get-config":
                # send configuration (serializer turns sets into lists)
                return json.dumps(self.config, default=serializer)
            else:
                abort(404)
        elif request.method == "POST":
            if path == "save-config":
                try:
                    save_config(request.get_json(), '/etc/pwnagotchi/config.toml')  # test
                    # restart in a background thread so this response can
                    # still be delivered to the browser
                    _thread.start_new_thread(restart, (self.mode,))
                    return "success"
                except Exception as ex:
                    logging.error(ex)
                    return "config error", 500
            elif path == "merge-save-config":
                try:
                    # patch every live copy of the config: plugin, global
                    # module state, and (if known) the running agent
                    self.config = merge_config(request.get_json(), self.config)
                    pwnagotchi.config = merge_config(request.get_json(), pwnagotchi.config)
                    logging.debug("PWNAGOTCHI CONFIG:\n%s" % repr(pwnagotchi.config))
                    if self._agent:
                        self._agent._config = merge_config(request.get_json(), self._agent._config)
                        logging.debug(" Agent CONFIG:\n%s" % repr(self._agent._config))
                    logging.debug(" Updated CONFIG:\n%s" % request.get_json())
                    save_config(request.get_json(), '/etc/pwnagotchi/config.toml')  # test
                    return "success"
                except Exception as ex:
                    logging.error("[webcfg mergesave] %s" % ex)
                    return "config error", 500
        # any other method/path combination
        abort(404)

View File

@ -1,287 +0,0 @@
<html>
<head>
<meta http-equiv="Content-Type" content="text/xml; charset=utf-8" />
<title>GPS MAP</title>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.5.1/dist/leaflet.css"/>
<link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/1.4.1/MarkerCluster.css" />
<link rel="stylesheet" type="text/css" href="https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/1.4.1/MarkerCluster.Default.css" />
<script type='text/javascript' src="https://unpkg.com/leaflet@1.5.1/dist/leaflet.js"></script>
<script type='text/javascript' src='https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/1.4.1/leaflet.markercluster.js'></script>
<style type="text/css">
/* for map */
html, body { height: 100%; width: 100%; margin:0; background-color:#000;}
.pwnAPPin path {
fill: #ce7575;
}
.pwnAPPinOpen path {
fill: #76ce75;
}
/* animated ap marker */
.pwnAPPin .ring_outer, .pwnAPPinOpen .ring_outer {
animation: opacityPulse 2s cubic-bezier(1, 0.14, 1, 1);
animation-iteration-count: infinite;
opacity: .5;
}
.pwnAPPin .ring_inner, .pwnAPPinOpen .ring_inner {
animation: opacityPulse 2s cubic-bezier(0.4, 0.74, 0.56, 0.82);
animation-iteration-count: infinite;
opacity: .8;
}
@keyframes opacityPulse {
0% {
opacity: 0.1;
}
50% {
opacity: 1.0;
}
100% {
opacity: 0.1;
}
}
@keyframes bounceInDown {
from, 60%, 75%, 90%, to {
animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1);
}
0% {
opacity: 0;
transform: translate3d(0, -3000px, 0);
}
60% {
opacity: 1;
transform: translate3d(0, 5px, 0);
}
75% {
transform: translate3d(0, -3px, 0);
}
90% {
transform: translate3d(0, 5px, 0);
}
to {
transform: none;
}
}
.bounceInDown {
animation-name: bounceInDown;
animation-duration: 2s;
animation-fill-mode: both;
}
/* animated radar */
.radar {
animation: pulsate 1s ease-out;
-webkit-animation: pulsate 1s ease-out;
-webkit-animation-iteration-count: infinite;
/* opacity: 0.0 */
}
#loading {
top: 50%;
left: 50%;
position: fixed;
background-color: rgba(255, 255, 255, 0.9);
border: 0.5vw #ff0000 solid;
border-radius: 2vw;
padding: 5vw;
transform: translateX(-50%) translateY(-50%);
text-align:center;
display: none;
}
#loading .face { font-size:8vw; }
#loading .text { position:absolute;bottom:0;text-align:center; font-size: 2vw;color:#a0a0a0;}
#filterbox { position: fixed;top:0px;left:0px;z-index:999999;margin-left:55px;width:100%;height:20px;border-bottom:2px solid #303030;display: grid;grid-template-columns: 1fr 0.1fr;grid-template-rows: 1fr;grid-template-areas: ". .";}
#search { grid-area: 1 / 1 / 2 / 2;height:30px;padding:3px;background-color:#000;color:#e0e0e0;border:none;}
#matchcount { grid-area: 1 / 2 / 2 / 3;height:30px;margin-right:55px;padding-right:5px;background-color:#000;color:#a0a0a0;font-weight:bold;}
#mapdiv { width:100%; height: 100%; }
</style>
</head>
<body>
<div id="mapdiv"></div>
<div id="filterbox">
<input type="text" id="search" placeholder="filter: #cracked #notcracked AA:BB:CC aabbcc AndroidAP ..."/>
<div id="matchcount">0&nbsp;APs</div>
</div>
<div id="loading"><div class="face"><nobr>(⌐■&nbsp;<span id="loading_ap_img"></span>&nbsp;■)</nobr></div><div class="text" id="loading_infotext">loading positions...</div></div>
<script type="text/javascript">
// Fetch JSON from `url` and pass the raw response text to `callback` on success.
// Shows the #loading overlay while the request is in flight.
function loadJSON(url, callback) {
    document.getElementById("loading").style.display = "flex";
    var xobj = new XMLHttpRequest();
    xobj.overrideMimeType("application/json");
    xobj.open('GET', url, true);
    xobj.onreadystatechange = function () {
        if (xobj.readyState === 4) {
            // xobj.status is a number; compare numerically instead of == "200"
            if (xobj.status === 200) {
                callback(xobj.responseText);
            } else {
                // surface failures instead of leaving the overlay up forever
                document.getElementById("loading_infotext").innerHTML = "loading failed (HTTP " + xobj.status + ")";
            }
        }
    };
    xobj.send(null);
}
// Escape the five HTML metacharacters so untrusted strings (ssid, password)
// can be embedded safely in popup markup. `&` must be replaced first.
function escapeHtml(unsafe) {
    var entityMap = [
        [/&/g, "&amp;"],
        [/</g, "&lt;"],
        [/>/g, "&gt;"],
        [/"/g, "&quot;"],
        [/'/g, "&#039;"]
    ];
    var result = String(unsafe);
    entityMap.forEach(function (pair) {
        result = result.replace(pair[0], pair[1]);
    });
    return result;
}
// Uppercase a raw mac string ("aabbccddeeff") and regroup it as "AA:BB:CC:DD:EE:FF".
// Strings outside the 3..16 char window (already formatted or junk) are only uppercased.
function formatMacAddress(macAddress) {
    if (macAddress === null) {
        return macAddress;
    }
    var formatted = macAddress.toUpperCase();
    if (formatted.length >= 3 && formatted.length <= 16) {
        formatted = formatted
            .replace(/\W/ig, '')        // drop any non-word separators
            .replace(/(.{2})/g, "$1:")  // insert ":" after every byte
            .replace(/:+$/, '');        // strip the trailing colon
    }
    return formatted;
}
// select your map theme from https://leaflet-extras.github.io/leaflet-providers/preview/
// use 2 layers with alpha for a nice dark style
var Esri_WorldImagery = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
attribution: 'Tiles &copy; Esri &mdash; Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
});
var CartoDB_DarkMatter = L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', {
attribution: '&copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors &copy; <a href="https://carto.com/attributions">CARTO</a>',
subdomains: 'abcd',
opacity:0.8,
maxZoom: 20
});
var mymap = L.map('mapdiv');
var svg = '<svg class="pwnAPPin" width="80px" height="60px" viewBox="0 0 44 28" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><desc>pwnagotchi AP icon.</desc><defs><linearGradient x1="50%" y1="0%" x2="50%" y2="100%" id="linearGradient-1"><stop stop-color="#FFFFFF" offset="0%"></stop><stop stop-color="#000000" offset="100%"></stop></linearGradient></defs><g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"><g id="marker"><path class="ring_outer" d="M28.6,8 C34.7,9.4 39,12.4 39,16 C39,20.7 31.3,24.6 21.7,24.6 C12.1,24.6 4.3,20.7 4.3,16 C4.3,12.5 8.5,9.5 14.6,8.1 C15.3,8 14.2,6.6 13.3,6.8 C5.5,8.4 0,12.2 0,16.7 C0,22.7 9.7,27.4 21.7,27.4 C33.7,27.4 43.3,22.6 43.3,16.7 C43.3,12.1 37.6,8.3 29.6,6.7 C28.8,6.5 27.8,7.9 28.6,8.1 L28.6,8 Z" id="Shape" fill="#878787" fill-rule="nonzero"></path><path class="ring_inner" d="M28.1427313,11.0811939 C30.4951542,11.9119726 32.0242291,13.2174821 32.0242291,14.6416742 C32.0242291,17.2526931 27.6722467,19.2702986 22.261674,19.2702986 C16.8511013,19.2702986 12.4991189,17.2526931 12.4991189,14.7603569 C12.4991189,13.5735301 13.4400881,12.505386 15.0867841,11.6746073 C15.792511,11.3185592 14.7339207,9.30095371 13.9105727,9.77568442 C10.6171806,10.9625112 8.5,12.9801167 8.5,15.2350876 C8.5,19.0329333 14.4986784,22.0000002 21.9088106,22.0000002 C29.2013216,22.0000002 35.2,19.0329333 35.2,15.2350876 C35.2,12.861434 32.7299559,10.6064632 28.8484581,9.30095371 C28.0251101,9.18227103 27.4370044,10.8438285 28.0251101,11.0811939 L28.1427313,11.0811939 Z" id="Shape" fill="#5F5F5F" fill-rule="nonzero"></path><g id="ap" transform="translate(13.000000, 0.000000)"><rect id="apfront" fill="#000000" x="0" y="14" width="18" height="4"></rect><polygon id="apbody" fill="url(#linearGradient-1)" points="3.83034404 10 14.169656 10 18 14 0 14"></polygon><circle class="ring_outer" id="led1" fill="#931F1F" cx="3" cy="16" r="1"></circle><circle class="ring_inner" id="led2" fill="#931F1F" 
cx="7" cy="16" r="1"></circle><circle class="ring_outer" id="led3" fill="#931F1F" cx="11" cy="16" r="1"></circle><circle class="ring_inner" id="led4" fill="#931F1F" cx="15" cy="16" r="1"></circle><polygon id="antenna2" fill="#000000" points="8.8173082 0 9.1826918 0 9.5 11 8.5 11"></polygon><polygon id="antenna3" fill="#000000" transform="translate(15.000000, 5.500000) rotate(15.000000) translate(-15.000000, -5.500000) " points="14.8173082 0 15.1826918 0 15.5 11 14.5 11"></polygon><polygon id="antenna1" fill="#000000" transform="translate(3.000000, 5.500000) rotate(-15.000000) translate(-3.000000, -5.500000) " points="2.8173082 0 3.1826918 0 3.5 11 2.5 11"></polygon></g></g></g></svg>';
var svgOpen = '<svg class="pwnAPPinOpen" width="80px" height="60px" viewBox="0 0 44 28" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><desc>pwnagotchi AP icon.</desc><defs><linearGradient x1="50%" y1="0%" x2="50%" y2="100%" id="linearGradient-1"><stop stop-color="#FFFFFF" offset="0%"></stop><stop stop-color="#000000" offset="100%"></stop></linearGradient></defs><g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd"><g id="marker"><path class="ring_outer" d="M28.6,8 C34.7,9.4 39,12.4 39,16 C39,20.7 31.3,24.6 21.7,24.6 C12.1,24.6 4.3,20.7 4.3,16 C4.3,12.5 8.5,9.5 14.6,8.1 C15.3,8 14.2,6.6 13.3,6.8 C5.5,8.4 0,12.2 0,16.7 C0,22.7 9.7,27.4 21.7,27.4 C33.7,27.4 43.3,22.6 43.3,16.7 C43.3,12.1 37.6,8.3 29.6,6.7 C28.8,6.5 27.8,7.9 28.6,8.1 L28.6,8 Z" id="Shape" fill="#878787" fill-rule="nonzero"></path><path class="ring_inner" d="M28.1427313,11.0811939 C30.4951542,11.9119726 32.0242291,13.2174821 32.0242291,14.6416742 C32.0242291,17.2526931 27.6722467,19.2702986 22.261674,19.2702986 C16.8511013,19.2702986 12.4991189,17.2526931 12.4991189,14.7603569 C12.4991189,13.5735301 13.4400881,12.505386 15.0867841,11.6746073 C15.792511,11.3185592 14.7339207,9.30095371 13.9105727,9.77568442 C10.6171806,10.9625112 8.5,12.9801167 8.5,15.2350876 C8.5,19.0329333 14.4986784,22.0000002 21.9088106,22.0000002 C29.2013216,22.0000002 35.2,19.0329333 35.2,15.2350876 C35.2,12.861434 32.7299559,10.6064632 28.8484581,9.30095371 C28.0251101,9.18227103 27.4370044,10.8438285 28.0251101,11.0811939 L28.1427313,11.0811939 Z" id="Shape" fill="#5F5F5F" fill-rule="nonzero"></path><g id="ap" transform="translate(13.000000, 0.000000)"><rect id="apfront" fill="#000000" x="0" y="14" width="18" height="4"></rect><polygon id="apbody" fill="url(#linearGradient-1)" points="3.83034404 10 14.169656 10 18 14 0 14"></polygon><circle class="ring_outer" id="led1" fill="#1f9321" cx="3" cy="16" r="1"></circle><circle class="ring_inner" id="led2" 
fill="#1f9321" cx="7" cy="16" r="1"></circle><circle class="ring_outer" id="led3" fill="#1f9321" cx="11" cy="16" r="1"></circle><circle class="ring_inner" id="led4" fill="#1f9321" cx="15" cy="16" r="1"></circle><polygon id="antenna2" fill="#000000" points="8.8173082 0 9.1826918 0 9.5 11 8.5 11"></polygon><polygon id="antenna3" fill="#000000" transform="translate(15.000000, 5.500000) rotate(15.000000) translate(-15.000000, -5.500000) " points="14.8173082 0 15.1826918 0 15.5 11 14.5 11"></polygon><polygon id="antenna1" fill="#000000" transform="translate(3.000000, 5.500000) rotate(-15.000000) translate(-3.000000, -5.500000) " points="2.8173082 0 3.1826918 0 3.5 11 2.5 11"></polygon></g></g></g></svg>';
document.getElementById('loading_ap_img').innerHTML = svg;
var myIcon = L.divIcon({
className: "leaflet-data-marker",
html: svg.replace('#','%23'),
iconAnchor : [40, 30],
iconSize : [80, 60],
popupAnchor : [0, -30],
});
var myIconOpen = L.divIcon({
className: "leaflet-data-marker",
html: svgOpen.replace('#','%23'),
iconAnchor : [40, 30],
iconSize : [80, 60],
popupAnchor : [0, -30],
});
var positionsLoaded = false;
var positions = [];
var accuracys = [];
var markers = [];
var marker_pos = [];
var markerClusters = L.markerClusterGroup();
// Rebuild the entire map from the global `positions` object, honoring the
// filter text in #search. Called after loading and on every filter change.
function drawPositions() {
count = 0;
//mymap.removeLayer(markerClusters);
// wipe every layer (tile layers included) and re-add them from scratch
mymap.eachLayer(function (layer) {
mymap.removeLayer(layer);
});
Esri_WorldImagery.addTo(mymap);
CartoDB_DarkMatter.addTo(mymap);
markerClusters = L.markerClusterGroup();
accuracys = [];
markers = [];
marker_pos = [];
filterText = document.getElementById("search").value;
//console.log(filterText);
Object.keys(positions).forEach(function(key) {
if(positions[key].lng){
// searchable haystack: ssid, formatted + raw mac, password, cracked tag
filterPattern =
positions[key].ssid + ' ' +
formatMacAddress(positions[key].mac) + ' ' +
positions[key].mac
;
if (positions[key].pass) {
filterPattern += positions[key].pass + ' #cracked';
} else {
filterPattern += ' #notcracked';
}
filterPattern = filterPattern.toLowerCase();
//console.log(filterPattern);
// every whitespace-separated search term must match (AND semantics)
var matched = true;
if (filterText) {
filterText.split(" ").forEach(function (item) {
if (!filterPattern.includes(item.toLowerCase())) {
matched = false;
}
});
}
if (matched) {
count++;
new_marker_pos = [positions[key].lat, positions[key].lng];
if (positions[key].acc) {
// accuracy circle capped at 500m; green for cracked APs, red otherwise
radius = Math.round(Math.min(positions[key].acc, 500));
markerColor = 'red';
markerColorCode = '#f03';
fillOpacity = 0.002;
if (positions[key].pass) {
markerColor = 'green';
markerColorCode = '#1aff00';
fillOpacity = 0.1;
}
accuracys.push(
L.circle(new_marker_pos, {
color: markerColor,
fillColor: markerColorCode,
fillOpacity: fillOpacity,
weight: 1,
opacity: 0.1,
radius: Math.min(positions[key].acc, 500),
}).setStyle({'className': 'radar'}).addTo(mymap)
);
}
// marker with the "open" (green LEDs) icon when a password is known
passInfo = '';
if (positions[key].pass) {
passInfo = '<br/><b>Pass:</b> '+escapeHtml(positions[key].pass);
newMarker = L.marker(new_marker_pos, { icon: myIconOpen, title: positions[key].ssid }); //.addTo(mymap);
} else {
newMarker = L.marker(new_marker_pos, { icon: myIcon, title: positions[key].ssid }); //.addTo(mymap);
}
newMarker.bindPopup("<b>"+escapeHtml(positions[key].ssid)+"</b><br><nobr>MAC: "+escapeHtml(formatMacAddress(positions[key].mac))+"</nobr><br/>"+"<nobr>position type: "+escapeHtml(positions[key].type)+"</nobr><br/>"+"<nobr>position accuracy: "+escapeHtml(Math.round(positions[key].acc))+"</nobr>"+passInfo, { maxWidth: "auto" });
markers.push(newMarker);
marker_pos.push(new_marker_pos);
markerClusters.addLayer( newMarker );
}
}
});
document.getElementById("matchcount").innerHTML = count + "&nbsp;APs";
if (count > 0) {
// zoom the map to the bounding box of all matched markers
mymap.addLayer( markerClusters );
var bounds = new L.LatLngBounds(marker_pos);
mymap.fitBounds(bounds);
document.getElementById("loading").style.display = "none";
} else {
document.getElementById("loading_infotext").innerHTML = "NO POSITION DATA FOUND :(";
}
}
// draw map on Enter in FilterInputField
// NOTE(review): addEventListener returns undefined, so `node` is always
// undefined — the assignment only keeps the listener wired up
const node = document.getElementById("search").addEventListener("keyup", function(event) {
if (event.key === "Enter") {
if (positionsLoaded) {
drawPositions();
}
}
});
// load positions once at startup, then render them
loadJSON("/plugins/webgpsmap/all", function(response) {
positions = JSON.parse(response);
positionsLoaded = true;
drawPositions();
});
// get current position and set marker in interval if https request
// (the browser geolocation API only works in secure contexts)
if (location.protocol === 'https:') {
var myLocationMarker = {};
// replace the previous own-position marker and schedule the next locate in 30s
function onLocationFound(e) {
if (myLocationMarker != undefined) {
mymap.removeLayer(myLocationMarker);
};
myLocationMarker = L.marker(e.latlng).addTo(mymap);
setTimeout(function(){ mymap.locate(); }, 30000);
}
mymap.on('locationfound', onLocationFound);
mymap.locate({setView: true});
}
</script>
</body></html>

View File

@ -1,413 +0,0 @@
import sys
import pwnagotchi.plugins as plugins
import logging
import os
import json
import re
from flask import Response
from functools import lru_cache
from dateutil.parser import parse
'''
webgpsmap shows existing position data stored in your /handshakes/ directory
the plugin does the following:
- search for *.pcap files in your /handshakes/ dir
- for every found .pcap file it looks for a .geo.json or .gps.json or .paw-gps.json file with
latitude+longitude data inside and shows this position on the map
- if a .cracked file with a plaintext password inside also exists, it reads the content and shows the
position as green instead of red and the password inside the infobox of the position
special:
you can save the html-map as one file for offline use or host on your own webspace with "/plugins/webgpsmap/offlinemap"
'''
class Webgpsmap(plugins.Plugin):
    """
    Serves an OpenStreetMap view of all handshake positions found in the
    handshakes directory via the pwnagotchi web UI.
    """
    __author__ = 'https://github.com/xenDE and https://github.com/dadav'
    __version__ = '1.4.0'
    __name__ = 'webgpsmap'
    __license__ = 'GPL3'
    __description__ = 'a plugin for pwnagotchi that shows a openstreetmap with positions of ap-handshakes in your webbrowser'

    # files already delivered to the browser (used by the disabled /newest endpoint)
    ALREADY_SENT = list()
    # files that failed to parse; never retried within this process
    SKIP = list()

    def __init__(self):
        self.ready = False

    def on_config_changed(self, config):
        """
        Remember the configuration; the plugin only answers requests after it arrived.
        """
        self.config = config
        self.ready = True

    def on_loaded(self):
        """
        Plugin got loaded
        """
        logging.info("[webgpsmap]: plugin loaded")

    def on_webhook(self, path, request):
        """
        Returns requested data.

        GET /           -> the html map page
        GET /all        -> json with all positions
        GET /offlinemap -> self-contained html download with positions embedded
        anything else   -> 404
        """
        # defaults:
        response_header_contenttype = None
        response_header_contentdisposition = None
        response_mimetype = "application/xhtml+xml"
        if not self.ready:
            response_data = bytes('''<html>
<head>
<meta charset="utf-8"/>
<style>body{font-size:1000%;}</style>
</head>
<body>Not ready yet</body>
</html>''', "utf-8")
            response_status = 500
            response_header_contenttype = 'text/html'
        elif request.method == "GET":
            if path == '/' or not path:
                # returns the html template
                self.ALREADY_SENT = list()
                try:
                    response_data = bytes(self.get_html(), "utf-8")
                except Exception as error:
                    logging.error(f"[webgpsmap] on_webhook / error: {error}")
                    return
                response_status = 200
                response_header_contenttype = 'text/html'
            elif path.startswith('all'):
                # returns all positions
                try:
                    self.ALREADY_SENT = list()
                    response_data = bytes(json.dumps(self.load_gps_from_dir(self.config['bettercap']['handshakes'])), "utf-8")
                    response_status = 200
                    response_mimetype = "application/json"
                    response_header_contenttype = 'application/json'
                except Exception as error:
                    logging.error(f"[webgpsmap] on_webhook all error: {error}")
                    return
            elif path.startswith('offlinemap'):
                # for downloading an all-in-one html file with positions embedded
                try:
                    self.ALREADY_SENT = list()
                    json_data = json.dumps(self.load_gps_from_dir(self.config['bettercap']['handshakes']))
                    html_data = self.get_html()
                    html_data = html_data.replace('var positions = [];', 'var positions = ' + json_data + ';positionsLoaded=true;drawPositions();')
                    response_data = bytes(html_data, "utf-8")
                    response_status = 200
                    response_header_contenttype = 'text/html'
                    response_header_contentdisposition = 'attachment; filename=webgpsmap.html'
                except Exception as error:
                    logging.error(f"[webgpsmap] on_webhook offlinemap: error: {error}")
                    return
            else:
                # unknown GET path
                response_data = bytes('''<html>
<head>
<meta charset="utf-8"/>
<style>body{font-size:1000%;}</style>
</head>
<body>4😋4</body>
</html>''', "utf-8")
                response_status = 404
        else:
            # unknown request.method
            response_data = bytes('''<html>
<head>
<meta charset="utf-8"/>
<style>body{font-size:1000%;}</style>
</head>
<body>4😋4 for bad boys</body>
</html>''', "utf-8")
            response_status = 404
        try:
            r = Response(response=response_data, status=response_status, mimetype=response_mimetype)
            if response_header_contenttype is not None:
                r.headers["Content-Type"] = response_header_contenttype
            if response_header_contentdisposition is not None:
                r.headers["Content-Disposition"] = response_header_contentdisposition
            return r
        except Exception as error:
            logging.error(f"[webgpsmap] on_webhook CREATING_RESPONSE error: {error}")
            return

    # cache 2048 items
    @lru_cache(maxsize=2048, typed=False)
    def _get_pos_from_file(self, path):
        """
        Cached PositionFile factory.

        NOTE(review): lru_cache on an instance method also keys on `self` and
        keeps it alive; acceptable here because the plugin is a long-lived
        singleton.
        """
        return PositionFile(path)

    def load_gps_from_dir(self, gpsdir, newest_only=False):
        """
        Parses the gps-data from disk.

        :param gpsdir: directory containing .pcap and position json files
        :param newest_only: only return positions not delivered before
        :return: dict keyed by "<ssid>_<mac>" with one entry per position
        """
        handshake_dir = gpsdir
        gps_data = dict()
        logging.info(f"[webgpsmap] scanning {handshake_dir}")
        all_files = os.listdir(handshake_dir)
        all_pcap_files = [os.path.join(handshake_dir, filename) for filename in all_files if filename.endswith('.pcap')]
        all_geo_or_gps_files = []
        for filename_pcap in all_pcap_files:
            filename_base = filename_pcap[:-5]  # remove ".pcap"
            logging.debug(f"[webgpsmap] found: {filename_base}")
            filename_position = None
            # later suffixes win, preserving the original preference:
            # .paw-gps.json over .geo.json over .gps.json
            for ext in (".gps.json", ".geo.json", ".paw-gps.json"):
                check_for = os.path.basename(filename_base) + ext
                if check_for in all_files:
                    filename_position = str(os.path.join(handshake_dir, check_for))
            logging.debug(f"[webgpsmap] end search for position data files and use {filename_position}")
            if filename_position is not None:
                all_geo_or_gps_files.append(filename_position)
        if newest_only:
            all_geo_or_gps_files = set(all_geo_or_gps_files) - set(self.ALREADY_SENT)
        logging.info(f"[webgpsmap] Found {len(all_geo_or_gps_files)} position-data files from {len(all_pcap_files)} handshakes. Fetching positions ...")
        for pos_file in all_geo_or_gps_files:
            try:
                pos = self._get_pos_from_file(pos_file)
                if pos.type() not in (PositionFile.GPS, PositionFile.GEO, PositionFile.PAWGPS):
                    continue
                ssid, mac = pos.ssid(), pos.mac()
                ssid = "unknown" if not ssid else ssid
                # an invalid mac is strange and should abort; a missing ssid is ok
                if not mac:
                    raise ValueError("Mac can't be parsed from filename")
                pos_type = 'unknown'
                if pos.type() == PositionFile.GPS:
                    pos_type = 'gps'
                elif pos.type() == PositionFile.GEO:
                    pos_type = 'geo'
                elif pos.type() == PositionFile.PAWGPS:
                    pos_type = 'paw'
                gps_data[ssid + "_" + mac] = {
                    'ssid': ssid,
                    'mac': mac,
                    'type': pos_type,
                    'lng': pos.lng(),
                    'lat': pos.lat(),
                    'acc': pos.accuracy(),
                    'ts_first': pos.timestamp_first(),
                    'ts_last': pos.timestamp_last(),
                }
                # attach the ap password if a .pcap.cracked file exists;
                # strip the known suffix instead of split(".")[0], which broke
                # for ssids containing dots
                base_name = os.path.basename(pos_file)
                for ext in ('.gps.json', '.geo.json', '.paw-gps.json'):
                    if base_name.endswith(ext):
                        base_name = base_name[:-len(ext)]
                        break
                if base_name + ".pcap.cracked" in all_files:
                    gps_data[ssid + "_" + mac]["pass"] = pos.password()
                # was `+= pos_file`, which appended every CHARACTER of the path
                self.ALREADY_SENT.append(pos_file)
            except json.JSONDecodeError as error:
                self.SKIP.append(pos_file)
                logging.error(f"[webgpsmap] JSONDecodeError in: {pos_file} - error: {error}")
                continue
            except ValueError as error:
                self.SKIP.append(pos_file)
                logging.error(f"[webgpsmap] ValueError: {pos_file} - error: {error}")
                continue
            except OSError as error:
                self.SKIP.append(pos_file)
                logging.error(f"[webgpsmap] OSError: {pos_file} - error: {error}")
                continue
        logging.info(f"[webgpsmap] loaded {len(gps_data)} positions")
        return gps_data

    def get_html(self):
        """
        Returns the html page (empty string if the template cannot be read).
        """
        # initialize so a read failure no longer raises NameError on return
        html_data = ""
        template_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "webgpsmap.html")
        try:
            # close the handle instead of leaking it (was: open(...).read())
            with open(template_file, "r") as template:
                html_data = template.read()
        except Exception as error:
            logging.error(f"[webgpsmap] error loading template file {template_file} - error: {error}")
        return html_data
class PositionFile:
    """
    Wraps a single gps / geo / paw-gps json file and gives unified access to
    its position data plus metadata derived from the filename.
    """
    GPS = 1
    GEO = 2
    PAWGPS = 3

    def __init__(self, path):
        """
        :param path: full path of the position json file
        :raises json.JSONDecodeError: if the file is not valid json
        :raises OSError: if the file cannot be read
        """
        self._file = path
        self._filename = os.path.basename(path)
        logging.debug(f"[webgpsmap] loading {path}")
        with open(path, 'r') as json_file:
            self._json = json.load(json_file)
        logging.debug(f"[webgpsmap] loaded {path}")

    def mac(self):
        """
        Returns the mac from filename
        """
        parsed_mac = re.search(r'.*_?([a-zA-Z0-9]{12})\.(?:gps|geo|paw-gps)\.json', self._filename)
        if parsed_mac:
            return parsed_mac.groups()[0]
        return None

    def ssid(self):
        """
        Returns the ssid from filename
        """
        parsed_ssid = re.search(r'(.+)_[a-zA-Z0-9]{12}\.(?:gps|geo|paw-gps)\.json', self._filename)
        if parsed_ssid:
            return parsed_ssid.groups()[0]
        return None

    def json(self):
        """
        returns the parsed json
        """
        return self._json

    def timestamp_first(self):
        """
        returns the timestamp of AP first seen
        """
        # best effort: use the creation time of the position file itself
        return int(round(os.path.getctime(self._file)))

    def timestamp_last(self):
        """
        returns the timestamp of AP last seen
        """
        if 'ts' in self._json:
            return self._json['ts']
        if 'Updated' in self._json:
            # convert gps datetime to unix timestamp: "2019-10-05T23:12:40.422996+01:00"
            return int(round(parse(self._json['Updated']).timestamp()))
        # fall back to the last modification time of the json file
        return int(round(os.path.getmtime(self._file)))

    def password(self):
        """
        returns the cracked plaintext password from <base>.pcap.cracked or None
        """
        return_pass = None
        # strip the known position-file suffix instead of re.split('.', path),
        # which crashed on any path containing an extra dot
        base_filename = self._file
        for ext in ('.gps.json', '.geo.json', '.paw-gps.json'):
            if base_filename.endswith(ext):
                base_filename = base_filename[:-len(ext)]
                break
        password_file_path = base_filename + ".pcap.cracked"
        if os.path.isfile(password_file_path):
            try:
                with open(password_file_path, 'r') as password_file:
                    return_pass = password_file.read()
            except OSError as error:
                logging.error(f"[webgpsmap] OS error loading password: {password_file_path} - error: {format(error)}")
        return return_pass

    def type(self):
        """
        returns the type of the file
        """
        if self._file.endswith('.gps.json'):
            return PositionFile.GPS
        if self._file.endswith('.geo.json'):
            return PositionFile.GEO
        if self._file.endswith('.paw-gps.json'):
            return PositionFile.PAWGPS
        return None

    def lat(self):
        """
        returns the latitude; raises ValueError for missing or zero values
        """
        try:
            lat = None
            # try to get value from known formats
            if 'Latitude' in self._json:
                lat = self._json['Latitude']
            if 'lat' in self._json:
                lat = self._json['lat']  # an old paw-gps format: {"long": 14.693561, "lat": 40.806375}
            if 'location' in self._json:
                if 'lat' in self._json['location']:
                    lat = self._json['location']['lat']
            # check value
            if lat is None:
                raise ValueError(f"Lat is None in {self._filename}")
            if lat == 0:
                raise ValueError(f"Lat is 0 in {self._filename}")
            return lat
        except KeyError:
            pass
        return None

    def lng(self):
        """
        returns the longitude; raises ValueError for missing or zero values
        """
        try:
            lng = None
            # try to get value from known formats
            if 'Longitude' in self._json:
                lng = self._json['Longitude']
            if 'long' in self._json:
                lng = self._json['long']  # an old paw-gps format: {"long": 14.693561, "lat": 40.806375}
            if 'location' in self._json:
                if 'lng' in self._json['location']:
                    lng = self._json['location']['lng']
            # check value
            if lng is None:
                raise ValueError(f"Lng is None in {self._filename}")
            if lng == 0:
                raise ValueError(f"Lng is 0 in {self._filename}")
            return lng
        except KeyError:
            pass
        return None

    def accuracy(self):
        """
        returns the position accuracy in meters (defaults for gps/paw files)
        """
        if self.type() == PositionFile.GPS:
            return 50.0  # a default
        if self.type() == PositionFile.PAWGPS:
            return 50.0  # a default
        if self.type() == PositionFile.GEO:
            try:
                return self._json['accuracy']
            except KeyError:
                pass
        return None

View File

@ -1,209 +0,0 @@
import os
import logging
import json
import csv
import requests
import pwnagotchi
from io import StringIO
from datetime import datetime
from pwnagotchi.utils import WifiInfo, FieldNotFoundError, extract_from_pcap, StatusFile, remove_whitelisted
from threading import Lock
from pwnagotchi import plugins
from pwnagotchi._version import __version__ as __pwnagotchi_version__
def _extract_gps_data(path):
"""
Extract data from gps-file
return json-obj
"""
try:
if path.endswith('.geo.json'):
with open(path, 'r') as json_file:
tempJson = json.load(json_file)
d = datetime.utcfromtimestamp(int(tempJson["ts"]))
return {"Latitude": tempJson["location"]["lat"],
"Longitude": tempJson["location"]["lng"],
"Altitude": 10,
"Accuracy": tempJson["accuracy"],
"Updated": d.strftime('%Y-%m-%dT%H:%M:%S.%f')}
else:
with open(path, 'r') as json_file:
return json.load(json_file)
except OSError as os_err:
raise os_err
except json.JSONDecodeError as json_err:
raise json_err
def _format_auth(data):
out = ""
for auth in data:
out = f"{out}[{auth}]"
return [f"{auth}" for auth in data]
def _transform_wigle_entry(gps_data, pcap_data, plugin_version):
    """
    Transform one handshake (gps + pcap metadata) into a wigle CSV chunk.

    :param gps_data: flat dict as returned by _extract_gps_data
    :param pcap_data: dict keyed by WifiInfo fields extracted from the pcap
    :param plugin_version: version string embedded in the CSV pre-header
    :return: CSV text (pre-header, column header and one data row)
    """
    dummy = StringIO()
    # write kismet header
    dummy.write(f"WigleWifi-1.6,appRelease={plugin_version},model=pwnagotchi,release={__pwnagotchi_version__},"
                f"device={pwnagotchi.name()},display=kismet,board=RaspberryPi,brand=pwnagotchi,star=Sol,body=3,subBody=0\n")
    dummy.write(
        "MAC,SSID,AuthMode,FirstSeen,Channel,RSSI,CurrentLatitude,CurrentLongitude,AltitudeMeters,AccuracyMeters,Type\n")
    writer = csv.writer(dummy, delimiter=",", quoting=csv.QUOTE_NONE, escapechar="\\")
    writer.writerow([
        pcap_data[WifiInfo.BSSID],
        pcap_data[WifiInfo.ESSID],
        _format_auth(pcap_data[WifiInfo.ENCRYPTION]),
        # 'Updated' looks like "2019-10-05T23:12:40.422996+01:00"; drop the
        # fractional seconds / offset and reformat for wigle
        datetime.strptime(gps_data['Updated'].rsplit('.')[0],
                          "%Y-%m-%dT%H:%M:%S").strftime('%Y-%m-%d %H:%M:%S'),
        pcap_data[WifiInfo.CHANNEL],
        pcap_data[WifiInfo.RSSI],
        gps_data['Latitude'],
        gps_data['Longitude'],
        gps_data['Altitude'],
        gps_data['Accuracy'],
        'WIFI'])
    return dummy.getvalue()
def _send_to_wigle(lines, api_key, donate=True, timeout=30):
    """
    Uploads the file to wigle-net.

    :param lines: iterable of CSV chunks (one per handshake)
    :param api_key: wigle "Encoded for use" basic-auth token
    :param donate: whether wigle may share the upload
    :param timeout: request timeout in seconds
    :raises requests.exceptions.RequestException: on HTTP failure or when the
        API response reports success == False
    """
    # concatenate all chunks into one in-memory CSV "file"
    dummy = StringIO()
    for line in lines:
        dummy.write(f"{line}")
    dummy.seek(0)
    headers = {'Authorization': f"Basic {api_key}",
               'Accept': 'application/json'}
    data = {'donate': 'on' if donate else 'false'}
    payload = {'file': (pwnagotchi.name() + ".csv", dummy, 'multipart/form-data', {'Expires': '0'})}
    try:
        res = requests.post('https://api.wigle.net/api/v2/file/upload',
                            data=data,
                            headers=headers,
                            files=payload,
                            timeout=timeout)
        json_res = res.json()
        # the API can answer 200 but still flag a failed upload
        if not json_res['success']:
            raise requests.exceptions.RequestException(json_res['message'])
    except requests.exceptions.RequestException as re_e:
        raise re_e
class Wigle(plugins.Plugin):
    """
    Uploads collected handshake positions to wigle.net whenever internet
    connectivity becomes available.
    """
    __author__ = "Dadav and updated by Jayofelony"
    __version__ = "3.0.1"
    __license__ = "GPL3"
    __description__ = "This plugin automatically uploads collected WiFi to wigle.net"

    def __init__(self):
        self.ready = False
        # persists the set of already-uploaded files across restarts
        self.report = StatusFile('/root/.wigle_uploads', data_format='json')
        self.skip = list()
        self.lock = Lock()
        self.options = dict()

    def on_loaded(self):
        """
        Validates the configuration and marks the plugin ready.
        """
        if 'api_key' not in self.options or ('api_key' in self.options and self.options['api_key'] is None):
            logging.debug("WIGLE: api_key isn't set. Can't upload to wigle.net")
            return
        if 'donate' not in self.options:
            self.options['donate'] = False
        self.ready = True
        logging.info("WIGLE: ready")

    def on_internet_available(self, agent):
        """
        Called when there's internet connectivity; uploads any new handshake
        positions to wigle.net.
        """
        if not self.ready or self.lock.locked():
            return
        # actually hold the lock while working: checking .locked() without
        # acquiring left a race where two callbacks could upload concurrently
        # (the wpa-sec plugin already guards itself this way)
        with self.lock:
            from scapy.all import Scapy_Exception
            config = agent.config()
            display = agent.view()
            reported = self.report.data_field_or('reported', default=list())
            handshake_dir = config['bettercap']['handshakes']
            all_files = os.listdir(handshake_dir)
            all_gps_files = [os.path.join(handshake_dir, filename)
                             for filename in all_files
                             if filename.endswith('.gps.json') or filename.endswith('.geo.json')]
            all_gps_files = remove_whitelisted(all_gps_files, config['main']['whitelist'])
            new_gps_files = set(all_gps_files) - set(reported) - set(self.skip)
            if not new_gps_files:
                return
            logging.info("WIGLE: Internet connectivity detected. Uploading new handshakes to wigle.net")
            csv_entries = list()
            no_err_entries = list()
            for gps_file in new_gps_files:
                # derive the matching pcap; every file matched one of the two
                # suffixes above, so if/else is sufficient
                if gps_file.endswith('.gps.json'):
                    pcap_filename = gps_file.replace('.gps.json', '.pcap')
                else:
                    pcap_filename = gps_file.replace('.geo.json', '.pcap')
                if not os.path.exists(pcap_filename):
                    logging.debug("WIGLE: Can't find pcap for %s", gps_file)
                    self.skip.append(gps_file)
                    continue
                try:
                    gps_data = _extract_gps_data(gps_file)
                except OSError as os_err:
                    logging.debug("WIGLE: %s", os_err)
                    self.skip.append(gps_file)
                    continue
                except json.JSONDecodeError as json_err:
                    logging.debug("WIGLE: %s", json_err)
                    self.skip.append(gps_file)
                    continue
                if gps_data['Latitude'] == 0 and gps_data['Longitude'] == 0:
                    logging.debug("WIGLE: Not enough gps-information for %s. Trying again next time.", gps_file)
                    self.skip.append(gps_file)
                    continue
                try:
                    pcap_data = extract_from_pcap(pcap_filename, [WifiInfo.BSSID,
                                                                  WifiInfo.ESSID,
                                                                  WifiInfo.ENCRYPTION,
                                                                  WifiInfo.CHANNEL,
                                                                  WifiInfo.RSSI])
                except FieldNotFoundError:
                    logging.debug("WIGLE: Could not extract all information. Skip %s", gps_file)
                    self.skip.append(gps_file)
                    continue
                except Scapy_Exception as sc_e:
                    logging.debug("WIGLE: %s", sc_e)
                    self.skip.append(gps_file)
                    continue
                csv_entries.append(_transform_wigle_entry(gps_data, pcap_data, self.__version__))
                no_err_entries.append(gps_file)
            if csv_entries:
                display.on_uploading('wigle.net')
                try:
                    _send_to_wigle(csv_entries, self.options['api_key'], donate=self.options['donate'])
                    reported += no_err_entries
                    self.report.update(data={'reported': reported})
                    logging.info("WIGLE: Successfully uploaded %d files", len(no_err_entries))
                except requests.exceptions.RequestException as re_e:
                    self.skip += no_err_entries
                    logging.debug("WIGLE: Got an exception while uploading %s", re_e)
                except OSError as os_e:
                    self.skip += no_err_entries
                    logging.debug("WIGLE: Got the following error: %s", os_e)
                display.on_normal()

View File

@ -1,137 +0,0 @@
import os
import logging
import requests
from datetime import datetime
from threading import Lock
from pwnagotchi.utils import StatusFile, remove_whitelisted
from pwnagotchi import plugins
from json.decoder import JSONDecodeError
class WpaSec(plugins.Plugin):
__author__ = '33197631+dadav@users.noreply.github.com'
__version__ = '2.1.0'
__license__ = 'GPL3'
__description__ = 'This plugin automatically uploads handshakes to https://wpa-sec.stanev.org'
def __init__(self):
self.ready = False
self.lock = Lock()
try:
self.report = StatusFile('/root/.wpa_sec_uploads', data_format='json')
except JSONDecodeError:
os.remove("/root/.wpa_sec_uploads")
self.report = StatusFile('/root/.wpa_sec_uploads', data_format='json')
self.options = dict()
self.skip = list()
def _upload_to_wpasec(self, path, timeout=30):
"""
Uploads the file to https://wpa-sec.stanev.org, or another endpoint.
"""
with open(path, 'rb') as file_to_upload:
cookie = {'key': self.options['api_key']}
payload = {'file': file_to_upload}
try:
result = requests.post(self.options['api_url'],
cookies=cookie,
files=payload,
timeout=timeout)
if ' already submitted' in result.text:
logging.debug("%s was already submitted.", path)
except requests.exceptions.RequestException as req_e:
raise req_e
def _download_from_wpasec(self, output, timeout=30):
"""
Downloads the results from wpasec and safes them to output
Output-Format: bssid, station_mac, ssid, password
"""
api_url = self.options['api_url']
if not api_url.endswith('/'):
api_url = f"{api_url}/"
api_url = f"{api_url}?api&dl=1"
cookie = {'key': self.options['api_key']}
try:
result = requests.get(api_url, cookies=cookie, timeout=timeout)
with open(output, 'wb') as output_file:
output_file.write(result.content)
except requests.exceptions.RequestException as req_e:
raise req_e
except OSError as os_e:
raise os_e
def on_loaded(self):
"""
Gets called when the plugin gets loaded
"""
if 'api_key' not in self.options or ('api_key' in self.options and not self.options['api_key']):
logging.error("WPA_SEC: API-KEY isn't set. Can't upload to wpa-sec.stanev.org")
return
if 'api_url' not in self.options or ('api_url' in self.options and not self.options['api_url']):
logging.error("WPA_SEC: API-URL isn't set. Can't upload, no endpoint configured.")
return
self.ready = True
logging.info("WPA_SEC: plugin loaded")
def on_webhook(self, path, request):
    """Redirect web-UI visitors to the wpa-sec site, pre-authenticated via the API-key cookie."""
    from flask import make_response, redirect
    resp = make_response(redirect(self.options['api_url'], code=302))
    resp.set_cookie('key', self.options['api_key'])
    return resp
def on_internet_available(self, agent):
    """
    Called in manual mode when there's internet connectivity.

    Uploads any new, non-whitelisted .pcap handshakes to wpa-sec and,
    when 'download_results' is enabled, refreshes the cracked potfile
    at most once per hour.

    :param agent: pwnagotchi agent providing config() and view()
    """
    # Best-effort re-entrancy guard: bail out if another run is active.
    if not self.ready or self.lock.locked():
        return
    with self.lock:
        config = agent.config()
        display = agent.view()
        reported = self.report.data_field_or('reported', default=list())
        handshake_dir = config['bettercap']['handshakes']
        handshake_filenames = os.listdir(handshake_dir)
        handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if filename.endswith('.pcap')]
        handshake_paths = remove_whitelisted(handshake_paths, config['main']['whitelist'])
        # New = on disk, not yet reported, not skipped this session.
        handshake_new = set(handshake_paths) - set(reported) - set(self.skip)
        if handshake_new:
            logging.info("WPA_SEC: Internet connectivity detected. Uploading new handshakes to wpa-sec.stanev.org")
            for idx, handshake in enumerate(handshake_new):
                display.on_uploading(f"wpa-sec.stanev.org ({idx + 1}/{len(handshake_new)})")
                try:
                    self._upload_to_wpasec(handshake)
                    reported.append(handshake)
                    # Persist after every success so a crash mid-batch
                    # doesn't cause re-uploads next time.
                    self.report.update(data={'reported': reported})
                    logging.debug("WPA_SEC: Successfully uploaded %s", handshake)
                except requests.exceptions.RequestException as req_e:
                    # Network trouble: skip this file for the rest of the session.
                    self.skip.append(handshake)
                    logging.debug("WPA_SEC: %s", req_e)
                    continue
                except OSError as os_e:
                    logging.debug("WPA_SEC: %s", os_e)
                    continue
            display.on_normal()
        if self.options.get('download_results'):
            cracked_file = os.path.join(handshake_dir, 'wpa-sec.cracked.potfile')
            if os.path.exists(cracked_file):
                last_check = datetime.fromtimestamp(os.path.getmtime(cracked_file))
                # BUG FIX: the old code used timedelta.seconds, which is only
                # the 0-86399 seconds *component* and ignores .days — a
                # days-old potfile could be wrongly treated as fresh.
                # total_seconds() measures the full elapsed time.
                if (datetime.now() - last_check).total_seconds() < 3600:
                    return
            try:
                self._download_from_wpasec(cracked_file)
                logging.info("WPA_SEC: Downloaded cracked passwords.")
            except requests.exceptions.RequestException as req_e:
                logging.debug("WPA_SEC: %s", req_e)
            except OSError as os_e:
                logging.debug("WPA_SEC: %s", os_e)