mirror of https://github.com/jayofelony/pwnagotchi.git
synced 2025-07-01 18:37:27 -04:00

.github/workflows/publish.yml (vendored, new file, 212 lines)
@@ -0,0 +1,212 @@
+name: Publish
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'Version number'
+        required: true
+
+jobs:
+
+  publish:
+    runs-on: ubuntu-latest
+    steps:
+
+      - name: Remove unnecessary directories
+        run: |
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf /usr/local/share/boost
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+
+      - name: Check disk space
+        run: df -BG
+
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Validate tag
+        id: tag-setter
+        run: |
+          TAG=${{ github.event.inputs.version }}
+          if [[ $TAG =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo "Tag $TAG is valid."
+            echo "TAG=$TAG" >> $GITHUB_OUTPUT
+          else
+            echo "Tag $TAG is not a valid semantic version. Aborting."
+            exit 1
+          fi
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.9
+
+      - name: Install dependencies
+        run: |
+          sudo apt-get update && sudo apt-get install -y libdbus-1-dev curl unzip gettext qemu-utils qemu qemu-user-static binfmt-support
+          pip install -r requirements.txt
+
+      - name: Update QEMU
+        run: |
+          sudo update-binfmts --enable qemu-aarch64
+          echo $(ls /usr/bin/qemu-aarch64-static)
+
+      - name: Restart binfmt-support
+        run: sudo service binfmt-support restart
+
+      - name: Mount binfmt_misc
+        run: |
+          if ! grep -qs '/proc/sys/fs/binfmt_misc ' /proc/mounts; then
+            echo "Mounting binfmt_misc"
+            sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
+          fi
+
+      - name: Restart binfmt-support
+        run: sudo service binfmt-support restart
+
+      - name: Update Languages
+        run: make update_langs
+
+      - name: Compile Languages
+        run: make compile_langs
+
+      - name: Check disk space
+        run: df -BG
+
+      - name: Check qemu-user-static package
+        run: |
+          echo "Checking qemu-user-static package..."
+          dpkg -s qemu-user-static && echo "qemu-user-static is installed." || echo "qemu-user-static is NOT installed."
+
+      - name: Check binfmt-support service
+        run: |
+          echo "Checking binfmt-support service..."
+          service binfmt-support status && echo "binfmt-support service is running." || echo "binfmt-support service is NOT running."
+
+      - name: Check binfmt_misc filesystem
+        run: |
+          echo "Checking binfmt_misc filesystem..."
+          mount | grep binfmt_misc && echo "binfmt_misc is mounted." || echo "binfmt_misc is NOT mounted."
+          echo $(ls /proc/sys/fs/binfmt_misc | grep qemu-aarch64)
+
+      - name: Run Makefile
+        run: make
+        env:
+          PWN_VERSION: ${{ steps.tag-setter.outputs.TAG }}
+
+      - name: PiShrink
+        run: |
+          wget https://raw.githubusercontent.com/Drewsif/PiShrink/master/pishrink.sh
+          chmod +x pishrink.sh
+          sudo mv pishrink.sh /usr/local/bin
+          find /home/runner/work/ -type f -name "*.img" -exec sudo pishrink.sh {} \;
+
+      - name: Compress .img files
+        run: |
+          find /home/runner/work/ -type f -name "*.img" -exec xz --no-warn {} \;
+
+      - name: Create tag
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const version = "${{ steps.tag-setter.outputs.TAG }}"
+            console.log(`Creating tag ${version}`)
+            await github.rest.git.createRef({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              ref: `refs/tags/${version}`,
+              sha: context.sha
+            })
+
+      - name: Create Release
+        id: create_release
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const tag = "${{ steps.tag-setter.outputs.TAG }}"
+            console.log(`Creating release with tag: ${tag}`)
+            const release = await github.rest.repos.createRelease({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              tag_name: tag,
+              name: tag,
+              draft: false,
+              prerelease: true,
+              generate_release_notes: true
+            })
+            console.log(`Created release with id: ${release.data.id}`)
+            return release.data.id
+
+      - name: Upload Release Asset
+        id: upload-release-asset
+        uses: actions/github-script@v7
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          script: |
+            const fs = require('fs');
+            const path = require('path');
+            const release_id = "${{ steps.create_release.outputs.result }}";
+            const asset_content_type = 'application/octet-stream';
+            const distDir = '/home/runner/work/';
+
+            const uploadFile = async (filePath) => {
+              if (fs.lstatSync(filePath).isDirectory()) {
+                const files = fs.readdirSync(filePath);
+                for (const file of files) {
+                  await uploadFile(path.join(filePath, file));
+                }
+              } else {
+                // Check if the file has a .xz extension
+                if (path.extname(filePath) === '.xz') {
+                  console.log(`Uploading ${filePath}...`);
+
+                  const asset_name = path.basename(filePath);
+                  const asset_size = fs.statSync(filePath).size;
+                  const asset = fs.createReadStream(filePath);
+
+                  const response = await github.rest.repos.uploadReleaseAsset({
+                    owner: context.repo.owner,
+                    repo: context.repo.repo,
+                    release_id: release_id,
+                    name: asset_name,
+                    data: asset,
+                    headers: {
+                      'content-type': asset_content_type,
+                      'content-length': asset_size
+                    }
+                  });
+
+                  console.log(`Uploaded ${filePath}: ${response.data.browser_download_url}`);
+                }
+              }
+            }
+
+            await uploadFile(distDir);
+
+      - name: Update Release
+        uses: actions/github-script@v7
+        with:
+          script: |
+            const release_id = "${{ steps.create_release.outputs.result }}"
+            console.log(`Updating release with id: ${release_id}`)
+            github.rest.repos.updateRelease({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              release_id: release_id,
+              tag_name: "${{ steps.tag-setter.outputs.TAG }}",
+              name: "${{ steps.tag-setter.outputs.TAG }}",
+              draft: false,
+              prerelease: false
+            })
+
+      - name: Save environment variable
+        run: echo "${{ steps.tag-setter.outputs.TAG }}" > env_var.txt
+
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: env-var
+          path: env_var.txt
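Note: the "Validate tag" step above gates the whole pipeline on a plain vMAJOR.MINOR.PATCH tag. A minimal Python sketch of the equivalent check (the workflow itself does this in bash):

```python
import re
import sys

# Same pattern as the workflow's bash test: ^v[0-9]+\.[0-9]+\.[0-9]+$
SEMVER_TAG = re.compile(r"^v\d+\.\d+\.\d+$")

def validate_tag(tag: str) -> str:
    """Return the tag if it is a plain vMAJOR.MINOR.PATCH version, else abort."""
    if SEMVER_TAG.match(tag):
        print(f"Tag {tag} is valid.")
        return tag
    print(f"Tag {tag} is not a valid semantic version. Aborting.")
    sys.exit(1)

validate_tag("v2.8.2")   # passes
# validate_tag("2.8.2")  # would abort: the leading "v" is required
```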
.idea/misc.xml (generated, 2 lines changed)

@@ -3,7 +3,7 @@
   <component name="Black">
     <option name="sdkName" value="Python 3.11 (pwnagotchi)" />
   </component>
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10 (pwnagotchi-torch-bookworm)" project-jdk-type="Python SDK" />
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10" project-jdk-type="Python SDK" />
   <component name="PythonCompatibilityInspectionAdvertiser">
     <option name="version" value="3" />
   </component>
.idea/pwnagotchi.iml (generated, 2 lines changed)

@@ -4,7 +4,7 @@
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/venv" />
     </content>
-    <orderEntry type="jdk" jdkName="Python 3.10 (pwnagotchi-torch-bookworm)" jdkType="Python SDK" />
+    <orderEntry type="jdk" jdkName="Python 3.10" jdkType="Python SDK" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
   <component name="PyDocumentationSettings">
Makefile (10 lines changed)

@@ -1,6 +1,6 @@
 PACKER_VERSION := 1.10.0
 PWN_HOSTNAME := pwnagotchi
-PWN_VERSION := $(shell cut -d"'" -f2 < pwnagotchi/_version.py)
+PWN_VERSION := ${PWN_VERSION}
 
 MACHINE_TYPE := $(shell uname -m)
 ifneq (,$(filter x86_64,$(MACHINE_TYPE)))
@@ -48,15 +48,11 @@ $(PACKER):
 	rm $(PACKER).zip
 	chmod +x $@
 
-SDIST := dist/pwnagotchi-$(PWN_VERSION).tar.gz
-$(SDIST): setup.py pwnagotchi
-	python3 setup.py sdist
-
 # Building the image requires packer, but don't rebuild the image just because packer updated.
 pwnagotchi: | $(PACKER)
 
 # If the packer or ansible files are updated, rebuild the image.
-pwnagotchi: $(SDIST) builder/pwnagotchi.json.pkr.hcl builder/raspberrypi64.yml $(shell find builder/data -type f)
+pwnagotchi: builder/pwnagotchi.json.pkr.hcl builder/raspberrypi64.yml $(shell find builder/data -type f)
 	cd builder && $(PACKER) init pwnagotchi.json.pkr.hcl && sudo $(UNSHARE) $(PACKER) build -var "pwn_hostname=$(PWN_HOSTNAME)" -var "pwn_version=$(PWN_VERSION)" pwnagotchi.json.pkr.hcl
 
@@ -64,5 +60,5 @@ pwnagotchi: $(SDIST) builder/pwnagotchi.json.pkr.hcl builder/raspberrypi64.yml $
 image: pwnagotchi
 
 clean:
-	- rm -rf build dist pwnagotchi.egg-info
+	- rm -rf dist pwnagotchi.egg-info
 	- rm -f $(PACKER)
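Note: with PWN_VERSION := ${PWN_VERSION}, make no longer derives the version from pwnagotchi/_version.py; it now comes from the environment, which the workflow's "Run Makefile" step provides. A hypothetical sketch of that hand-off (the version string is an example):

```python
import os
import subprocess

# Mirror of the CI step "Run Makefile": export the validated tag so the
# Makefile's ${PWN_VERSION} expansion picks it up from the environment.
env = dict(os.environ, PWN_VERSION="v2.8.2")  # example tag, set by tag-setter in CI
subprocess.run(["make"], env=env, check=True)
```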
README.md

@@ -2,6 +2,9 @@
 <a href="https://github.com/jayofelony/pwnagotchi-bookworm/releases/latest"><img alt="Release" src="https://img.shields.io/github/release/jayofelony/pwnagotchi-bookworm.svg"></a><br/>
 **This fork of [Pwnagotchi](https://www.pwnagotchi.ai) is only for 64-bit Raspberry Pis, such as the Zero 2 W, 3(B+), 4(B), and the new Raspberry Pi 5.**
+
+The RPi5 can currently only be used headless (without a display).
+
 If you are using an older 32-bit Raspberry Pi such as the Zero W/WH, use this [fork](https://github.com/jayofelony/pwnagotchi-torch/releases/tag/v2.6.4) and make sure you download the `armhf` version.
 
 ---
builder/pwnagotchi.json.pkr.hcl

@@ -1,11 +1,9 @@
-# This is not working quite yet
-# https://github.com/mkaczanowski/packer-builder-arm/pull/172
 packer {
   required_plugins {
-    #arm = {
-    #  version = "~> 1"
-    #  source = "github.com/cdecoux/builder-arm"
-    #}
+    arm = {
+      version = "1.0.0"
+      source  = "github.com/cdecoux/builder-arm"
+    }
     ansible = {
       source  = "github.com/hashicorp/ansible"
       version = "~> 1"
@@ -28,8 +26,8 @@ source "arm" "rpi64-pwnagotchi" {
   file_target_extension = "xz"
   file_unarchive_cmd = ["unxz", "$ARCHIVE_PATH"]
   image_path = "../../../pwnagotchi-rpi-bookworm-${var.pwn_version}-arm64.img"
-  qemu_binary_source_path = "/usr/bin/qemu-aarch64-static"
-  qemu_binary_destination_path = "/usr/bin/qemu-aarch64-static"
+  qemu_binary_source_path = "/usr/libexec/qemu-binfmt/aarch64-binfmt-P"
+  qemu_binary_destination_path = "/usr/libexec/qemu-binfmt/aarch64-binfmt-P"
   image_build_method = "resize"
   image_size = "9G"
   image_type = "dos"
@@ -51,6 +49,8 @@ source "arm" "rpi64-pwnagotchi" {
   }
 }
 
+
+
 # a build block invokes sources and runs provisioning steps on them. The
 # documentation for build blocks can be found here:
 # https://www.packer.io/docs/from-1.5/blocks/build
@@ -86,7 +86,6 @@ build {
       "data/etc/systemd/system/pwngrid-peer.service",
     ]
   }
-
   provisioner "file" {
     destination = "/etc/update-motd.d/01-motd"
     source      = "data/etc/update-motd.d/01-motd"
@@ -95,11 +94,7 @@ build {
     inline = ["chmod +x /etc/update-motd.d/*"]
   }
   provisioner "shell" {
-    inline = [
-      "apt-get -y --allow-releaseinfo-change update",
-      "apt-get -y dist-upgrade",
-      "apt-get install -y --no-install-recommends ansible"
-    ]
+    inline = ["apt-get -y --allow-releaseinfo-change update", "apt-get -y dist-upgrade", "apt-get install -y --no-install-recommends ansible"]
   }
   provisioner "ansible-local" {
     command = "ANSIBLE_FORCE_COLOR=1 PYTHONUNBUFFERED=1 PWN_VERSION=${var.pwn_version} PWN_HOSTNAME=${var.pwn_hostname} ansible-playbook"
builder/raspberrypi64.yml

@@ -40,11 +40,11 @@
       source: "https://github.com/jayofelony/caplets.git"
     bettercap:
       source: "https://github.com/jayofelony/bettercap.git"
-      url: "https://github.com/jayofelony/bettercap/releases/download/2.32.2/bettercap-2.32.2.zip"
+      url: "https://github.com/jayofelony/bettercap/releases/download/v2.32.4/bettercap-2.32.4-aarch64.zip"
       ui: "https://github.com/bettercap/ui/releases/download/v1.3.0/ui.zip"
     pwngrid:
       source: "https://github.com/jayofelony/pwngrid.git"
-      url: "https://github.com/jayofelony/pwngrid/releases/download/v1.10.5/pwngrid-1.10.5-aarch64.zip"
+      url: "https://github.com/jayofelony/pwngrid/releases/download/v1.11.0/pwngrid-1.11.0-aarch64.zip"
     apt:
       downgrade:
         - libpcap-dev_1.9.1-4_arm64.deb
@@ -174,6 +174,9 @@
       - xxd
       - zlib1g-dev
      - zram-tools
+  environment:
+    ARCHFLAGS: "-arch aarch64"
+    QEMU_UNAME: "{{ kernel.full }}"
 
   tasks:
     # First we install packages
pwnagotchi/_version.py

@@ -1 +1 @@
-__version__ = '2.8.1'
+__version__ = '2.8.2'
pwnagotchi/plugins/default/gps.py

@@ -48,7 +48,7 @@ class GPS(plugins.Plugin):
         if self.running:
             info = agent.session()
             self.coordinates = info["gps"]
-            gps_filename = filename.replace(".pcap", ".gps.json")
+            gps_filename = filename.replace(".pcapng", ".gps.json")
 
             if self.coordinates and all([
                 # avoid 0.000... measurements
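Note: the GPS plugin stores fixes in a sidecar file named after the handshake; only the capture extension it swaps out has changed. A minimal sketch of the convention (the path is made up):

```python
# Hypothetical handshake path; the plugin swaps the capture extension
# for ".gps.json" and writes the coordinates next to the capture.
filename = "/root/handshakes/MyCafe_0123456789ab.pcapng"
gps_filename = filename.replace(".pcapng", ".gps.json")
print(gps_filename)  # /root/handshakes/MyCafe_0123456789ab.gps.json
```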
pwnagotchi/plugins/default/grid.py

@@ -5,22 +5,21 @@ import glob
 import re
 
 import pwnagotchi.grid as grid
-import pwnagotchi.plugins
 import pwnagotchi.plugins as plugins
-from pwnagotchi.utils import StatusFile, WifiInfo, extract_from_pcap
+from pwnagotchi.utils import StatusFile, WifiInfo, extract_from_pcapng
 from threading import Lock
 
 
 def parse_pcap(filename):
     logging.info("grid: parsing %s ..." % filename)
 
-    net_id = os.path.basename(filename).replace('.pcap', '')
+    net_id = os.path.basename(filename).replace('.pcapng', '')
 
     if '_' in net_id:
-        # /root/handshakes/ESSID_BSSID.pcap
+        # /root/handshakes/ESSID_BSSID.pcapng
         essid, bssid = net_id.split('_')
     else:
-        # /root/handshakes/BSSID.pcap
+        # /root/handshakes/BSSID.pcapng
         essid, bssid = '', net_id
 
     mac_re = re.compile('[0-9a-fA-F]{12}')
@@ -36,7 +35,7 @@ def parse_pcap(filename):
     }
 
     try:
-        info = extract_from_pcap(filename, [WifiInfo.BSSID, WifiInfo.ESSID])
+        info = extract_from_pcapng(filename, [WifiInfo.BSSID, WifiInfo.ESSID])
     except Exception as e:
         logging.error("grid: %s" % e)
@@ -87,10 +86,10 @@ class Grid(plugins.Plugin):
         agent.view().on_unread_messages(self.unread_messages, self.total_messages)
 
     def check_handshakes(self, agent):
-        logging.debug("checking pcaps")
+        logging.debug("checking pcapng's")
         config = agent.config()
 
-        pcap_files = glob.glob(os.path.join(agent.config()['bettercap']['handshakes'], "*.pcap"))
+        pcap_files = glob.glob(os.path.join(agent.config()['bettercap']['handshakes'], "*.pcapng"))
         num_networks = len(pcap_files)
         reported = self.report.data_field_or('reported', default=[])
         num_reported = len(reported)
@@ -103,7 +102,7 @@ class Grid(plugins.Plugin):
             logging.debug(" exclude: %s" % config['main']['whitelist'])
 
             for pcap_file in pcap_files:
-                net_id = os.path.basename(pcap_file).replace('.pcap', '')
+                net_id = os.path.basename(pcap_file).replace('.pcapng', '')
                 if net_id not in reported:
                     if self.is_excluded(net_id, agent):
                         logging.debug("skipping %s due to exclusion filter" % pcap_file)
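Note: parse_pcap() recovers the network identity purely from the file name, which is either ESSID_BSSID.pcapng or a bare BSSID.pcapng. A standalone sketch of that logic (the example paths are made up):

```python
import os

def net_id_parts(path):
    """Split a handshake file name into (essid, bssid), as parse_pcap() does."""
    net_id = os.path.basename(path).replace('.pcapng', '')
    if '_' in net_id:
        # /root/handshakes/ESSID_BSSID.pcapng
        essid, bssid = net_id.split('_')
    else:
        # /root/handshakes/BSSID.pcapng
        essid, bssid = '', net_id
    return essid, bssid

print(net_id_parts("/root/handshakes/MyCafe_0123456789ab.pcapng"))  # ('MyCafe', '0123456789ab')
print(net_id_parts("/root/handshakes/0123456789ab.pcapng"))         # ('', '0123456789ab')
```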
pwnagotchi/plugins/default/hashie.py

@@ -82,12 +82,12 @@ class Hashie(plugins.Plugin):
         if os.path.isfile(fullpathNoExt + '.22000'):
             handshake_status.append('Already have {}.22000 (EAPOL)'.format(name))
         elif self._writeEAPOL(filename):
-            handshake_status.append('Created {}.22000 (EAPOL) from pcap'.format(name))
+            handshake_status.append('Created {}.22000 (EAPOL) from pcapng'.format(name))
 
         if os.path.isfile(fullpathNoExt + '.16800'):
             handshake_status.append('Already have {}.16800 (PMKID)'.format(name))
         elif self._writePMKID(filename):
-            handshake_status.append('Created {}.16800 (PMKID) from pcap'.format(name))
+            handshake_status.append('Created {}.16800 (PMKID) from pcapng'.format(name))
 
         if handshake_status:
             logging.info('[Hashie] Good news:\n\t' + '\n\t'.join(handshake_status))
@@ -111,7 +111,7 @@ class Hashie(plugins.Plugin):
             return False
 
     def _process_stale_pcaps(self, handshake_dir):
-        handshakes_list = [os.path.join(handshake_dir, filename) for filename in os.listdir(handshake_dir) if filename.endswith('.pcap')]
+        handshakes_list = [os.path.join(handshake_dir, filename) for filename in os.listdir(handshake_dir) if filename.endswith('.pcapng')]
         failed_jobs = []
         successful_jobs = []
         lonely_pcaps = []
pwnagotchi/plugins/default/net-pos.py

@@ -108,7 +108,7 @@ class NetPos(plugins.Plugin):
             return
 
         netpos["ts"] = int("%.0f" % time.time())
-        netpos_filename = filename.replace('.pcap', '.net-pos.json')
+        netpos_filename = filename.replace('.pcapng', '.net-pos.json')
         logging.debug("NET-POS: Saving net-location to %s", netpos_filename)
 
         try:
pwnagotchi/plugins/default/onlinehashcrack.py

@@ -142,6 +142,6 @@ class OnlineHashCrack(plugins.Plugin):
             for row in csv.DictReader(cracked_list):
                 if row['password']:
                     filename = re.sub(r'[^a-zA-Z0-9]', '', row['ESSID']) + '_' + row['BSSID'].replace(':', '')
-                    if os.path.exists(os.path.join(handshake_dir, filename + '.pcap')):
-                        with open(os.path.join(handshake_dir, filename + '.pcap.cracked'), 'w') as f:
+                    if os.path.exists(os.path.join(handshake_dir, filename + '.pcapng')):
+                        with open(os.path.join(handshake_dir, filename + '.pcapng.cracked'), 'w') as f:
                             f.write(row['password'])
pwnagotchi/plugins/default/webgpsmap.py

@@ -13,8 +13,8 @@ from dateutil.parser import parse
 webgpsmap shows existing position data stored in your /handshakes/ directory
 
 the plugin does the following:
-    - search for *.pcap files in your /handshakes/ dir
-    - for every found .pcap file it looks for a .geo.json or .gps.json or .paw-gps.json file with
+    - search for *.pcapng files in your /handshakes/ dir
+    - for every found .pcapng file it looks for a .geo.json or .gps.json file with
       latitude+longitude data inside and shows this position on the map
     - if also a .cracked file with a plaintext password inside exists, it reads the content and shows the
       position as green instead of red and the password inside the infobox of the position
@@ -87,7 +86,8 @@ class Webgpsmap(plugins.Plugin):
             # returns all positions
             try:
                 self.ALREADY_SENT = list()
-                response_data = bytes(json.dumps(self.load_gps_from_dir(self.config['bettercap']['handshakes'])), "utf-8")
+                response_data = bytes(
+                    json.dumps(self.load_gps_from_dir(self.config['bettercap']['handshakes'])), "utf-8")
                 response_status = 200
                 response_mimetype = "application/json"
                 response_header_contenttype = 'application/json'
@@ -100,7 +101,8 @@ class Webgpsmap(plugins.Plugin):
                 self.ALREADY_SENT = list()
                 json_data = json.dumps(self.load_gps_from_dir(self.config['bettercap']['handshakes']))
                 html_data = self.get_html()
-                html_data = html_data.replace('var positions = [];', 'var positions = ' + json_data + ';positionsLoaded=true;drawPositions();')
+                html_data = html_data.replace('var positions = [];',
+                                              'var positions = ' + json_data + ';positionsLoaded=true;drawPositions();')
                 response_data = bytes(html_data, "utf-8")
                 response_status = 200
                 response_mimetype = "application/xhtml+xml"
@@ -163,7 +165,8 @@ class Webgpsmap(plugins.Plugin):
 
         all_files = os.listdir(handshake_dir)
         # print(all_files)
-        all_pcap_files = [os.path.join(handshake_dir, filename) for filename in all_files if filename.endswith('.pcap')]
+        all_pcap_files = [os.path.join(handshake_dir, filename) for filename in all_files if
+                          filename.endswith('.pcapng')]
         all_geo_or_gps_files = []
         for filename_pcap in all_pcap_files:
             filename_base = filename_pcap[:-5]  # remove ".pcap"
@@ -180,11 +183,6 @@ class Webgpsmap(plugins.Plugin):
             if check_for in all_files:
                 filename_position = str(os.path.join(handshake_dir, check_for))
 
-            logging.debug("[webgpsmap] search for .paw-gps.json")
-            check_for = os.path.basename(filename_base) + ".paw-gps.json"
-            if check_for in all_files:
-                filename_position = str(os.path.join(handshake_dir, check_for))
-
             logging.debug(f"[webgpsmap] end search for position data files and use {filename_position}")
 
             if filename_position is not None:
@@ -195,7 +193,8 @@ class Webgpsmap(plugins.Plugin):
         if newest_only:
             all_geo_or_gps_files = set(all_geo_or_gps_files) - set(self.ALREADY_SENT)
 
-        logging.info(f"[webgpsmap] Found {len(all_geo_or_gps_files)} position-data files from {len(all_pcap_files)} handshakes. Fetching positions ...")
+        logging.info(
+            f"[webgpsmap] Found {len(all_geo_or_gps_files)} position-data files from {len(all_pcap_files)} handshakes. Fetching positions ...")
 
         for pos_file in all_geo_or_gps_files:
             try:
@@ -213,8 +212,6 @@ class Webgpsmap(plugins.Plugin):
                     pos_type = 'gps'
                 elif pos.type() == PositionFile.GEO:
                     pos_type = 'geo'
-                elif pos.type() == PositionFile.PAWGPS:
-                    pos_type = 'paw'
                 gps_data[ssid + "_" + mac] = {
                     'ssid': ssid,
                     'mac': mac,
@@ -227,7 +224,7 @@ class Webgpsmap(plugins.Plugin):
                 }
 
                 # get ap password if exist
-                check_for = os.path.basename(pos_file).split(".")[0] + ".pcap.cracked"
+                check_for = os.path.basename(pos_file).split(".")[0] + ".pcapng.cracked"
                 if check_for in all_files:
                     gps_data[ssid + "_" + mac]["pass"] = pos.password()
 
@@ -265,7 +262,6 @@ class PositionFile:
     """
     GPS = 1
     GEO = 2
-    PAWGPS = 3
 
     def __init__(self, path):
         self._file = path
@@ -282,7 +278,7 @@ class PositionFile:
         """
         Returns the mac from filename
         """
-        parsed_mac = re.search(r'.*_?([a-zA-Z0-9]{12})\.(?:gps|geo|paw-gps)\.json', self._filename)
+        parsed_mac = re.search(r'.*_?([a-zA-Z0-9]{12})\.(?:gps|geo)\.json', self._filename)
         if parsed_mac:
             mac = parsed_mac.groups()[0]
             return mac
@@ -292,7 +288,7 @@ class PositionFile:
         """
         Returns the ssid from filename
         """
-        parsed_ssid = re.search(r'(.+)_[a-zA-Z0-9]{12}\.(?:gps|geo|paw-gps)\.json', self._filename)
+        parsed_ssid = re.search(r'(.+)_[a-zA-Z0-9]{12}\.(?:gps|geo)\.json', self._filename)
         if parsed_ssid:
             return parsed_ssid.groups()[0]
         return None
@@ -333,7 +329,7 @@ class PositionFile:
         return_pass = None
         # 2do: make better filename split/remove extension because this one has problems with "." in path
         base_filename, ext1, ext2 = re.split('\.', self._file)
-        password_file_path = base_filename + ".pcap.cracked"
+        password_file_path = base_filename + ".pcapng.cracked"
         if os.path.isfile(password_file_path):
             try:
                 password_file = open(password_file_path, 'r')
@@ -354,8 +350,6 @@ class PositionFile:
             return PositionFile.GPS
         if self._file.endswith('.geo.json'):
             return PositionFile.GEO
-        if self._file.endswith('.paw-gps.json'):
-            return PositionFile.PAWGPS
         return None
 
     def lat(self):
@@ -403,8 +397,6 @@ class PositionFile:
     def accuracy(self):
         if self.type() == PositionFile.GPS:
             return 50.0  # a default
-        if self.type() == PositionFile.PAWGPS:
-            return 50.0  # a default
         if self.type() == PositionFile.GEO:
             try:
                 return self._json['accuracy']
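Note: with .paw-gps.json support removed, the PositionFile regexes now only accept .gps.json and .geo.json names. A quick check of the updated patterns against a sample file name (the name is made up):

```python
import re

filename = "MyCafe_0123456789ab.gps.json"  # hypothetical position file

mac = re.search(r'.*_?([a-zA-Z0-9]{12})\.(?:gps|geo)\.json', filename)
ssid = re.search(r'(.+)_[a-zA-Z0-9]{12}\.(?:gps|geo)\.json', filename)

print(mac.group(1))   # 0123456789ab
print(ssid.group(1))  # MyCafe
```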
pwnagotchi/plugins/default/wigle.py

@@ -7,7 +7,7 @@ import pwnagotchi
 
 from io import StringIO
 from datetime import datetime
-from pwnagotchi.utils import WifiInfo, FieldNotFoundError, extract_from_pcap, StatusFile, remove_whitelisted
+from pwnagotchi.utils import WifiInfo, FieldNotFoundError, extract_from_pcapng, StatusFile, remove_whitelisted
 from threading import Lock
 from pwnagotchi import plugins
 from pwnagotchi._version import __version__ as __pwnagotchi_version__
@@ -153,11 +153,11 @@ class Wigle(plugins.Plugin):
         no_err_entries = list()
         for gps_file in new_gps_files:
             if gps_file.endswith('.gps.json'):
-                pcap_filename = gps_file.replace('.gps.json', '.pcap')
+                pcap_filename = gps_file.replace('.gps.json', '.pcapng')
             if gps_file.endswith('.geo.json'):
-                pcap_filename = gps_file.replace('.geo.json', '.pcap')
+                pcap_filename = gps_file.replace('.geo.json', '.pcapng')
             if not os.path.exists(pcap_filename):
-                logging.debug("WIGLE: Can't find pcap for %s", gps_file)
+                logging.debug("WIGLE: Can't find pcapng for %s", gps_file)
                 self.skip.append(gps_file)
                 continue
             try:
@@ -175,7 +175,7 @@ class Wigle(plugins.Plugin):
                 self.skip.append(gps_file)
                 continue
             try:
-                pcap_data = extract_from_pcap(pcap_filename, [WifiInfo.BSSID,
+                pcap_data = extract_from_pcapng(pcap_filename, [WifiInfo.BSSID,
                                                               WifiInfo.ESSID,
                                                               WifiInfo.ENCRYPTION,
                                                               WifiInfo.CHANNEL,
pwnagotchi/plugins/default/wpa-sec.py

@@ -98,7 +98,7 @@ class WpaSec(plugins.Plugin):
         reported = self.report.data_field_or('reported', default=list())
         handshake_dir = config['bettercap']['handshakes']
         handshake_filenames = os.listdir(handshake_dir)
-        handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if filename.endswith('.pcap')]
+        handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if filename.endswith('.pcapng')]
         handshake_paths = remove_whitelisted(handshake_paths, config['main']['whitelist'])
         handshake_new = set(handshake_paths) - set(reported) - set(self.skip)
pwnagotchi/ui/hw/waveshare3in52.py

@@ -9,36 +9,39 @@ class Waveshare3in52(DisplayImpl):
         super(Waveshare3in52, self).__init__(config, 'waveshare3in52')
 
     def layout(self):
-        fonts.setup(10, 8, 10, 18, 25, 9)
-        self._layout['width'] = 240
-        self._layout['height'] = 360
-        self._layout['face'] = (0, 43)
-        self._layout['name'] = (0, 14)
-        self._layout['channel'] = (0, 0)
-        self._layout['aps'] = (0, 71)
-        self._layout['uptime'] = (0, 25)
-        self._layout['line1'] = [0, 12, 240, 12]
-        self._layout['line2'] = [0, 116, 240, 116]
-        self._layout['friend_face'] = (12, 88)
-        self._layout['friend_name'] = (1, 103)
-        self._layout['shakes'] = (26, 117)
-        self._layout['mode'] = (0, 117)
+        fonts.setup(16, 14, 16, 100, 31, 15)
+        self._layout['width'] = 360
+        self._layout['height'] = 240
+        self._layout['face'] = (0, 40)
+        self._layout['name'] = (0, 0)
+        self._layout['channel'] = (300, 0)
+        self._layout['aps'] = (0, 220)
+        self._layout['uptime'] = (120, 0)
+        self._layout['line1'] = [0, 24, 360, 24]
+        self._layout['line2'] = [0, 220, 360, 220]
+        self._layout['friend_face'] = (0, 195)
+        self._layout['friend_name'] = (0, 185)
+        self._layout['shakes'] = (100, 220)
+        self._layout['mode'] = (0, 200)
         self._layout['status'] = {
-            'pos': (65, 26),
+            'pos': (3, 170),
             'font': fonts.status_font(fonts.Small),
-            'max': 12
+            'max': 100
         }
         return self._layout
 
     def initialize(self):
-        logging.info("initializing waveshare 3.52 inch lcd display")
+        logging.info("initializing waveshare 3.52 inch display")
         from pwnagotchi.ui.hw.libs.waveshare.v3in52.epd3in52 import EPD
         self._display = EPD()
         self._display.init()
         self._display.Clear()
 
     def render(self, canvas):
-        self._display.display(canvas)
+        buf = self._display.getbuffer(canvas)
+        self._display.display(buf)
+        self._display.refresh()
 
     def clear(self):
         self._display.Clear()
pwnagotchi/utils.py

@@ -82,7 +82,7 @@ def remove_whitelisted(list_of_handshakes, list_of_whitelisted_strings, valid_on
 
     for handshake in list_of_handshakes:
         try:
-            normalized_handshake = normalize(os.path.basename(handshake).rstrip('.pcap'))
+            normalized_handshake = normalize(os.path.basename(handshake).rstrip('.pcapng'))
             for whitelist in list_of_whitelisted_strings:
                 normalized_whitelist = normalize(whitelist)
                 if normalized_whitelist in normalized_handshake:
@@ -440,7 +440,7 @@ def secs_to_hhmmss(secs):
 
 
 def total_unique_handshakes(path):
-    expr = os.path.join(path, "*.pcap")
+    expr = os.path.join(path, "*.pcapng")
     return len(glob.glob(expr))
 
 
@@ -498,11 +498,11 @@ def md5(fname):
     return hash_md5.hexdigest()
 
 
-def extract_from_pcap(path, fields):
+def extract_from_pcapng(path, fields):
     """
-    Search in pcap-file for specified information
+    Search in pcapng-file for specified information
 
-    path: Path to pcap file
+    path: Path to pcapng file
     fields: Array of fields that should be extracted
 
     If a field is not found, FieldNotFoundError is raised
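Note on the remove_whitelisted hunk: str.rstrip('.pcapng') strips a set of trailing characters rather than the literal suffix, so a name ending in any of ".", "p", "c", "a", "n", "g" loses extra characters. A short demonstration, with str.removesuffix (Python 3.9+) as the exact-suffix alternative:

```python
# rstrip() treats its argument as a character set, not a suffix:
name = "lan.pcapng"
print(name.rstrip('.pcapng'))        # 'l'   ('a' and 'n' are in the set too)
print(name.removesuffix('.pcapng'))  # 'lan' (exact suffix removal, Python 3.9+)
```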