include docker repos

... skip emobility since it is a dev repo
Marco Ochse
2017-10-13 18:58:14 +00:00
parent f1ada16414
commit 0d5d80b1e3
152 changed files with 22265 additions and 0 deletions

docker/elk/logstash/Dockerfile

@@ -0,0 +1,38 @@
FROM alpine
MAINTAINER MO
# Include dist
ADD dist/ /root/dist/
# Setup env and apt
RUN apk -U upgrade && \
    apk add bash curl git libc6-compat libzmq openjdk8-jre procps wget && \
# Get and install packages
    git clone https://github.com/dtag-dev-sec/listbot /etc/listbot && \
    cd /root/dist/ && \
    mkdir -p /usr/share/logstash/ && \
    wget https://artifacts.elastic.co/downloads/logstash/logstash-5.6.1.tar.gz && \
    wget http://geolite.maxmind.com/download/geoip/database/GeoLite2-ASN.tar.gz && \
    tar xvfz logstash-5.6.1.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
    /usr/share/logstash/bin/logstash-plugin install logstash-filter-translate && \
    /usr/share/logstash/bin/logstash-plugin install logstash-output-syslog && \
    tar xvfz GeoLite2-ASN.tar.gz --strip-components=1 -C /usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/ && \
# Add and move files
    cd /root/dist/ && \
    cp update.sh /usr/bin/ && \
    chmod u+x /usr/bin/update.sh && \
    mkdir -p /etc/logstash/conf.d && \
    cp logstash.conf /etc/logstash/conf.d/ && \
    cp elasticsearch-template-es5x.json /usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-7.4.0-java/lib/logstash/outputs/elasticsearch/ && \
# Clean up
    apk del wget && \
    rm -rf /root/*
# Healthcheck
HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:9600'
# Start logstash
CMD update.sh && /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf
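
The HEALTHCHECK polls Logstash's monitoring API on port 9600, which answers with node info once the instance is up. A minimal sketch for building and smoke-testing the image locally (the image tag and container name are illustrative, not part of this commit):

docker build -t dtagdevsec/logstash .
docker run -d --name logstash-test dtagdevsec/logstash
# Give the JVM a moment to start, then query the monitoring API
docker exec logstash-test curl -s -XGET 'http://127.0.0.1:9600'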

docker/elk/logstash/dist/elasticsearch-template-es5x.json vendored Normal file

@@ -0,0 +1,48 @@
{
  "template" : "logstash-*",
  "version" : 50001,
  "settings" : {
    "index.refresh_interval" : "5s",
    "index.number_of_shards" : "1",
    "index.number_of_replicas" : "0"
  },
  "mappings" : {
    "_default_" : {
      "_all" : {"enabled" : true, "norms" : false},
      "dynamic_templates" : [ {
        "message_field" : {
          "path_match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text",
            "norms" : false
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text", "norms" : false,
            "fields" : {
              "keyword" : { "type": "keyword", "ignore_above": 256 }
            }
          }
        }
      } ],
      "properties" : {
        "@timestamp": { "type": "date", "include_in_all": false },
        "@version": { "type": "keyword", "include_in_all": false },
        "geoip" : {
          "dynamic": true,
          "properties" : {
            "ip": { "type": "ip" },
            "location" : { "type" : "geo_point" },
            "latitude" : { "type" : "half_float" },
            "longitude" : { "type" : "half_float" }
          }
        }
      }
    }
  }
}
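
This template is picked up by the logstash-output-elasticsearch plugin (see the cp in the Dockerfile above) and applied to every new logstash-* index. Once the first events have been shipped, it can be verified against Elasticsearch; a quick check, assuming the cluster is reachable under the container name used in logstash.conf:

curl -s -XGET 'http://elasticsearch:9200/_template/logstash?pretty'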

docker/elk/logstash/dist/gen_cve_map.sh vendored Executable file

@@ -0,0 +1,32 @@
#!/bin/bash
myURL="https://rules.emergingthreats.net/open/suricata-4.0/rules/sid-msg.map"
myRULESFILE="sid-msg.map"
myCVEMAP="cve.yaml"
# Clear cve map
rm -f $myCVEMAP
# Download SID map from ET if server offers newer file
wget -N $myURL
myRULESCOUNT=$(wc -l < $myRULESFILE)
# Just extract rules with a CVE ID; for proper matching we also need the SID
let i=0
let j=0
while read -r myRULE
do
  (( ++i ))
  echo -ne "Processing rules, please be patient ($i / $myRULESCOUNT)\r"
  myCVE=$(echo "$myRULE" | grep -o -E "(cve,|CVE-|CAN-)([0-9]{4}-([0-9]{4}|[0-9]{5}))" | tr a-z A-Z | tr ",|-" " " | awk '{ print $1"-"$2"-"$3 }')
  if [ "$myCVE" != "" ]
  then
    mySID=$(echo "$myRULE" | awk '{ print $1 }')
    echo \"$mySID\": \"$myCVE\" >> $myCVEMAP
    (( ++j ))
  fi
done < "$myRULESFILE"
echo
echo "Done. $j CVE IDs have been mapped."
# Clean up
rm $myRULESFILE
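
Each appended line is a flat YAML key/value pair mapping a Suricata SID to its CVE ID; the translate filter in logstash.conf below looks these up via [alert][signature_id]. A hypothetical excerpt of the generated cve.yaml (SIDs and CVE numbers are illustrative only):

"2019232": "CVE-2014-6271"
"2016683": "CVE-2012-1823"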

docker/elk/logstash/dist/gen_iprep_map.sh vendored Executable file

@@ -0,0 +1,127 @@
#!/bin/bash
myIPREPMAP="iprep.yaml"
# ANSI colors for status output
myRED=$'\e[31m'
myGREEN=$'\e[32m'
myBLUE=$'\e[34m'
myWHITE=$'\e[37m'
# Prepare for new files
rm -rf *.raw *.yaml.tmp iprep.yaml
### Define repeating commands as functions
# Download only if the host is up and the file is newer; follow redirects
fuCURL () {
  local myFILE=$1
  local myURL=$2
  local myHOST=$(echo $2 | cut -d "/" -f3)
  echo -n "[ Now checking host ] [$myBLUE $myHOST $myWHITE] "
  curl --connect-timeout 5 -IsS $myHOST >/dev/null 2>&1
  if [ $? -eq 0 ];
  then
    echo "[$myGREEN OK $myWHITE]"
    echo -n "[ Now downloading ] [$myBLUE $myURL $myWHITE] "
    curl -fLso $myFILE -z $myFILE $myURL
    if [ $? -eq 0 ];
    then
      echo "[$myGREEN OK $myWHITE]"
    else
      echo "[$myRED ERROR $myWHITE]"
    fi
  else
    echo "[$myRED ERROR $myWHITE]"
  fi
}
# Only match lines with CIDR addresses, unzip if necessary
# Duplicates will be eliminated for the final translation map!
fuMATCHCIDR () {
  local myFILE=$1
  if [ -f $myFILE ];
  then
    myZIP=$(file $myFILE | grep -c "Zip")
    if [ "$myZIP" == "1" ]
    then
      unzip -p $myFILE | grep -o -P "\b(?:\d{1,3}\.){3}\d{1,3}/\d{1,2}\b" | xargs -I '{}' prips '{}'
    else
      grep -o -P "\b(?:\d{1,3}\.){3}\d{1,3}/\d{1,2}\b" $myFILE | xargs -I '{}' prips '{}'
    fi
  fi
}
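# Note on prips (assumed available on the host running this script; it is
# not installed by the Dockerfile above): it expands a CIDR block into its
# individual addresses so the translate filter can do exact-match lookups,
# e.g. (illustrative):
#   prips 192.0.2.0/30   ->   192.0.2.0 192.0.2.1 192.0.2.2 192.0.2.3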
# Only match lines with IPv4 addresses, unzip if necessary
# Duplicates will be eliminated for the final translation map!
fuMATCHIP () {
  local myFILE=$1
  if [ -f $myFILE ];
  then
    myZIP=$(file $myFILE | grep -c "Zip")
    if [ "$myZIP" == "1" ]
    then
      unzip -p $myFILE | grep -o -P "\b(?:\d{1,3}\.){3}\d{1,3}\b"
    else
      grep -o -P "\b(?:\d{1,3}\.){3}\d{1,3}\b" $myFILE
    fi
  fi
}
### Define download function
fuDOWNLOAD () {
  local myURL=$1
  local myTAG=$2
  local myTMPFILE="$3.tmp"
  local myYAMLFILE="$3.raw"
  fuCURL $myTMPFILE $myURL
  fuMATCHCIDR $myTMPFILE | awk '{ print "\""$1"\": \"" "'"$myTAG"'" "\"" }' > $myYAMLFILE
  fuMATCHIP $myTMPFILE | awk '{ print "\""$1"\": \"" "'"$myTAG"'" "\"" }' >> $myYAMLFILE
  mySIZE=$(wc -l < $myYAMLFILE)
  if [ "$mySIZE" != "0" ]
  then
    echo "[ Control output ] [$myBLUE $(head -n 1 $myYAMLFILE) $myWHITE]"
  else
    echo "[ Control output ] [$myRED EMPTY FILE $myWHITE]"
  fi
}
# Download reputation lists
fuDOWNLOAD "https://reputation.alienvault.com/reputation.generic" "bad reputation" "alienvault"
fuDOWNLOAD "https://www.badips.com/get/list/any/2" "known attacker" "badips"
fuDOWNLOAD "http://osint.bambenekconsulting.com/feeds/c2-ipmasterlist.txt" "C2 server" "bambenek"
fuDOWNLOAD "https://lists.blocklist.de/lists/all.txt" "known attacker" "blocklist"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/botscout_1d.ipset" "form spammer" "firehol_botscout"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/cruzit_web_attacks.ipset" "known attacker" "firehol_cruzit"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/malwaredomainlist.ipset" "known atttacker" "firehol_mwdomainlist"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/proxylists_1d.ipset" "anonymizer" "firehol_proxylists"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/proxyrss_1d.ipset" "anonymizer" "firehol_proxyrss"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/proxyspy_1d.ipset" "anonymizer" "firehol_proxyspy"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/ri_web_proxies_30d.ipset" "anonymizer" "firehol_web_proxies"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/socks_proxy_7d.ipset" "anonymizer" "firehol_socks_proxy"
fuDOWNLOAD "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/sslproxies_1d.ipset" "anonymizer" "firehol_sslproxies"
fuDOWNLOAD "http://danger.rulez.sk/projects/bruteforceblocker/blist.php" "known attacker" "rulez"
fuDOWNLOAD "http://cinsscore.com/list/ci-badguys.txt" "known attacker" "cinsscore"
fuDOWNLOAD "https://rules.emergingthreats.net/open/suricata/rules/compromised-ips.txt" "compromised" "et_compromised"
fuDOWNLOAD "http://blocklist.greensnow.co/greensnow.txt" "known attacker" "greensnow"
fuDOWNLOAD "http://www.nothink.org/blacklist/blacklist_malware_irc.txt" "malware" "nothink"
fuDOWNLOAD "http://cybersweat.shop/iprep/iprep_ramnode.txt" "known attacker" "cybersweat"
fuDOWNLOAD "http://spys.me/proxy.txt" "anonymizer" "spys"
fuDOWNLOAD "http://ransomwaretracker.abuse.ch/downloads/RW_IPBL.txt" "ransomware" "ransomwaretracker"
fuDOWNLOAD "https://report.cs.rutgers.edu/DROP/attackers" "known attacker" "rutgers"
fuDOWNLOAD "http://sblam.com/blacklist.txt" "form spammer" "sblam"
fuDOWNLOAD "https://sslbl.abuse.ch/blacklist/sslipblacklist.csv" "C2 server" "sslbl"
fuDOWNLOAD "http://www.talosintelligence.com/feeds/ip-filter.blf" "bad reputation" "talos"
fuDOWNLOAD "https://check.torproject.org/cgi-bin/TorBulkExitList.py?ip=1.1.1.1" "tor exit node" "torexit"
fuDOWNLOAD "https://torstatus.blutmagie.de/ip_list_all.php/Tor_ip_list_ALL.csv" "tor exit node" "torip"
fuDOWNLOAD "https://www.turris.cz/greylist-data/greylist-latest.csv" "bad reputation" "turris"
fuDOWNLOAD "https://zeustracker.abuse.ch/blocklist.php?download=badips" "malware" "zeustracker"
fuDOWNLOAD "https://raw.githubusercontent.com/stamparm/maltrail/master/trails/static/mass_scanner.txt" "mass scanner" "maltrail_mass_scanner"
fuDOWNLOAD "https://myip.ms/files/blacklist/general/full_blacklist_database.zip" "bot, crawler" "myip"
# Generate logstash translation map for ip reputation lookup
echo -n "[ Building translation map ] "
cat *.raw > $myIPREPMAP.tmp
# Remove duplicates
sort -u $myIPREPMAP.tmp > $myIPREPMAP
echo "[$myGREEN DONE $myWHITE]"

docker/elk/logstash/dist/logstash.conf vendored Normal file

@@ -0,0 +1,416 @@
# Input section
input {
  # Suricata
  file {
    path => ["/data/suricata/log/eve.json"]
    codec => json
    type => "Suricata"
  }
  # P0f
  file {
    path => ["/data/p0f/log/p0f.json"]
    codec => json
    type => "P0f"
  }
  # Conpot
  file {
    path => ["/data/conpot/log/conpot.json"]
    codec => json
    type => "ConPot"
  }
  # Cowrie
  file {
    path => ["/data/cowrie/log/cowrie.json"]
    codec => json
    type => "Cowrie"
  }
  # Dionaea
  file {
    path => ["/data/dionaea/log/dionaea.json"]
    codec => json
    type => "Dionaea"
  }
  # Elasticpot
  file {
    path => ["/data/elasticpot/log/elasticpot.log"]
    codec => json
    type => "ElasticPot"
  }
  # eMobility
  file {
    path => ["/data/emobility/log/centralsystemEWS.log"]
    type => "eMobility"
  }
  # Glastopf
  file {
    path => ["/data/glastopf/log/glastopf.log"]
    type => "Glastopf"
  }
  # Honeytrap
  file {
    path => ["/data/honeytrap/log/attackers.json"]
    codec => json
    type => "Honeytrap"
  }
  # Mailoney
  file {
    path => ["/data/mailoney/log/commands.log"]
    type => "Mailoney"
  }
  # Rdpy
  file {
    path => ["/data/rdpy/log/rdpy.log"]
    type => "Rdpy"
  }
  # Host Syslog
  file {
    path => ["/data/host/log/auth.log"]
    codec => plain
    type => "Syslog"
  }
  # Host NGINX
  file {
    path => ["/data/host/log/nginx/access.log"]
    codec => json
    type => "NGINX"
  }
  # Vnclowpot
  file {
    path => ["/data/vnclowpot/log/vnclowpot.log"]
    type => "Vnclowpot"
  }
}
# Filter section
filter {
  # Suricata
  if [type] == "Suricata" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    translate {
      refresh_interval => 86400
      field => "[alert][signature_id]"
      destination => "[alert][cve_id]"
      dictionary_path => "/etc/listbot/cve.yaml"
    }
  }
  # P0f
  if [type] == "P0f" {
    date {
      match => [ "timestamp", "yyyy'/'MM'/'dd HH:mm:ss" ]
      remove_field => ["timestamp"]
    }
    mutate {
      rename => {
        "server_port" => "dest_port"
        "server_ip" => "dest_ip"
        "client_port" => "src_port"
        "client_ip" => "src_ip"
      }
    }
  }
  # Conpot
  if [type] == "ConPot" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }
  # Cowrie
  if [type] == "Cowrie" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    mutate {
      rename => {
        "dst_port" => "dest_port"
        "dst_ip" => "dest_ip"
      }
    }
  }
  # Dionaea
  if [type] == "Dionaea" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    mutate {
      rename => {
        "dst_port" => "dest_port"
        "dst_ip" => "dest_ip"
      }
      gsub => [
        "src_ip", "::ffff:", "",
        "dest_ip", "::ffff:", ""
      ]
    }
    if [credentials] {
      mutate {
        add_field => {
          "login.username" => "%{[credentials][username]}"
          "login.password" => "%{[credentials][password]}"
        }
        remove_field => "[credentials]"
      }
    }
  }
  # ElasticPot
  if [type] == "ElasticPot" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }
  # eMobility
  if [type] == "eMobility" {
    grok {
      match => [ "message", "\A%{IP:src_ip}\.%{POSINT:src_port:integer}\|%{IP:dest_ip}\.%{POSINT:dest_port:integer}:%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424SD}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{URIPROTO:http_method}\|%{URIPATH:http_uri}\|%{TIMESTAMP_ISO8601:timestamp}" ]
    }
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }
  # Glastopf
  if [type] == "Glastopf" {
    grok {
      match => [ "message", "\A%{TIMESTAMP_ISO8601:timestamp}%{SPACE}%{NOTSPACE}%{SPACE}%{IP:src_ip}%{SPACE}%{WORD}%{SPACE}%{URIPROTO:http_method}%{SPACE}%{NOTSPACE:http_uri}%{SPACE}%{NOTSPACE}%{SPACE}%{HOSTNAME}:%{NUMBER:dest_port:integer}" ]
    }
    date {
      match => [ "timestamp", "yyyy-MM-dd HH:mm:ss,SSS" ]
      remove_field => ["timestamp"]
    }
  }
  # Honeytrap
  if [type] == "Honeytrap" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    mutate {
      rename => {
        "[attack_connection][local_port]" => "dest_port"
        "[attack_connection][local_ip]" => "dest_ip"
        "[attack_connection][remote_port]" => "src_port"
        "[attack_connection][remote_ip]" => "src_ip"
      }
    }
  }
  # Mailoney
  if [type] == "Mailoney" {
    grok {
      match => [ "message", "\A%{NAGIOSTIME}\[%{IPV4:src_ip}:%{INT:src_port:integer}] %{GREEDYDATA:smtp_input}" ]
    }
    mutate {
      add_field => {
        "dest_port" => "25"
      }
    }
    date {
      match => [ "nagios_epoch", "UNIX" ]
      remove_field => ["nagios_epoch"]
    }
  }
  # Rdpy
  if [type] == "Rdpy" {
    grok {
      match => { "message" => [ "\A%{TIMESTAMP_ISO8601:timestamp},domain:%{CISCO_REASON:domain},username:%{CISCO_REASON:username},password:%{CISCO_REASON:password},hostname:%{GREEDYDATA:hostname}", "\A%{TIMESTAMP_ISO8601:timestamp},Connection from %{IPV4:src_ip}:%{INT:src_port:integer}" ] }
    }
    date {
      match => [ "timestamp", "ISO8601" ]
      remove_field => ["timestamp"]
    }
    mutate {
      add_field => {
        "dest_port" => "3389"
      }
    }
  }
  # Syslog
  if [type] == "Syslog" {
    grok {
      match => {
        "message" => ["%{SYSLOGPAMSESSION}", "%{CRONLOG}", "%{SYSLOGLINE}"]
      }
      overwrite => "message"
    }
    date {
      match => [ "timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
      remove_field => ["timestamp"]
    }
    date {
      match => ["timestamp8601", "ISO8601"]
      remove_field => ["timestamp8601"]
    }
    grok {
      match => { "message" => "Connection closed by %{IP:src_ip}" }
      add_tag => [ "ssh_connection_closed" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Received disconnect from %{IP:src_ip}" }
      add_tag => [ "ssh_connection_disconnect" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Failed password for invalid user %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" }
      add_tag => [ "ssh_failed_password" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Did not receive identification string from %{IP:src_ip}" }
      add_tag => [ "ssh_no_id" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "User %{USERNAME:username} from %{IP:src_ip} not allowed because not listed in AllowUsers" }
      add_tag => [ "ssh_user_not_allowed" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "authentication failure; logname=%{USERNAME:logname} uid=%{BASE10NUM:uid} euid=%{BASE10NUM:euid} tty=%{TTY:tty} ruser=%{USERNAME:ruser} rhost=(?:%{HOSTNAME:remote_host}|\s*) user=%{USERNAME:user}" }
      add_tag => [ "ssh_auth_failure" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "pam_unix\(sshd:auth\): authentication failure; logname= uid=0 euid=0 tty=%{NOTSPACE:tty} ruser= rhost=(?:%{HOSTNAME:remote_host}|\s*) user=%{USERNAME:user}" }
      add_tag => [ "ssh_auth_failure" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Failed password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" }
      add_tag => [ "ssh_failed_password" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Accepted password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" }
      add_tag => [ "ssh_accepted_password" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Accepted publickey for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" }
      add_tag => [ "ssh_accepted_pubkey" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Accepted keyboard-interactive/pam for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" }
      add_tag => [ "ssh_accepted_interactive" ]
      tag_on_failure => []
    }
  }
  # NGINX
  if [type] == "NGINX" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }
  # Vnclowpot
  if [type] == "Vnclowpot" {
    grok {
      match => [ "message", "\A%{NOTSPACE}%{SPACE}%{TIME}%{SPACE}%{IPV4:src_ip}:%{INT:src_port}%{SPACE}%{NOTSPACE:vnc_handshake}" ]
    }
    date {
      match => [ "timestamp", "yyyy/MM/dd HH:mm:ss" ]
      remove_field => ["timestamp"]
    }
    mutate {
      add_field => {
        "dest_port" => "5900"
      }
    }
  }
  # Drop if parse fails
  if "_grokparsefailure" in [tags] { drop {} }
  # Add geo coordinates / ASN info / IP rep.
  if [src_ip] {
    geoip {
      cache_size => 10000
      source => "src_ip"
      database => "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/GeoLite2-City.mmdb"
    }
    geoip {
      cache_size => 10000
      source => "src_ip"
      database => "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/GeoLite2-ASN.mmdb"
    }
    translate {
      refresh_interval => 86400
      field => "src_ip"
      destination => "ip_rep"
      dictionary_path => "/etc/listbot/iprep.yaml"
    }
  }
  # In some rare conditions dest_port or src_port is indexed as a string, so force integer for now
  if [dest_port] {
    mutate {
      convert => { "dest_port" => "integer" }
    }
  }
  if [src_port] {
    mutate {
      convert => { "src_port" => "integer" }
    }
  }
  # Add T-Pot hostname and external IP
  if [type] == "ConPot" or [type] == "Cowrie" or [type] == "Dionaea" or [type] == "ElasticPot" or [type] == "eMobility" or [type] == "Glastopf" or [type] == "Honeytrap" or [type] == "Mailoney" or [type] == "Rdpy" or [type] == "Suricata" or [type] == "Vnclowpot" {
    mutate {
      add_field => {
        "t-pot_ip_ext" => "${MY_EXTIP}"
        "t-pot_ip_int" => "${MY_INTIP}"
        "t-pot_hostname" => "${MY_HOSTNAME}"
      }
    }
  }
}
# Output section
output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
  }
  if [type] == "Suricata" {
    file {
      file_mode => 0760
      path => "/data/suricata/log/suricata_ews.log"
    }
  }
  # Debug output
  #if [type] == "XYZ" {
  #  stdout {
  #    codec => rubydebug
  #  }
  #}
  # Debug output
  #stdout {
  #  codec => rubydebug
  #}
}
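
The pipeline can be syntax-checked without starting it; a quick sketch, using the paths from the Dockerfile above:

/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf --config.test_and_exit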

docker/elk/logstash/dist/update.sh vendored Normal file

@@ -0,0 +1,12 @@
#!/bin/bash
# Let's ensure normal operation on exit or if interrupted ...
function fuCLEANUP {
  exit 0
}
trap fuCLEANUP EXIT
# Download updated translation maps
cd /etc/listbot
git pull
cd /
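
Since the container CMD chains update.sh && logstash, the EXIT trap matters: it forces exit status 0 even if the git pull fails (e.g. no network yet), so Logstash still starts with the maps cloned at build time. A quick check, assuming the script is installed as in the Dockerfile:

bash /usr/bin/update.sh; echo $?   # prints 0 even when the pull failed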