Updated scripts, Logstash confs, and Elasticsearch conf; created Sysmon template

- Logstash
-- Cleaned output configurations
-- Created Sysmon template
-- Added the Sysmon template to the Sysmon Elasticsearch output
-- Removed sniffing => true from every Elasticsearch output
- Scripts
-- Updated install configuration
-- Added creation of Kibana index patterns to the install script
-- Added headers to every script except the PowerShell script
-- Renamed scripts to follow the helk_* naming standard
keyword-vs-text-changes
Roberto Rodriguez 2017-12-20 14:55:57 -05:00
parent e5f4d646fd
commit 3178c85172
14 changed files with 226 additions and 28 deletions

View File

@ -30,11 +30,11 @@
#
# Path to directory where to store the data (separate multiple locations by comma):
#
#path.data: /path/to/data
path.data: /var/lib/elasticsearch
#
# Path to log files:
#
#path.logs: /path/to/logs
path.logs: /var/log/elasticsearch
#
# ----------------------------------- Memory -----------------------------------
#
@ -69,7 +69,7 @@ network.host: localhost
#
# Prevent the "split brain" by configuring the majority of nodes (total number of master-eligible nodes / 2 + 1):
#
#discovery.zen.minimum_master_nodes: 3
#discovery.zen.minimum_master_nodes:
#
# For more information, consult the zen discovery module documentation.
#
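Since path.data and path.logs now point at the stock Debian package locations, it is worth confirming those directories exist and are owned by the elasticsearch user before restarting the node. A minimal check, assuming the standard apt package layout and Elasticsearch listening on localhost:9200:

# Verify the data and log directories exist and are owned by elasticsearch
ls -ld /var/lib/elasticsearch /var/log/elasticsearch
# Restart and confirm the node comes back healthy
sudo systemctl restart elasticsearch
curl -s 'http://localhost:9200/_cluster/health?pretty'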

View File

@ -0,0 +1,92 @@
{
  "template" : "sysmon-*",
  "settings" : {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "doc": {
      "dynamic_templates": [{
        "strings": {
          "match_mapping_type": "string",
          "mapping": {
            "type": "text",
            "norms": false,
            "fields": {
              "raw": {
                "type": "keyword",
                "ignore_above": 256
              }
            }
          }
        }
      }],
      "properties": {
        "@timestamp": {"type":"date"},
        "destination": {
          "properties": {
            "port": {
              "properties": {
                "number": {"type":"integer"}
              }
            }
          }
        },
        "event_data": {"type":"object"},
        "event_id": {"type":"integer"},
        "geoip": {
          "properties": {
            "dma_code": {"type":"integer"},
            "latitude": {"type":"float"},
            "location": {"type":"geo_point"},
            "longitude": {"type":"float"},
            "postal_code": {"type":"integer"}
          }
        },
        "network": {
          "properties": {
            "initiated": {"type":"boolean"}
          }
        },
        "process": {
          "properties": {
            "id": {"type":"integer"},
            "image": {
              "properties": {
                "signed": {"type":"boolean"}
              }
            },
            "parent": {
              "properties": {
                "id": {"type":"integer"}
              }
            },
            "source": {
              "properties": {
                "id": {"type":"integer"},
                "threadid": {"type":"integer"}
              }
            },
            "target": {
              "properties": {
                "id": {"type":"integer"}
              }
            },
            "terminalsessionid": {"type":"integer"}
          }
        },
        "process_id": {"type":"integer"},
        "source": {
          "properties": {
            "port": {
              "properties": {
                "number": {"type":"integer"}
              }
            }
          }
        },
        "thread_id": {"type":"integer"},
        "version": {"type":"integer"}
      }
    }
  }
}
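The dynamic template above is the core of the keyword-vs-text change: every string field is indexed as text for full-text search, with a .raw keyword sub-field (capped at 256 characters) for exact-match aggregations. A quick way to confirm the template is registered and to see how string fields end up mapped, assuming Elasticsearch on localhost:9200 and that the template has already been loaded:

# Show the registered sysmon template
curl -s 'http://localhost:9200/_template/sysmon?pretty'
# Inspect the mapping actually applied to live sysmon indices
curl -s 'http://localhost:9200/sysmon-*/_mapping?pretty'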

View File

@ -1,3 +1,9 @@
# HELK filter conf file
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
filter {
if [source_name] == "Microsoft-Windows-PowerShell" or [source_name] == "PowerShell"{
if [event_id] == 4103 {

View File

@ -1,3 +1,9 @@
# HELK filter conf file
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
filter {
if [log_name] == "Microsoft-Windows-Sysmon/Operational"{
if [event_id] == 1 {

View File

@ -2,9 +2,10 @@ output {
if [log_name] == "Microsoft-Windows-Sysmon/Operational"{
elasticsearch {
hosts => ["elasticsearch:9200", "127.0.0.1:9200"]
sniffing => true
manage_template => false
index => "sysmon-%{+YYYY.MM.dd}"
template => "/opt/helk/templates/sysmon-template.json"
template_name => "sysmon"
template_overwrite => true
}
}
}
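With sniffing removed, Logstash talks only to the listed hosts. Note that, depending on the plugin version, the template, template_name and template_overwrite options may only take effect while manage_template is enabled, so if the sysmon template does not appear after a restart it can be loaded by hand. A hedged sanity check, assuming the Debian package layout used elsewhere in this commit:

# Confirm the template file is where the output expects it
ls -l /opt/helk/templates/sysmon-template.json
# Validate all pipeline configs without starting Logstash
sudo /usr/share/logstash/bin/logstash --path.settings /etc/logstash --config.test_and_exit
# Fallback: load the template manually if Logstash did not install it
curl -s -XPUT -H 'Content-Type: application/json' \
  'http://localhost:9200/_template/sysmon' \
  -d @/opt/helk/templates/sysmon-template.json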

View File

@ -1,10 +1,15 @@
# HELK output conf file
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
output {
if [log_name] == "Security"{
elasticsearch {
hosts => ["elasticsearch:9200", "127.0.0.1:9200"]
sniffing => true
manage_template => false
index => "winevent-security--%{+YYYY.MM.dd}"
index => "winevent-security-%{+YYYY.MM.dd}"
}
}
}

View File

@ -1,8 +1,13 @@
# HELK output conf file
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
output {
if [log_name] == "System"{
elasticsearch {
hosts => ["elasticsearch:9200", "127.0.0.1:9200"]
sniffing => true
manage_template => false
index => "winevent-system-%{+YYYY.MM.dd}"
}

View File

@ -1,8 +1,13 @@
# HELK output conf file
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
output {
if [log_name] == "Application"{
elasticsearch {
hosts => ["elasticsearch:9200", "127.0.0.1:9200"]
sniffing => true
manage_template => false
index => "winevent-application-%{+YYYY.MM.dd}"
}

View File

@ -1,8 +1,13 @@
# HELK output conf file
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
output {
if [source_name] == "Microsoft-Windows-PowerShell" or [source_name] == "PowerShell"{
elasticsearch {
hosts => ["elasticsearch:9200", "127.0.0.1:9200"]
sniffing => true
manage_template => false
index => "powershell-%{+YYYY.MM.dd}"
}
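Each Windows channel now writes to its own dated index (sysmon-*, winevent-security-*, winevent-system-*, winevent-application-*, powershell-*). Once events are flowing, the index names, including the corrected single hyphen in winevent-security-*, can be confirmed with one _cat call, assuming Elasticsearch on localhost:9200:

curl -s 'http://localhost:9200/_cat/indices/sysmon-*,winevent-*,powershell-*?v'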

View File

@ -1,10 +1,12 @@
#!/bin/bash
# Docker & Docker-compose Installation Script
# Author: Roberto Rodriguez @Cyb3rWard0g
# HELK script: helk_docker_install.sh
# HELK script description: Installs Docker & Docker-Compose on your HELK server.
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
# Description: This script installs Docker & Docker-Compose on your HELK server.
# ELK version: 5x
# References:
# https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-16-04
# https://www.digitalocean.com/community/tutorials/how-to-install-docker-compose-on-ubuntu-16-04
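After the script finishes, the installation can be spot-checked with the standard client commands; this is a generic verification, not part of the script itself:

docker --version
docker-compose --version
sudo docker run --rm hello-world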

View File

@ -1,9 +1,17 @@
#!/bin/bash
# HELK script: helk_geoipdb_update.sh
# HELK script description: Update the MaxMind GeoIP databases
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
# References:
# HELK Supporting script
#
# Original script from: SOF-ELK Supporting script (C)2016 Lewes Technology Consulting, LLC
#
# This script is used to update the MaxMind GeoIP databases
# SOF-ELK Supporting script (C)2016 Lewes Technology Consulting, LLC
# https://github.com/philhagen/sof-elk/blob/develop/supporting-scripts/geoip_update.sh
GEOIP_LIBDIR=/etc/logstash/geoip
GEOIP_CITYSOURCEURL=http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz
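Only the variable block is shown in this hunk; the rest follows the SOF-ELK original linked above. Conceptually the update reduces to fetching the MaxMind archive and dropping the decompressed .mmdb into the Logstash GeoIP directory, roughly like this sketch (not the script's exact code):

# Rough sketch of the core update step
mkdir -p "$GEOIP_LIBDIR"
curl -fsSL "$GEOIP_CITYSOURCEURL" | gunzip > "$GEOIP_LIBDIR/GeoLite2-City.mmdb"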

View File

@ -1,12 +1,14 @@
#!/bin/bash
# HELK Installation Script (Elasticsearch, Logstash, Kibana & Nginx)
# HELK build version: 0.9 (BETA Script)
# Author: Roberto Rodriguez @Cyb3rWard0g
# HELK script: helk_install.sh
# HELK script description: Install all the needed components of the HELK
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
# Description: This script installs every single component of the ELK Stack plus Nginx
# ELK version: 5x
# Blog: https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html
# References:
# https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html
LOGFILE="/var/log/helk-install.log"
@ -47,7 +49,7 @@ ERROR=$?
fi
echo "[HELK INFO] Adding elastic packages source list definitions to your sources list.."
echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-5.x.list >> $LOGFILE 2>&1
echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-6.x.list >> $LOGFILE 2>&1
ERROR=$?
if [ $ERROR -ne 0 ]; then
echoerror "Could not add elastic packages source list definitions to your source list (Error Code: $ERROR)."
@ -129,6 +131,9 @@ ERROR=$?
echoerror "Could not start kibana and set kibana to start automatically when the system boots (Error Code: $ERROR)."
fi
echo "[HELK INFO] Creating Index Patterns in Kibana"
# *********** Installing Nginx ***************
echo "[HELK INFO] Installing Nginx.."
apt-get -y install nginx >> $LOGFILE 2>&1
@ -219,9 +224,9 @@ ERROR=$?
fi
# *********** Creating Cron Job to run OTX script every monday at 8AM and capture last 30 days of Intel *************
echo "[HELK INFO] Creating a cronjob for OTX intel script"
mkdir /opt/helk/scripts
cp -v otx_helk.py /opt/helk/scripts/
cronjob="0 8 * * 1 python /opt/helk/scripts/otx_helk.py"
mkdir /opt/helk/scripts >> $LOGFILE 2>&1
cp -v helk_otx.py /opt/helk/scripts/ >> $LOGFILE 2>&1
cronjob="0 8 * * 1 python /opt/helk/scripts/helk_otx.py"
echo "$cronjob" | crontab - >> $LOGFILE 2>&1
ERROR=$?
if [ $ERROR -ne 0 ]; then
@ -236,6 +241,14 @@ ERROR=$?
echoerror "Could not install logstash (Error Code: $ERROR)."
fi
echo "[HELK INFO] Creating templates directory and copying custom templates over.."
mkdir /opt/helk/templates >> $LOGFILE 2>&1
cp -v ../logstash/output_templates/* /opt/helk/templates/ >> $LOGFILE 2>&1
ERROR=$?
if [ $ERROR -ne 0 ]; then
echoerror "Could not create templates directory and copy custom templates over (Error Code: $ERROR)."
fi
echo "[HELK INFO] Copying logstash's .conf files.."
cp -av ../logstash/pipeline/* /etc/logstash/conf.d/ >> $LOGFILE 2>&1
@ -254,6 +267,14 @@ ERROR=$?
echoerror "Could not start logstash and set it to start automatically when the system boots (Error Code: $ERROR)"
fi
# *********** Create Kibana Index Patterns ***************
echo "[HELK INFO] Creating Kibana index patterns automatically.."
bash ./helk_kibana_index_pattern_creation.sh >> $LOGFILE 2>&1
ERROR=$?
if [ $ERROR -ne 0 ]; then
echoerror "Could not create kibana index patterns (Error Code: $ERROR)."
fi
echo "**********************************************************************************************************"
echo "[HELK INFO] Your HELK has been installed"
echo "[HELK INFO] Browse to your host IP address from a different computer and enter the following credentials:"

View File

@ -0,0 +1,34 @@
#!/usr/bin/env bash
# HELK script: helk_kibana_index_pattern_creation.sh
# HELK script description: Creates Kibana index patterns automatically.
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
# References:
# https://github.com/elastic/kibana/issues/3709 (https://github.com/hobti01)
# https://explainshell.com/explain?cmd=set+-euxo%20pipefail
set -euo pipefail
url="http://localhost:5601"
declare -a index_patterns=(
  "sysmon-*"
  "winevent-security-*"
  "winevent-system-*"
  "winevent-application-*"
  "powershell-*"
)
time_field="@timestamp"
# Create every index pattern through the Kibana saved objects API
# curl -f makes each request fail on HTTP errors
for index in "${index_patterns[@]}"; do
  curl -f -XPOST -H "Content-Type: application/json" -H "kbn-xsrf: anything" \
    "$url/api/saved_objects/index-pattern/${index}" \
    -d"{\"attributes\":{\"title\":\"${index}\",\"timeFieldName\":\"$time_field\"}}"
done
# Make Sysmon the default index
curl -XPOST -H "Content-Type: application/json" -H "kbn-xsrf: anything" \
"$url/api/kibana/settings/defaultIndex" \
-d"{\"value\":\"sysmon-*\"}"

scripts/otx_helk.py → scripts/helk_otx.py Normal file → Executable file
View File

@ -1,4 +1,12 @@
#!/usr/bin/env python
# HELK script: helk_otx.py
# HELK script description: Pulling intelligence from OTX (AlienVault)
# HELK build version: 0.9 (BETA)
# HELK ELK version: 6.x
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: BSD 3-Clause
from OTXv2 import OTXv2
from pandas.io.json import json_normalize