HELK v0.1.3-alpha08032018

All
+ Moved all to docker folder. Getting ready to start sharing other ways to deploy helk (terraform & Packer maybe)

Compose-files
+ Basic & Trial Elastic Subscriptions available now and can be automatically managed via the helk_install script

ELK Version : 6.3.2

Elasticsearch
+ Set 4GB for ES_JAVA_OPTS by default allowing the modification of it via docker-compose and calculating half of the host memory if it is not set
+ Added Entrypoint script and using docker-entrypoint to start ES

Logstash
+ Big Pipeline Update by Nate Guagenti (@neu5ron)
++better cli & file name searching
++"dst_ip_public:true" filters out all rfc1918/non-routable addresses
++Geo ASName
++Identification of 16+ windows IP fields
++Arrayed IPs support
++IPv6&IPv4 differentiation
++removing "-" values and MORE!!!
++ THANK YOU SO MUCH NATE!!!
++ PR: https://github.com/Cyb3rWard0g/HELK/pull/93
+ Added entrypoint script to push new output_templates straight to Elasticsearch per Nate's recommendation
+ Starting Logstash now with docker-entrypoint
+ "event_data" is now taken out of winlogbeat logs to allow integration with nxlog (sauce added by Nate Guagenti (@neu5ron)

Kibana
+ Kibana yml file updated to allow a longer time for timeout

Nginx:
+ it handles communications to Kibana and Jupyterhub via port 443 SSL
+ certificate and key get created at build time
+ Nate added several settings to improve the way how nginx operates

Jupyterhub
+ Multiple users and multiple notebooks open at the same time are possible now
+ Jupyterhub now has 3 users: hunter1, hunter2, hunter3, and the password pattern is <user>P@ssw0rd!
+ Every notebook created is also JupyterLab
+ Updated ES-Hadoop to 6.3.2

Kafka Update
+ 1.1.1 Update

Spark Master + Brokers
+ Reduced memory for brokers by default to 512m

Resources:
+ Added new images for Wiki
keyword-vs-text-changes v0.1.2-alpha08032018
Roberto Rodriguez 2018-08-03 11:13:25 -07:00
parent c7af8e42bc
commit 634e24e3aa
164 changed files with 5319 additions and 1601 deletions

View File

@ -2,13 +2,15 @@ version: '3'
services: services:
helk-elasticsearch: helk-elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:6.3.1 image: docker.elastic.co/elasticsearch/elasticsearch:6.3.2
container_name: helk-elasticsearch container_name: helk-elasticsearch
volumes: volumes:
- ./helk-elasticsearch/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml - ./helk-elasticsearch/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
- esdata:/usr/share/elasticsearch/data - esdata:/usr/share/elasticsearch/data
- ./helk-elasticsearch/scripts:/usr/share/elasticsearch/scripts
entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh
environment: environment:
- "ES_JAVA_OPTS=-Xms6g -Xmx6g" - "ES_JAVA_OPTS=-Xms4g -Xmx4g"
ulimits: ulimits:
memlock: memlock:
soft: -1 soft: -1
@ -19,15 +21,17 @@ services:
aliases: aliases:
- helk_elasticsearch.hunt.local - helk_elasticsearch.hunt.local
helk-logstash: helk-logstash:
image: docker.elastic.co/logstash/logstash:6.3.1 image: docker.elastic.co/logstash/logstash:6.3.2
container_name: helk-logstash container_name: helk-logstash
volumes: volumes:
- ./helk-logstash/logstash.yml:/usr/share/logstash/config/logstash.yml - ./helk-logstash/logstash.yml:/usr/share/logstash/config/logstash.yml
- ./helk-logstash/pipeline:/usr/share/logstash/pipeline - ./helk-logstash/pipeline:/usr/share/logstash/pipeline
- ./helk-logstash/output_templates:/usr/share/logstash/output_templates - ./helk-logstash/output_templates:/usr/share/logstash/output_templates
- ./helk-logstash/enrichments/cti:/usr/share/logstash/cti - ./helk-logstash/enrichments/cti:/usr/share/logstash/cti
- ./helk-logstash/scripts:/usr/share/logstash/scripts
environment: environment:
- "LS_JAVA_OPTS=-Xms2g -Xmx2g" - "LS_JAVA_OPTS=-Xms1g -Xmx1g"
entrypoint: /usr/share/logstash/scripts/logstash-entrypoint.sh
restart: always restart: always
depends_on: depends_on:
- helk-elasticsearch - helk-elasticsearch
@ -36,7 +40,7 @@ services:
aliases: aliases:
- helk_logstash.hunt.local - helk_logstash.hunt.local
helk-kibana: helk-kibana:
image: docker.elastic.co/kibana/kibana:6.3.1 image: docker.elastic.co/kibana/kibana:6.3.2
container_name: helk-kibana container_name: helk-kibana
volumes: volumes:
- ./helk-kibana/kibana.yml:/usr/share/kibana/config/kibana.yml - ./helk-kibana/kibana.yml:/usr/share/kibana/config/kibana.yml
@ -51,13 +55,16 @@ services:
aliases: aliases:
- helk_kibana.hunt.local - helk_kibana.hunt.local
helk-nginx: helk-nginx:
image: cyb3rward0g/helk-nginx:0.0.3 image: cyb3rward0g/helk-nginx:0.0.6
container_name: helk-nginx container_name: helk-nginx
volumes: volumes:
- ./helk-nginx/htpasswd.users:/etc/nginx/htpasswd.users - ./helk-nginx/htpasswd.users:/etc/nginx/htpasswd.users
- ./helk-nginx/default:/etc/nginx/sites-available/default - ./helk-nginx/default:/etc/nginx/sites-available/default
- ./helk-nginx/scripts/:/opt/helk/scripts/
entrypoint: /opt/helk/scripts/nginx-entrypoint.sh
ports: ports:
- "80:80" - "80:80"
- "443:443"
restart: always restart: always
depends_on: depends_on:
- helk-kibana - helk-kibana
@ -65,12 +72,24 @@ services:
helk: helk:
aliases: aliases:
- helk_nginx.hunt.local - helk_nginx.hunt.local
helk-jupyter:
image: cyb3rward0g/helk-jupyter:0.0.4
container_name: helk-jupyter
restart: always
depends_on:
- helk-nginx
networks:
helk:
aliases:
- helk_jupyter.hunt.local
helk-spark-master: helk-spark-master:
image: cyb3rward0g/helk-spark-master:2.3.1 image: cyb3rward0g/helk-spark-master:2.3.1-a
container_name: helk-spark-master container_name: helk-spark-master
environment:
- SPARK_MASTER_PORT=7077
- SPARK_MASTER_WEBUI_PORT=8080
ports: ports:
- "8080:8080" - "8080:8080"
- "7077:7077"
restart: always restart: always
depends_on: depends_on:
- helk-elasticsearch - helk-elasticsearch
@ -79,11 +98,13 @@ services:
aliases: aliases:
- helk_spark_master.hunt.local - helk_spark_master.hunt.local
helk-spark-worker: helk-spark-worker:
image: cyb3rward0g/helk-spark-worker:2.3.1 image: cyb3rward0g/helk-spark-worker:2.3.1-a
container_name: helk-spark-worker container_name: helk-spark-worker
environment: environment:
- SPARK_WORKER_MEMORY=1g - SPARK_MASTER=spark://helk-spark-master:7077
- SPARK_WORKER_MEMORY=512m
- SPARK_WORKER_WEBUI_PORT=8081 - SPARK_WORKER_WEBUI_PORT=8081
- SPARK_WORKER_PORT=42950
ports: ports:
- "8081:8081" - "8081:8081"
restart: always restart: always
@ -94,11 +115,13 @@ services:
aliases: aliases:
- helk_spark_worker.hunt.local - helk_spark_worker.hunt.local
helk-spark-worker2: helk-spark-worker2:
image: cyb3rward0g/helk-spark-worker:2.3.1 image: cyb3rward0g/helk-spark-worker:2.3.1-a
container_name: helk-spark-worker2 container_name: helk-spark-worker2
environment: environment:
- SPARK_WORKER_MEMORY=1g - SPARK_MASTER=spark://helk-spark-master:7077
- SPARK_WORKER_MEMORY=512m
- SPARK_WORKER_WEBUI_PORT=8082 - SPARK_WORKER_WEBUI_PORT=8082
- SPARK_WORKER_PORT=42951
ports: ports:
- "8082:8082" - "8082:8082"
restart: always restart: always
@ -108,33 +131,20 @@ services:
helk: helk:
aliases: aliases:
- helk_spark_worker2.hunt.local - helk_spark_worker2.hunt.local
helk-jupyter: helk-zookeeper:
image: cyb3rward0g/helk-jupyter:0.0.2 image: cyb3rward0g/helk-zookeeper:1.1.1
container_name: helk-jupyter container_name: helk-zookeeper
ports: ports:
- "8880:8880" - "2181:2181"
- "4040-4050:4040-4050"
restart: always restart: always
depends_on: depends_on:
- helk-kibana - helk-kibana
networks:
helk:
aliases:
- helk_jupyter.hunt.local
helk-zookeeper:
image: cyb3rward0g/helk-zookeeper:3.4.10
container_name: helk-zookeeper
ports:
- "2181:2181"
restart: always
depends_on:
- helk-elasticsearch
networks: networks:
helk: helk:
aliases: aliases:
- helk_zookeeper.hunt.local - helk_zookeeper.hunt.local
helk-kafka-broker: helk-kafka-broker:
image: cyb3rward0g/helk-kafka-broker:1.1.0 image: cyb3rward0g/helk-kafka-broker:1.1.1
container_name: helk-kafka-broker container_name: helk-kafka-broker
restart: always restart: always
depends_on: depends_on:
@ -153,7 +163,7 @@ services:
aliases: aliases:
- helk_kafka_broker.hunt.local - helk_kafka_broker.hunt.local
helk-kafka-broker2: helk-kafka-broker2:
image: cyb3rward0g/helk-kafka-broker:1.1.0 image: cyb3rward0g/helk-kafka-broker:1.1.1
container_name: helk-kafka-broker2 container_name: helk-kafka-broker2
restart: always restart: always
depends_on: depends_on:
@ -170,7 +180,7 @@ services:
networks: networks:
helk: helk:
aliases: aliases:
- helk_kafka_broker.hunt.local - helk_kafka_broker2.hunt.local
helk-sigma: helk-sigma:
image: thomaspatzke/helk-sigma image: thomaspatzke/helk-sigma
container_name: helk-sigma container_name: helk-sigma

View File

@ -7,8 +7,10 @@ services:
volumes: volumes:
- ./helk-elasticsearch/trial/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml - ./helk-elasticsearch/trial/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
- esdata:/usr/share/elasticsearch/data - esdata:/usr/share/elasticsearch/data
- ./helk-elasticsearch/scripts:/usr/share/elasticsearch/scripts
entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh
environment: environment:
- "ES_JAVA_OPTS=-Xms6g -Xmx6g" - "ES_JAVA_OPTS=-Xms4g -Xmx4g"
ulimits: ulimits:
memlock: memlock:
soft: -1 soft: -1
@ -19,15 +21,17 @@ services:
aliases: aliases:
- helk_elasticsearch.hunt.local - helk_elasticsearch.hunt.local
helk-logstash: helk-logstash:
image: docker.elastic.co/logstash/logstash:6.3.1 image: docker.elastic.co/logstash/logstash:6.3.2
container_name: helk-logstash container_name: helk-logstash
volumes: volumes:
- ./helk-logstash/trial/logstash.yml:/usr/share/logstash/config/logstash.yml - ./helk-logstash/trial/logstash.yml:/usr/share/logstash/config/logstash.yml
- ./helk-logstash/trial/pipeline:/usr/share/logstash/pipeline - ./helk-logstash/trial/pipeline:/usr/share/logstash/pipeline
- ./helk-logstash/output_templates:/usr/share/logstash/output_templates - ./helk-logstash/output_templates:/usr/share/logstash/output_templates
- ./helk-logstash/enrichments/cti:/usr/share/logstash/cti - ./helk-logstash/enrichments/cti:/usr/share/logstash/cti
- ./helk-logstash/trial/scripts:/usr/share/logstash/scripts
environment: environment:
- "LS_JAVA_OPTS=-Xms2g -Xmx2g" - "LS_JAVA_OPTS=-Xms1g -Xmx1g"
entrypoint: /usr/share/logstash/scripts/logstash-entrypoint.sh
restart: always restart: always
depends_on: depends_on:
- helk-elasticsearch - helk-elasticsearch
@ -36,7 +40,7 @@ services:
aliases: aliases:
- helk_logstash.hunt.local - helk_logstash.hunt.local
helk-kibana: helk-kibana:
image: docker.elastic.co/kibana/kibana:6.3.1 image: docker.elastic.co/kibana/kibana:6.3.2
container_name: helk-kibana container_name: helk-kibana
volumes: volumes:
- ./helk-kibana/trial/kibana.yml:/usr/share/kibana/config/kibana.yml - ./helk-kibana/trial/kibana.yml:/usr/share/kibana/config/kibana.yml
@ -51,12 +55,15 @@ services:
aliases: aliases:
- helk_kibana.hunt.local - helk_kibana.hunt.local
helk-nginx: helk-nginx:
image: cyb3rward0g/helk-nginx:0.0.3 image: cyb3rward0g/helk-nginx:0.0.6
container_name: helk-nginx container_name: helk-nginx
volumes: volumes:
- ./helk-nginx/trial/default:/etc/nginx/sites-available/default - ./helk-nginx/trial/default:/etc/nginx/sites-available/default
- ./helk-nginx/scripts/:/opt/helk/scripts/
entrypoint: /opt/helk/scripts/nginx-entrypoint.sh
ports: ports:
- "80:80" - "80:80"
- "443:443"
restart: always restart: always
depends_on: depends_on:
- helk-kibana - helk-kibana
@ -64,12 +71,24 @@ services:
helk: helk:
aliases: aliases:
- helk_nginx.hunt.local - helk_nginx.hunt.local
helk-jupyter:
image: cyb3rward0g/helk-jupyter:0.0.4
container_name: helk-jupyter
restart: always
depends_on:
- helk-nginx
networks:
helk:
aliases:
- helk_jupyter.hunt.local
helk-spark-master: helk-spark-master:
image: cyb3rward0g/helk-spark-master:2.3.1 image: cyb3rward0g/helk-spark-master:2.3.1-a
container_name: helk-spark-master container_name: helk-spark-master
environment:
- SPARK_MASTER_PORT=7077
- SPARK_MASTER_WEBUI_PORT=8080
ports: ports:
- "8080:8080" - "8080:8080"
- "7077:7077"
restart: always restart: always
depends_on: depends_on:
- helk-elasticsearch - helk-elasticsearch
@ -78,11 +97,13 @@ services:
aliases: aliases:
- helk_spark_master.hunt.local - helk_spark_master.hunt.local
helk-spark-worker: helk-spark-worker:
image: cyb3rward0g/helk-spark-worker:2.3.1 image: cyb3rward0g/helk-spark-worker:2.3.1-a
container_name: helk-spark-worker container_name: helk-spark-worker
environment: environment:
- SPARK_WORKER_MEMORY=1g - SPARK_MASTER=spark://helk-spark-master:7077
- SPARK_WORKER_MEMORY=512m
- SPARK_WORKER_WEBUI_PORT=8081 - SPARK_WORKER_WEBUI_PORT=8081
- SPARK_WORKER_PORT=42950
ports: ports:
- "8081:8081" - "8081:8081"
restart: always restart: always
@ -93,11 +114,13 @@ services:
aliases: aliases:
- helk_spark_worker.hunt.local - helk_spark_worker.hunt.local
helk-spark-worker2: helk-spark-worker2:
image: cyb3rward0g/helk-spark-worker:2.3.1 image: cyb3rward0g/helk-spark-worker:2.3.1-a
container_name: helk-spark-worker2 container_name: helk-spark-worker2
environment: environment:
- SPARK_WORKER_MEMORY=1g - SPARK_MASTER=spark://helk-spark-master:7077
- SPARK_WORKER_MEMORY=512m
- SPARK_WORKER_WEBUI_PORT=8082 - SPARK_WORKER_WEBUI_PORT=8082
- SPARK_WORKER_PORT=42951
ports: ports:
- "8082:8082" - "8082:8082"
restart: always restart: always
@ -107,21 +130,8 @@ services:
helk: helk:
aliases: aliases:
- helk_spark_worker2.hunt.local - helk_spark_worker2.hunt.local
helk-jupyter:
image: cyb3rward0g/helk-jupyter:0.0.2
container_name: helk-jupyter
ports:
- "8880:8880"
- "4040-4050:4040-4050"
restart: always
depends_on:
- helk-kibana
networks:
helk:
aliases:
- helk_jupyter.hunt.local
helk-zookeeper: helk-zookeeper:
image: cyb3rward0g/helk-zookeeper:3.4.10 image: cyb3rward0g/helk-zookeeper:1.1.1
container_name: helk-zookeeper container_name: helk-zookeeper
ports: ports:
- "2181:2181" - "2181:2181"
@ -133,7 +143,7 @@ services:
aliases: aliases:
- helk_zookeeper.hunt.local - helk_zookeeper.hunt.local
helk-kafka-broker: helk-kafka-broker:
image: cyb3rward0g/helk-kafka-broker:1.1.0 image: cyb3rward0g/helk-kafka-broker:1.1.1
container_name: helk-kafka-broker container_name: helk-kafka-broker
restart: always restart: always
depends_on: depends_on:
@ -152,7 +162,7 @@ services:
aliases: aliases:
- helk_kafka_broker.hunt.local - helk_kafka_broker.hunt.local
helk-kafka-broker2: helk-kafka-broker2:
image: cyb3rward0g/helk-kafka-broker:1.1.0 image: cyb3rward0g/helk-kafka-broker:1.1.1
container_name: helk-kafka-broker2 container_name: helk-kafka-broker2
restart: always restart: always
depends_on: depends_on:

View File

@ -1,12 +1,12 @@
# HELK script: HELK Elasticsearch Dockerfile # HELK script: HELK Elasticsearch Dockerfile
# HELK build Stage: Alpha # HELK build Stage: Alpha
# HELK ELK version: 6.3.1 # HELK ELK version: 6.3.2
# Author: Roberto Rodriguez (@Cyb3rWard0g) # Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0 # License: GPL-3.0
# References: # References:
# https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html # https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html
FROM docker.elastic.co/elasticsearch/elasticsearch:6.3.1 FROM docker.elastic.co/elasticsearch/elasticsearch:6.3.2
LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g"
LABEL description="Dockerfile base for the HELK Elasticsearch." LABEL description="Dockerfile base for the HELK Elasticsearch."

View File

@ -0,0 +1,21 @@
#!/bin/bash
# HELK script: elasticsearch-entrypoint.sh
# HELK script description: sets elasticsearch configs and starts elasticsearch
# HELK build Stage: Alpha
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0
# *********** Looking for ES ***************
if [[ ! -z "$ES_JAVA_OPTS" ]]; then
echo "[HELK-DOCKER-INSTALLATION-INFO] Setting ES_JAVA_OPTS to $ES_JAVA_OPTS"
else
# ****** Setup heap size and memory locking *****
ES_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/1024/2}' /proc/meminfo)
echo "[HELK-DOCKER-INSTALLATION-INFO] Setting ES_HEAP_SIZE to ${ES_MEMORY}.."
export ES_JAVA_OPTS="-Xms${ES_MEMORY}g -Xmx${ES_MEMORY}g"
fi
# ********** Starting Elasticsearch *****************
echo "[HELK-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.."
/usr/local/bin/docker-entrypoint.sh

View File

@ -1,15 +1,13 @@
# HELK script: HELK Elasticsearch Dockerfile # HELK script: HELK Elasticsearch Dockerfile
# HELK build Stage: Alpha # HELK build Stage: Alpha
# HELK ELK version: 6.3.1 # HELK ELK version: 6.3.2
# Author: Roberto Rodriguez (@Cyb3rWard0g) # Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0 # License: GPL-3.0
# References: # References:
# https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html # https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html
# *********** ELK Version *************** FROM docker.elastic.co/elasticsearch/elasticsearch:6.3.2
FROM docker.elastic.co/elasticsearch/elasticsearch:6.3.1
LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g"
LABEL description="Dockerfile base for the HELK Elasticsearch." LABEL description="Dockerfile base for the HELK Elasticsearch."

View File

@ -14,10 +14,10 @@ USER root
# *********** Installing Prerequisites *************** # *********** Installing Prerequisites ***************
# -qq : No output except for errors # -qq : No output except for errors
RUN echo "[HELK-DOCKER-INSTALLATION-INFO] Extracting templates from packages.." \ RUN echo "[HELK-DOCKER-INSTALLATION-INFO] Extracting templates from packages.." \
&& apt-get install -qqy \ && apt-get install -qqy --no-install-recommends \
python3-pip \ curl python3-pip python3-dev python-tk unzip python3-setuptools \
python-tk \ libcurl4-openssl-dev build-essential libssl-dev libffi-dev \
unzip libxml2-dev libxslt1-dev zlib1g-dev
RUN apt-get -qy clean \ RUN apt-get -qy clean \
autoremove autoremove
@ -27,55 +27,41 @@ RUN pip3 install --upgrade pip
# *********** Installing Jupyter Hub Prerequisites # *********** Installing Jupyter Hub Prerequisites
RUN curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash - RUN curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash -
RUN apt-get install -y nodejs RUN apt-get install -y --no-install-recommends nodejs
# *********** Installing HELK python packages *************** # *********** Installing HELK python packages ***************
RUN pip3 install pandas \ RUN pip3 install pandas \
jupyter \ jupyter \
jupyterlab \ jupyterlab==0.33.4 \
jupyterhub jupyterhub==0.9.1
# *********** Installing Jupyter Lab Extension - JupyterHub ***************
RUN npm install -g configurable-http-proxy RUN npm install -g configurable-http-proxy
RUN jupyter labextension install @jupyterlab/hub-extension RUN jupyter labextension install @jupyterlab/hub-extension@0.10.0
# *********** Creating the right directories *************** # *********** Creating the Jupyter directories ***************
RUN bash -c 'mkdir -pv /opt/helk/{es-hadoop,jupyter}' RUN bash -c 'mkdir -pv /opt/helk/{es-hadoop,jupyter,jupyterhub}'
RUN mkdir -v /usr/local/share/jupyter/kernels/pyspark3
RUN mkdir -v /var/log/spark
# *********** Setting Jupyterhub*********************** # *********** Configure Jupyterhub ***************
ENV JUPYTER_DIR=/opt/helk/jupyter ENV JUPYTER_DIR=/opt/helk/jupyter
# *********** Adding HELK scripts and files to Container *************** # *********** Adding HELK scripts and files to Container ***************
ADD scripts/jupyter-entrypoint.sh ${JUPYTER_DIR} COPY scripts/jupyter-entrypoint.sh ${JUPYTER_DIR}
ADD notebooks ${JUPYTER_DIR}/notebooks COPY notebooks ${JUPYTER_DIR}/notebooks
COPY spark/* ${SPARK_HOME}/conf/
COPY kernels/pyspark_kernel.json /usr/local/share/jupyter/kernels/pyspark3/kernel.json
COPY jupyterhub/jupyterhub_config.py /opt/helk/jupyter/
# *********** Download ES-Hadoop *************** # *********** Download ES-Hadoop ***************
ENV ESHADOOP_VERSION=6.3.1 ENV ESHADOOP_VERSION=6.3.2
RUN wget https://artifacts.elastic.co/downloads/elasticsearch-hadoop/elasticsearch-hadoop-${ESHADOOP_VERSION}.zip -P /opt/helk/es-hadoop/ \ RUN wget https://artifacts.elastic.co/downloads/elasticsearch-hadoop/elasticsearch-hadoop-${ESHADOOP_VERSION}.zip -P /opt/helk/es-hadoop/ \
&& unzip -j /opt/helk/es-hadoop/*.zip -d /opt/helk/es-hadoop/ \ && unzip -j /opt/helk/es-hadoop/*.zip -d /opt/helk/es-hadoop/ \
&& rm /opt/helk/es-hadoop/*.zip && rm /opt/helk/es-hadoop/*.zip
# *********** Configure Jupyterhub *************** EXPOSE 8000
ENV JUPYTER_LOGS_PATH=${JUPYTER_DIR}/log
ENV JUPYTER_CONSOLE_LOG=${JUPYTER_LOGS_PATH}/jupyter.log
ENV JUPYTER_EXEC=$SPARK_HOME/bin/pyspark
ENV JUPYTER_LOGS=">> $JUPYTER_CONSOLE_LOG 2>&1"
RUN mkdir -v $JUPYTER_LOGS_PATH
ADD spark/log4j.properties ${SPARK_HOME}/conf/
ADD spark/spark-defaults.conf ${SPARK_HOME}/conf/
# *********** Update Jupyter PySpark Kernel *************
#ADD kernels/pyspark_kernel.json /usr/local/share/jupyter/kernels/python3/kernel.json
# ************* Adding SPARK environment variables *************
ENV PATH=$SPARK_HOME/bin:$PATH
ENV PYSPARK_PYTHON=/usr/bin/python3
ENV PYSPARK_DRIVER_PYTHON=/usr/local/bin/jupyter
ENV PYTHONPATH $SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.7-src.zip
ENV PYSPARK_DRIVER_PYTHON_OPTS="lab --no-browser --ip=* --port=8880 --allow-root --notebook-dir=/opt/helk/jupyter/notebooks"
EXPOSE 4040 8880
# *********** RUN HELK *************** # *********** RUN HELK ***************
WORKDIR ${JUPYTER_DIR} WORKDIR ${JUPYTER_DIR}
ENTRYPOINT ["./jupyter-entrypoint.sh"] ENTRYPOINT ["./jupyter-entrypoint.sh"]
CMD ["/bin/bash","-c","/usr/local/bin/jupyterhub","-f","/opt/helk/jupyter/jupyterhub_config.py"]

View File

@ -0,0 +1,17 @@
# HELK script: HELK JupyterHub Config
# HELK build Stage: Alpha
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0
#c = get_config()
c.JupyterHub.log_level = 10
c.Authenticator.whitelist = {'hunter1','hunter2','hunter3'}
c.Authenticator.admin_users = {'hunter1'}
c.Spawner.cmd = ['jupyter-labhub']
c.Spawner.notebook_dir = '/opt/helk/jupyterhub'
c.JupyterHub.hub_ip = 'helk-jupyter'
c.JupyterHub.port = 8000
c.JupyterHub.base_url = '/jupyter'

View File

@ -0,0 +1,17 @@
{
"display_name": "PySpark_Python3",
"language": "python",
"argv": [
"/usr/bin/python3",
"-m",
"ipykernel_launcher",
"-f",
"{connection_file}"
],
"env": {
"SPARK_HOME": "/opt/helk/spark/",
"PYTHONPATH": "/opt/helk/spark/python/:/opt/helk/spark/python/lib/py4j-0.10.7-src.zip",
"PYTHONSTARTUP": "/opt/helk/spark/python/pyspark/shell.py",
"PYSPARK_PYTHON": "/usr/bin/python3"
}
}

View File

@ -0,0 +1,41 @@
#!/bin/bash
# HELK script: jupyter-entryppoint.sh
# HELK script description: Creates JupyterHub Users
# HELK build Stage: Alpha
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0
# ************* Creating JupyterHub Users ***************
declare -a users_index=("hunter1" "hunter2" "hunter3")
JUPYTERHUB_GID=711
JUPYTERHUB_UID=711
JUPYTERHUB_HOME=/opt/helk/jupyterhub
JUPYTER_HOME=/opt/helk/jupyter
echo "[HELK-JUPYTER-DOCKER-INSTALLATION-INFO] Creating JupyterHub Group..."
groupadd -g ${JUPYTERHUB_GID} jupyterhub
for u in ${users_index[@]}; do
echo "[HELK-JUPYTER-DOCKER-INSTALLATION-INFO] Creating JupyterHub user ${u} .."
student_password="${u}P@ssw0rd!"
echo $student_password >> /opt/helk/user_credentials.txt
JUPYTERHUB_USER_DIRECTORY=${JUPYTERHUB_HOME}/${u}
mkdir -v $JUPYTERHUB_USER_DIRECTORY
useradd -p $(openssl passwd -1 ${student_password}) -u ${JUPYTERHUB_UID} -g ${JUPYTERHUB_GID} -d $JUPYTERHUB_USER_DIRECTORY --no-create-home -s /bin/bash ${u}
echo "[HELK-JUPYTER-DOCKER-INSTALLATION-INFO] copying notebooks to ${JUPYTERHUB_USER_DIRECTORY} notebooks directory ..."
cp -R ${JUPYTER_HOME}/notebooks ${JUPYTERHUB_USER_DIRECTORY}/notebooks
chown -R ${u}:jupyterhub $JUPYTERHUB_USER_DIRECTORY
chmod 700 -R $JUPYTERHUB_USER_DIRECTORY
((JUPYTERHUB_UID=$JUPYTERHUB_UID + 1))
done
chmod 777 -R /var/log/spark
chmod 777 -R /opt/helk/spark
exec "$@"

View File

@ -37,4 +37,4 @@ log4j.logger.parquet=ERROR
# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR

View File

@ -0,0 +1,39 @@
# HELK build Stage: Alpha
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0
# HELK References:
# https://spark.apache.org/docs/latest/configuration.html
# https://graphframes.github.io/quick-start.html
# https://spark-packages.org/package/graphframes/graphframes
# https://spark.apache.org/docs/latest/sql-programming-guide.html#pyspark-usage-guide-for-pandas-with-apache-arrow
# ************ Application Properties ****************
# Logs the effective SparkConf as INFO when a SparkContext is started. Default: false
spark.logConf true
# The cluster manager to connect to.
spark.master spark://helk-spark-master:7077
# Restarts the driver automatically if it fails with a non-zero exit status
spark.driver.supervise true
# ************ Runtime Environment ****************
# Sets the number of latest rolling log files that are going to be retained by the system. Older log files will be deleted.
spark.executor.logs.rolling.maxRetainedFiles 20
# Set the strategy of rolling of executor logs.
spark.executor.logs.rolling.strategy spark.executor.logs.rolling.time.interval
# Comma-separated list of jars to include on the driver and executor classpaths. Globs are allowed.
spark.jars /opt/helk/es-hadoop/elasticsearch-hadoop-6.3.2.jar
# Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths.
# The coordinates should be groupId:artifactId:version.
spark.jars.packages graphframes:graphframes:0.5.0-spark2.1-s_2.11,org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.0,databricks:spark-sklearn:0.2.3
# ************ Spark UI ****************
# Base directory in which Spark events are logged
spark.eventLog.dir /var/log/spark
# Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
spark.eventLog.enabled true
# Enable running Spark Master as reverse proxy for worker and application UIs.
# In this mode, Spark master will reverse proxy the worker and application UIs to enable access without requiring direct access to their hosts.
spark.ui.reverseProxy true
spark.sql.execution.arrow.enabled true

View File

@ -24,10 +24,10 @@ RUN apt-get -qy clean \
RUN bash -c 'mkdir -pv /opt/helk/kafka' RUN bash -c 'mkdir -pv /opt/helk/kafka'
# *********** Install Kafka *************** # *********** Install Kafka ***************
ENV KAFKA_VERSION=1.1.0 ENV KAFKA_VERSION=1.1.1
ENV KAFKA_LOGS_PATH=/var/log/kafka ENV KAFKA_LOGS_PATH=/var/log/kafka
ENV KAFKA_CONSOLE_LOG=/var/log/kafka/helk-kafka.log ENV KAFKA_CONSOLE_LOG=/var/log/kafka/helk-kafka.log
ENV KAFKA_HOME=/opt/helk/kafka/kafka_2.11-${KAFKA_VERSION} ENV KAFKA_HOME=/opt/helk/kafka/kafka_2.11-${KAFKA_VERSION}
RUN wget -qO- http://mirrors.ocf.berkeley.edu/apache/kafka/1.1.0/kafka_2.11-${KAFKA_VERSION}.tgz | sudo tar xvz -C /opt/helk/kafka/ \ RUN wget -qO- http://mirrors.ocf.berkeley.edu/apache/kafka/${KAFKA_VERSION}/kafka_2.11-${KAFKA_VERSION}.tgz | sudo tar xvz -C /opt/helk/kafka/ \
&& mkdir -v $KAFKA_LOGS_PATH && mkdir -v $KAFKA_LOGS_PATH

View File

@ -3,7 +3,7 @@
# Author: Roberto Rodriguez (@Cyb3rWard0g) # Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0 # License: GPL-3.0
FROM cyb3rward0g/helk-kafka-base:1.1.0 FROM cyb3rward0g/helk-kafka-base:1.1.1
LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g"
LABEL description="Dockerfile base for the HELK Kafka Broker." LABEL description="Dockerfile base for the HELK Kafka Broker."

View File

@ -32,7 +32,7 @@ fi
# *********** Starting Kafka ************** # *********** Starting Kafka **************
exec $KAFKA_SCRIPT $KAFKA_CONFIG >> $KAFKA_CONSOLE_LOG 2>&1 & exec $KAFKA_SCRIPT $KAFKA_CONFIG >> $KAFKA_CONSOLE_LOG 2>&1 &
sleep 20 sleep 30
# *********** Creating Kafka Topics************** # *********** Creating Kafka Topics**************
declare -a temas=("winlogbeat" "sysmontransformed" "securitytransformed") declare -a temas=("winlogbeat" "sysmontransformed" "securitytransformed")

View File

@ -1,12 +1,12 @@
# HELK script: HELK Kibana Dockerfile # HELK script: HELK Kibana Dockerfile
# HELK build Stage: Alpha # HELK build Stage: Alpha
# HELK ELK version: 6.3.1 # HELK ELK version: 6.3.2
# Author: Roberto Rodriguez (@Cyb3rWard0g) # Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0 # License: GPL-3.0
# References: # References:
# https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html # https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html
FROM docker.elastic.co/kibana/kibana:6.3.1 FROM docker.elastic.co/kibana/kibana:6.3.2
LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g"
LABEL description="Dockerfile base for the HELK Kibana." LABEL description="Dockerfile base for the HELK Kibana."

View File

@ -12,7 +12,7 @@ server.host: "helk-kibana"
#server.basePath: "" #server.basePath: ""
# The maximum payload size in bytes for incoming server requests. # The maximum payload size in bytes for incoming server requests.
#server.maxPayloadBytes: 1048576 server.maxPayloadBytes: 2048576
# The Kibana server's name. This is used for display purposes. # The Kibana server's name. This is used for display purposes.
server.name: "helk-kibana" server.name: "helk-kibana"
@ -58,11 +58,11 @@ elasticsearch.url: "http://helk-elasticsearch:9200"
# Time in milliseconds to wait for Elasticsearch to respond to pings. Defaults to the value of # Time in milliseconds to wait for Elasticsearch to respond to pings. Defaults to the value of
# the elasticsearch.requestTimeout setting. # the elasticsearch.requestTimeout setting.
#elasticsearch.pingTimeout: 1500 elasticsearch.pingTimeout: 7500
# Time in milliseconds to wait for responses from the back end or Elasticsearch. This value # Time in milliseconds to wait for responses from the back end or Elasticsearch. This value
# must be a positive integer. # must be a positive integer.
elasticsearch.requestTimeout: 60000 elasticsearch.requestTimeout: 300000
# List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side # List of Kibana client-side headers to send to Elasticsearch. To send *no* client-side
# headers, set this value to [] (an empty list). # headers, set this value to [] (an empty list).

View File

@ -14,7 +14,7 @@ until curl -s $ELASTICSEARCH_ACCESS -o /dev/null; do
sleep 1 sleep 1
done done
# *********** Check if Elasticsearch is up *************** # *********** Change Kibana and Logstash password ***************
echo "[HELK-DOCKER-INSTALLATION-INFO] Submitting a request to change the password of a Kibana and Logstash users .." echo "[HELK-DOCKER-INSTALLATION-INFO] Submitting a request to change the password of a Kibana and Logstash users .."
until curl -s -H 'Content-Type:application/json' -XPUT $ELASTICSEARCH_ACCESS/_xpack/security/user/kibana/_password -d "{\"password\": \"kibanapassword\"}" until curl -s -H 'Content-Type:application/json' -XPUT $ELASTICSEARCH_ACCESS/_xpack/security/user/kibana/_password -d "{\"password\": \"kibanapassword\"}"
do do

View File

@ -1,6 +1,6 @@
# HELK script: HELK Logstash Dockerfile # HELK script: HELK Logstash Dockerfile
# HELK build Stage: Alpha # HELK build Stage: Alpha
# HELK ELK version: 6.3.1 # HELK ELK version: 6.3.2
# Author: Roberto Rodriguez (@Cyb3rWard0g) # Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0 # License: GPL-3.0
@ -8,6 +8,6 @@
# https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html # https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html
# https://github.com/spujadas/elk-docker/blob/master/Dockerfile # https://github.com/spujadas/elk-docker/blob/master/Dockerfile
FROM docker.elastic.co/logstash/logstash:6.3.1 FROM docker.elastic.co/logstash/logstash:6.3.2
LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g"
LABEL description="Dockerfile base for the HELK Logstash." LABEL description="Dockerfile base for the HELK Logstash."

View File

@ -62,7 +62,7 @@ pipeline.batch.size: 500
# #
# Where to fetch the pipeline configuration for the main pipeline # Where to fetch the pipeline configuration for the main pipeline
# #
# path.config: /etc/LS_SETTINGS_DIRstash/pipeline #path.config: /usr/share/logstash/pipeline
# #
# Pipeline configuration string for the main pipeline # Pipeline configuration string for the main pipeline
# #

View File

@ -1,3 +1,8 @@
# HELK winevent-remove-winlogbeats-prepend-of-eventdata filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
# Use the following to get rid of the prepended "event_data" nest that (elastic) winlogbeats adds to windows logs # Use the following to get rid of the prepended "event_data" nest that (elastic) winlogbeats adds to windows logs
if [type] == "wineventlog" and [beat] { if [type] == "wineventlog" and [beat] {

View File

@ -1,3 +1,8 @@
# HELK winevent-cleanup-no-dashes-only-values filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
if [event_id] { if [event_id] {
mutate { add_field => { "z_logstash_pipeline" => "1500" } } mutate { add_field => { "z_logstash_pipeline" => "1500" } }

View File

@ -1,3 +1,8 @@
# HELK winevent-conversions-ip-conversions-basic filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
# Use this to determine if windows event log or not (for now, until we are properly marking all windows log types as something like "log_type: winevent") # Use this to determine if windows event log or not (for now, until we are properly marking all windows log types as something like "log_type: winevent")
if [event_id] { if [event_id] {
@ -150,11 +155,11 @@ filter {
# Seen in the following EventIDs (not necessarily exhaustive) # Seen in the following EventIDs (not necessarily exhaustive)
# Microsoft-Windows-TerminalServices-RDPClient/Operational:1102,1024 # Microsoft-Windows-TerminalServices-RDPClient/Operational:1102,1024
# Only perform on the above EIDs because otherwise it may be values that are incomprehensible # Only perform on the above EIDs because otherwise it may be values that are incomprehensible
else if [Value] and [wef][channel] == "Microsoft-Windows-TerminalServices-RDPClient/Operational" { else if [Value] and [log_name] == "Microsoft-Windows-TerminalServices-RDPClient/Operational" {
if [wef.eid] == 1102 { if [event_id] == 1102 {
mutate { rename => { "Value" => "dst_ip_addr" } } mutate { rename => { "Value" => "dst_ip_addr" } }
} }
else if [wef.eid] == 1024 { else if [event_id] == 1024 {
mutate { rename => { "Value" => "dst_ip_addr" } } mutate { rename => { "Value" => "dst_ip_addr" } }
} }
} }

View File

@ -1,3 +1,8 @@
# HELK winevent-cleanup-lowercasing-windows-is-case-sensitive filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
if [event_id] { if [event_id] {
mutate { add_field => { "z_logstash_pipeline" => "1522" } } mutate { add_field => { "z_logstash_pipeline" => "1522" } }

View File

@ -1,3 +1,8 @@
# HELK winevent-conversions-process-cli filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
if [event_id] { if [event_id] {

View File

@ -1,3 +1,8 @@
# HELK winevent-cleanup-other filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
if [event_id] { if [event_id] {
if [user_logon_guid] { if [user_logon_guid] {

View File

@ -2,6 +2,7 @@
# HELK build Stage: Alpha # HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron) # Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0 # License: GPL-3.0
filter { filter {
if [log_name] == "Security" { if [log_name] == "Security" {
# event_id 4698 for Created Scheduled Task # event_id 4698 for Created Scheduled Task

View File

@ -1,3 +1,8 @@
# HELK dst-ip-cleanups filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
#TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security" #TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security"

View File

@ -1,3 +1,8 @@
# HELK src-ip-cleanups filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
#TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security" #TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security"

View File

@ -1,3 +1,8 @@
# HELK dst-nat-ip-cleanups filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
#TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security" #TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security"

View File

@ -1,3 +1,8 @@
# HELK src-nat-ip-cleanups filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
#TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security" #TONOTE: On all IPs below, even though we have determined that it is IPv4 and IPv6 and will alter rename the field if it is an IPv6 address differently than IPv4 -- We still need to keep it as that single field for this configuration -- because sometimes a single IP type can already have both IPv6 and IPv4 from before -- examples are Cisco ASA Logs and Windows EventID "4769" in Channel "Security"

View File

@ -1,3 +1,8 @@
# HELK dst-ip filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
# If dst_ip_addr field exists from previous config settings # If dst_ip_addr field exists from previous config settings
if [dst_ip_addr] { if [dst_ip_addr] {

View File

@ -1,3 +1,8 @@
# HELK src-ip filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
# If src_ip_addr field exists from previous config settings # If src_ip_addr field exists from previous config settings
if [src_ip_addr] { if [src_ip_addr] {

View File

@ -1,3 +1,8 @@
# HELK dst-nat-ip filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
# If dst_nat_ip_addr field exists from previous config settings # If dst_nat_ip_addr field exists from previous config settings
if [dst_nat_ip_addr] { if [dst_nat_ip_addr] {

View File

@ -1,3 +1,8 @@
# HELK src-nat-ip filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter { filter {
# If src_nat_ip_addr field exists from previous config settings # If src_nat_ip_addr field exists from previous config settings
if [src_nat_ip_addr] { if [src_nat_ip_addr] {

View File

@ -9,7 +9,7 @@ output {
hosts => ["helk-elasticsearch:9200"] hosts => ["helk-elasticsearch:9200"]
index => "logs-endpoint-winevent-sysmon-%{+YYYY.MM.dd}" index => "logs-endpoint-winevent-sysmon-%{+YYYY.MM.dd}"
document_id => "%{[@metadata][log_hash]}" document_id => "%{[@metadata][log_hash]}"
document_type => "_doc" #document_type => "_doc"
} }
kafka { kafka {
bootstrap_servers => "helk-kafka-broker:9092" bootstrap_servers => "helk-kafka-broker:9092"

View File

@ -9,7 +9,7 @@ output {
hosts => ["helk-elasticsearch:9200"] hosts => ["helk-elasticsearch:9200"]
index => "logs-endpoint-winevent-security-%{+YYYY.MM.dd}" index => "logs-endpoint-winevent-security-%{+YYYY.MM.dd}"
document_id => "%{[@metadata][log_hash]}" document_id => "%{[@metadata][log_hash]}"
document_type => "_doc" #document_type => "_doc"
} }
kafka { kafka {
bootstrap_servers => "helk-kafka-broker:9092" bootstrap_servers => "helk-kafka-broker:9092"

View File

@ -9,7 +9,7 @@ output {
hosts => ["helk-elasticsearch:9200"] hosts => ["helk-elasticsearch:9200"]
index => "logs-endpoint-winevent-system-%{+YYYY.MM.dd}" index => "logs-endpoint-winevent-system-%{+YYYY.MM.dd}"
document_id => "%{[@metadata][log_hash]}" document_id => "%{[@metadata][log_hash]}"
document_type => "_doc" #document_type => "_doc"
} }
} }
} }

View File

@ -9,7 +9,7 @@ output {
hosts => ["helk-elasticsearch:9200"] hosts => ["helk-elasticsearch:9200"]
index => "logs-endpoint-winevent-application-%{+YYYY.MM.dd}" index => "logs-endpoint-winevent-application-%{+YYYY.MM.dd}"
document_id => "%{[@metadata][log_hash]}" document_id => "%{[@metadata][log_hash]}"
document_type => "_doc" #document_type => "_doc"
} }
} }
} }

View File

@ -9,7 +9,7 @@ output {
hosts => ["helk-elasticsearch:9200"] hosts => ["helk-elasticsearch:9200"]
index => "logs-endpoint-winevent-powershell-%{+YYYY.MM.dd}" index => "logs-endpoint-winevent-powershell-%{+YYYY.MM.dd}"
document_id => "%{[@metadata][log_hash]}" document_id => "%{[@metadata][log_hash]}"
document_type => "_doc" #document_type => "_doc"
} }
} }
} }

View File

@ -9,7 +9,7 @@ output {
hosts => ["helk-elasticsearch:9200"] hosts => ["helk-elasticsearch:9200"]
index => "logs-endpoint-winevent-wmiactivity-%{+YYYY.MM.dd}" index => "logs-endpoint-winevent-wmiactivity-%{+YYYY.MM.dd}"
document_id => "%{[@metadata][log_hash]}" document_id => "%{[@metadata][log_hash]}"
document_type => "_doc" #document_type => "_doc"
} }
} }
} }

View File

@ -9,7 +9,7 @@ output {
elasticsearch { elasticsearch {
hosts => ["helk-elasticsearch:9200"] hosts => ["helk-elasticsearch:9200"]
index => "mitre-attack-%{+YYYY.MM.dd}" index => "mitre-attack-%{+YYYY.MM.dd}"
document_type => "_doc" #document_type => "_doc"
} }
} }
} }

View File

@ -0,0 +1,40 @@
#!/bin/bash
# HELK script: logstash-entrypoint.sh
# HELK script description: Pushes output templates to ES and starts Logstash
# HELK build Stage: Alpha
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0
# ********* Setting LS_JAVA_OPTS ***************
if [[ ! -z "$LS_JAVA_OPTS" ]]; then
echo "[HELK-DOCKER-INSTALLATION-INFO] Setting LS_JAVA_OPTS to $LS_JAVA_OPTS"
else
# ****** Setup heap size *****
LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/1024/2}' /proc/meminfo)
echo "[HELK-DOCKER-INSTALLATION-INFO] Setting LS_HEAP_SIZE to ${LS_MEMORY}.."
export LS_JAVA_OPTS="-Xms${LS_MEMORY}g -Xmx${LS_MEMORY}g"
fi
# *********** Looking for ES ***************
echo "[HELK-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.."
until curl -s helk-elasticsearch:9200 -o /dev/null; do
sleep 1
done
echo "[HELK-DOCKER-INSTALLATION-INFO] Uploading templates to elasticsearch.."
DIR=/usr/share/logstash/output_templates
for file in ${DIR}/*.json
do
template_name=$(echo $file | sed -r ' s/^.*\/[0-9]+\-//');
echo "[HELK-DOCKER-INSTALLATION-INFO] Uploading $template_name template to elasticsearch..";
curl -s -H 'Content-Type: application/json' -XPUT "http://helk-elasticsearch:9200/_template/$template_name" -d@${file};
done
# ********** Install Plugin *****************
echo "[HELK-DOCKER-INSTALLATION-INFO] Installing Logstsh plugins.."
logstash-plugin install logstash-filter-prune
# ********** Starting Logstash *****************
echo "[HELK-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.."
/usr/local/bin/docker-entrypoint

View File

@ -4,8 +4,7 @@
# License: GPL-3.0 # License: GPL-3.0
input { input {
kafka kafka {
{
bootstrap_servers => "helk-kafka-broker:9092,helk-kafka-broker2:9093" bootstrap_servers => "helk-kafka-broker:9092,helk-kafka-broker2:9093"
topics => ["winlogbeat"] topics => ["winlogbeat"]
decorate_events => true decorate_events => true

View File

@ -5,8 +5,7 @@
# License: GPL-3.0 # License: GPL-3.0
input { input {
file file {
{
path => "/usr/share/logstash/cti/mitre_attack.csv" path => "/usr/share/logstash/cti/mitre_attack.csv"
start_position => "beginning" start_position => "beginning"
sincedb_path => "/dev/null" sincedb_path => "/dev/null"

View File

@ -0,0 +1,11 @@
# HELK Kafka input conf file
# HELK build Stage: Alpha
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0
input {
beats {
port => 5044
include_codec_tag => false
}
}

View File

@ -0,0 +1,19 @@
# HELK All filter conf file
# HELK build Stage: Alpha
# Author: Roberto Rodriguez (@Cyb3rWard0g)
# License: GPL-3.0
filter {
if [message] {
mutate {
add_field => {
"z_logstash_pipeline" => "0098"
"log_ingest_timestamp" => "%{@timestamp}"
}
copy => {
"message" => "z_original_message"
"type" => "z_logstash_type"
}
}
}
}

View File

@ -5,6 +5,7 @@
filter { filter {
if [message] { if [message] {
mutate { add_field => { "z_logstash_pipeline" => "0099" } }
fingerprint { fingerprint {
source => "message" source => "message"
target => "[@metadata][log_hash]" target => "[@metadata][log_hash]"

View File

@ -0,0 +1,33 @@
# HELK winevent-remove-winlogbeats-prepend-of-eventdata filter conf
# HELK build Stage: Alpha
# Author: Nate Guagenti (@neu5ron)
# License: GPL-3.0
filter {
# Use the following to get rid of the prepended "event_data" nest that (elastic) winlogbeats adds to windows logs
if [type] == "wineventlog" and [beat] {
ruby {
code => "
eventdata = event.get('event_data')
# Sometimes does not exist, so check that first -- then move the nests
if !eventdata.nil?
eventdata.each {|k, v|
if eventdata.to_s != '(NULL)'
event.set(k, v)
end
}
end
# Finally remove the nest completely
event.remove('event_data')
"
tag_on_exception => "_rubyexception_1010"
#code => "
# event.get('event_data').each {|k, v|
# event.set(k, v)
# }
# event.remove('event_data')
#"
#tag_on_exception => "_rubyexception_1010"
}
}
}

Some files were not shown because too many files have changed in this diff Show More