mirror of https://github.com/infosecn1nja/HELK.git

Commit: eecd5f6c09 ("Update KSQL Post Additions")
Parent: ccbee9f6fb

@@ -100,7 +100,7 @@ services:
       REPLICATION_FACTOR: 1
       ADVERTISED_LISTENER: ${ADVERTISED_LISTENER}
       ZOOKEEPER_NAME: helk-zookeeper
-      KAFKA_CREATE_TOPICS: winlogbeat
+      KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN
      KAFKA_HEAP_OPTS: -Xmx1G -Xms1G
       LOG_RETENTION_HOURS: 4
     ports:
@@ -108,7 +108,7 @@ services:
     networks:
       helk:
   helk-ksql-server:
-    image: confluentinc/cp-ksql-server:5.0.1
+    image: confluentinc/cp-ksql-server:5.1.0
     container_name: helk-ksql-server
     restart: always
     depends_on:
@@ -127,7 +127,7 @@ services:
     networks:
       helk:
   helk-ksql-cli:
-    image: confluentinc/cp-ksql-cli:5.0.1
+    image: confluentinc/cp-ksql-cli:5.1.0
     container_name: helk-ksql-cli
     depends_on:
       - helk-kafka-broker

@@ -105,7 +105,7 @@ services:
       REPLICATION_FACTOR: 1
       ADVERTISED_LISTENER: ${ADVERTISED_LISTENER}
       ZOOKEEPER_NAME: helk-zookeeper
-      KAFKA_CREATE_TOPICS: winlogbeat
+      KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN
       KAFKA_HEAP_OPTS: -Xmx1G -Xms1G
       LOG_RETENTION_HOURS: 4
     ports:
@@ -113,7 +113,7 @@ services:
     networks:
       helk:
   helk-ksql-server:
-    image: confluentinc/cp-ksql-server:5.0.1
+    image: confluentinc/cp-ksql-server:5.1.0
     container_name: helk-ksql-server
     restart: always
     depends_on:
@@ -132,7 +132,7 @@ services:
     networks:
       helk:
   helk-ksql-cli:
-    image: confluentinc/cp-ksql-cli:5.0.1
+    image: confluentinc/cp-ksql-cli:5.1.0
     container_name: helk-ksql-cli
     depends_on:
       - helk-kafka-broker

@@ -140,7 +140,7 @@ services:
       REPLICATION_FACTOR: 1
       ADVERTISED_LISTENER: ${ADVERTISED_LISTENER}
       ZOOKEEPER_NAME: helk-zookeeper
-      KAFKA_CREATE_TOPICS: winlogbeat
+      KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN
       KAFKA_HEAP_OPTS: -Xmx1G -Xms1G
       LOG_RETENTION_HOURS: 4
     ports:
@@ -148,7 +148,7 @@ services:
     networks:
       helk:
   helk-ksql-server:
-    image: confluentinc/cp-ksql-server:5.0.1
+    image: confluentinc/cp-ksql-server:5.1.0
     container_name: helk-ksql-server
     restart: always
     depends_on:
@@ -167,7 +167,7 @@ services:
     networks:
       helk:
   helk-ksql-cli:
-    image: confluentinc/cp-ksql-cli:5.0.1
+    image: confluentinc/cp-ksql-cli:5.1.0
     container_name: helk-ksql-cli
     depends_on:
       - helk-kafka-broker

@@ -145,7 +145,7 @@ services:
       REPLICATION_FACTOR: 1
       ADVERTISED_LISTENER: ${ADVERTISED_LISTENER}
       ZOOKEEPER_NAME: helk-zookeeper
-      KAFKA_CREATE_TOPICS: winlogbeat
+      KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN
       KAFKA_HEAP_OPTS: -Xmx1G -Xms1G
       LOG_RETENTION_HOURS: 4
     ports:
@@ -153,7 +153,7 @@ services:
     networks:
       helk:
   helk-ksql-server:
-    image: confluentinc/cp-ksql-server:5.0.1
+    image: confluentinc/cp-ksql-server:5.1.0
     container_name: helk-ksql-server
     restart: always
     depends_on:
@@ -172,7 +172,7 @@ services:
     networks:
       helk:
   helk-ksql-cli:
-    image: confluentinc/cp-ksql-cli:5.0.1
+    image: confluentinc/cp-ksql-cli:5.1.0
     container_name: helk-ksql-cli
     depends_on:
       - helk-kafka-broker

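Each of the four docker-compose hunk groups above applies the same two changes: the Kafka broker now pre-creates a SYSMON_JOIN topic next to winlogbeat, and the Confluent KSQL server and CLI images move from 5.0.1 to 5.1.0. A minimal sketch of how one might verify this after bringing a stack up, assuming the default container names from these compose files and that the ksql binary is on the PATH inside the CLI container:

# Hypothetical post-deploy checks; container names are taken from the compose files above.
docker ps --format '{{.Names}}\t{{.Image}}' | grep ksql

# Attach the KSQL CLI to the server and list topics; SYSMON_JOIN should be present.
docker exec -i helk-ksql-cli ksql http://helk-ksql-server:8088 <<'EOF'
SHOW TOPICS;
EOF
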
@@ -33,7 +33,7 @@ while [[ -z $(grep "Server running at http://$KIBANA" /usr/share/kibana/config/k
 done

 # *********** Creating Kibana index-patterns ***************
-declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past")
+declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past" "sysmon-join-*")

 echo "[+++] Creating Kibana Index Patterns..."
 for index in ${!index_patterns[@]}; do

@@ -34,7 +34,7 @@ while [[ -z $(grep "Server running at http://$KIBANA" /usr/share/kibana/config/k
 done

 # *********** Creating Kibana index-patterns ***************
-declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past")
+declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past" "sysmon-join-*")

 echo "[+++] Creating Kibana Index Patterns..."
 for index in ${!index_patterns[@]}; do

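Both Kibana setup scripts append a sysmon-join-* entry to the array of index patterns they loop over. For orientation, a hedged sketch of the kind of request such a loop typically issues per pattern; the host, credentials, and time field below are assumptions and may differ from what the HELK script actually sends:

# Illustrative only: register one index pattern through Kibana's saved-objects API.
KIBANA="localhost:5601"
pattern="sysmon-join-*"
curl -s -X POST "http://$KIBANA/api/saved_objects/index-pattern/$pattern" \
  -H 'Content-Type: application/json' -H 'kbn-xsrf: true' \
  -d "{\"attributes\":{\"title\":\"$pattern\",\"timeFieldName\":\"@timestamp\"}}"
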
@@ -0,0 +1,166 @@
+CREATE STREAM WINLOGBEAT_STREAM \
+(source_name VARCHAR, \
+type VARCHAR, \
+task VARCHAR, \
+log_name VARCHAR, \
+computer_name VARCHAR, \
+event_data STRUCT< \
+UtcTime VARCHAR, \
+ProcessGuid VARCHAR, \
+ProcessId INTEGER, \
+Image VARCHAR, \
+FileVersion VARCHAR, \
+Description VARCHAR, \
+Product VARCHAR, \
+Company VARCHAR, \
+CommandLine VARCHAR, \
+CurrentDirectory VARCHAR, \
+User VARCHAR, \
+LogonGuid VARCHAR, \
+LogonId VARCHAR, \
+TerminalSessionId INTEGER, \
+IntegrityLevel VARCHAR, \
+Hashes VARCHAR, \
+ParentProcessGuid VARCHAR, \
+ParentProcessId INTEGER, \
+ParentImage VARCHAR, \
+ParentCommandLine VARCHAR, \
+Protocol VARCHAR, \
+Initiated VARCHAR, \
+SourceIsIpv6 VARCHAR, \
+SourceIp VARCHAR, \
+SourceHostname VARCHAR, \
+SourcePort INTEGER, \
+SourcePortName VARCHAR, \
+DestinationIsIpv6 VARCHAR, \
+DestinationIp VARCHAR, \
+DestinationHostname VARCHAR, \
+DestinationPort INTEGER, \
+DestinationPortName VARCHAR>, \
+event_id INTEGER) \
+WITH (KAFKA_TOPIC='winlogbeat', VALUE_FORMAT='JSON');
+
+CREATE STREAM WINLOGBEAT_STREAM_REKEY \
+WITH (VALUE_FORMAT='JSON', PARTITIONS=1, TIMESTAMP='event_date_creation') \
+AS SELECT \
+STRINGTOTIMESTAMP(event_data->UtcTime, 'yyyy-MM-dd HH:mm:ss.SSS') AS event_date_creation, \
+event_data->ProcessGuid AS process_guid, \
+event_data->ProcessId AS process_id, \
+event_data->Image AS process_path, \
+event_data->FileVersion AS file_version, \
+event_data->Description AS file_description, \
+event_data->Company AS file_company, \
+event_data->CommandLine AS process_command_line, \
+event_data->CurrentDirectory AS process_current_directory, \
+event_data->User AS user_account, \
+event_data->LogonGuid AS user_logon_guid, \
+event_data->LogonId AS user_logon_id, \
+event_data->TerminalSessionId AS user_session_id, \
+event_data->IntegrityLevel AS process_integrity_level, \
+event_data->Hashes AS hashes, \
+event_data->ParentProcessGuid AS parent_process_guid, \
+event_data->ParentProcessId AS parent_process_id, \
+event_data->ParentImage AS parent_process_path, \
+event_data->ParentCommandLine AS parent_process_command_line, \
+event_data->Protocol AS network_protocol, \
+event_data->Initiated AS network_connection_initiated, \
+event_data->SourceIsIpv6 AS src_is_ipv6, \
+event_data->SourceIp AS src_ip_addr, \
+event_data->SourceHostname AS src_host_name, \
+event_data->SourcePort AS src_port, \
+event_data->SourcePortName AS src_port_name, \
+event_data->DestinationIsIpv6 AS dst_is_ipv6, \
+event_data->DestinationIp AS dst_ip_addr, \
+event_data->DestinationHostname AS dst_host_name, \
+event_data->DestinationPort AS dst_port, \
+event_data->DestinationPortName AS dst_port_name, \
+event_id, \
+source_name, \
+log_name \
+FROM WINLOGBEAT_STREAM WHERE source_name='Microsoft-Windows-Sysmon' PARTITION BY process_guid;
+
+CREATE STREAM SYSMON_PROCESS_CREATE WITH (VALUE_FORMAT='JSON', PARTITIONS=1, TIMESTAMP='event_date_creation') \
+AS SELECT \
+event_date_creation, \
+process_guid, \
+process_id, \
+process_path, \
+file_version, \
+file_description, \
+file_company, \
+process_command_line, \
+process_current_directory, \
+user_account, \
+user_logon_guid, \
+user_logon_id, \
+user_session_id, \
+process_integrity_level, \
+hashes, \
+parent_process_guid, \
+parent_process_id, \
+parent_process_path, \
+parent_process_command_line, \
+event_id, \
+source_name, \
+log_name \
+FROM WINLOGBEAT_STREAM_REKEY WHERE event_id=1;
+
+CREATE STREAM SYSMON_NETWORK_CONNECT WITH (VALUE_FORMAT='JSON', PARTITIONS=1, TIMESTAMP='event_date_creation') \
+AS SELECT \
+event_date_creation, \
+process_guid, \
+process_id, \
+process_path, \
+user_account, \
+network_protocol, \
+network_connection_initiated, \
+src_is_ipv6, \
+src_ip_addr, \
+src_host_name, \
+src_port, \
+src_port_name, \
+dst_is_ipv6, \
+dst_ip_addr, \
+dst_host_name, \
+dst_port, \
+dst_port_name, \
+event_id, \
+source_name, \
+log_name \
+FROM WINLOGBEAT_STREAM_REKEY WHERE event_id=3;
+
+CREATE TABLE SYSMON_PROCESS_CREATE_TABLE \
+(event_date_creation VARCHAR, \
+process_guid VARCHAR, \
+process_id INTEGER, \
+process_path VARCHAR, \
+file_version VARCHAR, \
+file_description VARCHAR, \
+file_company VARCHAR, \
+process_command_line VARCHAR, \
+process_current_directory VARCHAR, \
+user_account VARCHAR, \
+user_logon_guid VARCHAR, \
+user_logon_id VARCHAR, \
+user_session_id INTEGER, \
+process_integrity_level VARCHAR, \
+hashes VARCHAR, \
+parent_process_guid VARCHAR, \
+parent_process_id INTEGER, \
+parent_process_path VARCHAR, \
+parent_process_command_line VARCHAR, \
+event_id INTEGER, \
+source_name VARCHAR, \
+log_name VARCHAR) \
+WITH (KAFKA_TOPIC='SYSMON_PROCESS_CREATE', VALUE_FORMAT='JSON', KEY='process_guid');
+
+CREATE STREAM SYSMON_JOIN WITH (PARTITIONS=1) AS \
+SELECT N.EVENT_DATE_CREATION, N.PROCESS_GUID, N.PROCESS_ID, N.PROCESS_PATH, N.USER_ACCOUNT, \
+N.NETWORK_PROTOCOL, N.NETWORK_CONNECTION_INITIATED, N.SRC_IS_IPV6, N.SRC_IP_ADDR, \
+N.SRC_HOST_NAME, N.SRC_PORT, N.SRC_PORT_NAME, N.DST_IS_IPV6, N.DST_IP_ADDR, N.DST_HOST_NAME, \
+N.DST_PORT, N.DST_PORT_NAME, N.SOURCE_NAME, N.LOG_NAME, \
+P.PROCESS_COMMAND_LINE, P.HASHES, P.PARENT_PROCESS_PATH, P.PARENT_PROCESS_COMMAND_LINE, \
+P.USER_LOGON_GUID, P.USER_LOGON_ID, P.USER_SESSION_ID, P.PROCESS_CURRENT_DIRECTORY, \
+P.PROCESS_INTEGRITY_LEVEL, P.PARENT_PROCESS_GUID, P.PARENT_PROCESS_ID \
+FROM SYSMON_NETWORK_CONNECT N INNER JOIN SYSMON_PROCESS_CREATE_TABLE P \
+ON N.PROCESS_GUID = P.PROCESS_GUID;

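The new KSQL script builds the pipeline end to end: a WINLOGBEAT_STREAM over the winlogbeat topic, a rekeyed Sysmon-only stream partitioned by process_guid, per-event streams for process creation (event_id=1) and network connections (event_id=3), a table over the process-create topic, and finally the SYSMON_JOIN stream that enriches network events with their parent process context. A minimal sketch of loading and smoke-testing it from the CLI container; the script path inside the container is an assumption, not something defined by this commit:

# Hypothetical invocation; /scripts/sysmon-join.commands is an assumed mount path.
docker exec -i helk-ksql-cli ksql http://helk-ksql-server:8088 <<'EOF'
RUN SCRIPT '/scripts/sysmon-join.commands';
SELECT * FROM SYSMON_JOIN LIMIT 5;
EOF
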
@@ -6,7 +6,7 @@
 input {
   kafka {
     bootstrap_servers => "helk-kafka-broker:9092"
-    topics => ["winlogbeat"]
+    topics => ["winlogbeat", "SYSMON_JOIN"]
     decorate_events => true
     codec => "json"
     auto_offset_reset => "earliest"

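With the Kafka input now subscribed to both topics, joined events can be checked on the SYSMON_JOIN topic before Logstash even touches them. A sketch, assuming the Kafka console tools are available inside the broker container (the exact tool name or path may differ in the HELK image):

# Illustrative check only; tool availability inside helk-kafka-broker is an assumption.
docker exec -i helk-kafka-broker kafka-console-consumer \
  --bootstrap-server helk-kafka-broker:9092 \
  --topic SYSMON_JOIN --from-beginning --max-messages 5
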
@@ -70,6 +70,7 @@ filter {
       "ParentCommandLine" => "process_parent_command_line"
       "IntegrityLevel" => "process_integrity_level"
       "LogonGuid" => "user_logon_guid"
+      "LogonIdd" => "user_logon_id"
       "ParentProcessGuid" => "process_parent_guid"
       "ParentProcessId" => "process_parent_id"
       "TerminalSessionId" => "user_session_id"

@@ -0,0 +1,15 @@
+# HELK sysmon-join output conf file
+# HELK build Stage: Alpha
+# Author: Roberto Rodriguez (@Cyb3rWard0g)
+# License: GPL-3.0
+
+output {
+  if [@metadata][kafka][topic] == "SYSMON_JOIN" {
+    elasticsearch {
+      hosts => ["helk-elasticsearch:9200"]
+      index => "sysmon-join-%{+YYYY.MM.dd}"
+      user => 'elastic'
+      #password => 'elasticpassword'
+    }
+  }
+}

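This new output file routes only events whose Kafka topic metadata equals SYSMON_JOIN into a dedicated daily index. Once events flow, the index should appear in Elasticsearch; a quick check, with the host and credentials as assumptions (the password line above is commented out for the basic license):

# Illustrative check against the _cat API; host and credentials are assumptions.
curl -s -u elastic:elasticpassword "http://localhost:9200/_cat/indices/sysmon-join-*?v"
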
@@ -14,8 +14,8 @@ echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch URL to $ELASTICSE

 # ********* Setting LS_JAVA_OPTS ***************
 if [[ -z "$LS_JAVA_OPTS" ]]; then
-    LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/1024/2}' /proc/meminfo)
-    LS_JAVA_OPTS="-Xms${LS_MEMORY}g -Xmx${LS_MEMORY}g"
+    LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/4}' /proc/meminfo)
+    export LS_JAVA_OPTS="-Xms${LS_MEMORY}m -Xmx${LS_MEMORY}m"
 fi
 echo "[HELK-DOCKER-INSTALLATION-INFO] Setting LS_JAVA_OPTS to $LS_JAVA_OPTS"

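The Logstash heap sizing changes in two ways: the value is now a quarter of MemAvailable expressed in megabytes instead of half of it in gigabytes (MemAvailable in /proc/meminfo is reported in kB, so $2/1024/4 yields MB divided by four), and the variable is exported so the Logstash process actually inherits it. The same arithmetic in isolation:

# MemAvailable is in kB: /1024 gives MB, /4 takes a quarter of it for the JVM heap.
LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/4}' /proc/meminfo)
export LS_JAVA_OPTS="-Xms${LS_MEMORY}m -Xmx${LS_MEMORY}m"
echo "$LS_JAVA_OPTS"   # e.g. -Xms2048m -Xmx2048m on a host with roughly 8 GB available
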
@@ -197,14 +197,22 @@ install_docker_compose(){
 set_elasticsearch_password(){
     if [[ -z "$ELASTICSEARCH_PASSWORD_INPUT" ]] && [[ $SUBSCRIPTION_CHOICE == "trial" ]]; then
         while true; do
-            read -p "[HELK-INSTALLATION-INFO] Set HELK Elasticsearch Password: " ELASTICSEARCH_PASSWORD_INPUT
-            read -p "[HELK-INSTALLATION-INFO] Verify HELK Elasticsearch Password: " ELASTICSEARCH_PASSWORD_INPUT_VERIFIED
-            # *********** Validating Password Input ***************
-            if [[ "$ELASTICSEARCH_PASSWORD_INPUT" == "$ELASTICSEARCH_PASSWORD_INPUT_VERIFIED" ]]; then
+            read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK Elasticsearch Password: " -e -i "elasticpassword" ELASTICSEARCH_PASSWORD_INPUT
+            READ_INPUT=$?
+            ELASTICSEARCH_PASSWORD_INPUT=${ELASTICSEARCH_PASSWORD_INPUT:-"elasticpassword"}
+            if [ $READ_INPUT = 142 ]; then
+                echo -e "\n[HELK-INSTALLATION-INFO] HELK elasticsearch password set to ${ELASTICSEARCH_PASSWORD_INPUT}"
                 break
             else
-                echo -e "${RED}Error...${STD}"
-                echo "[HELK-INSTALLATION-INFO] Your password values do not match.."
+                read -p "[HELK-INSTALLATION-INFO] Verify HELK Elasticsearch Password: " ELASTICSEARCH_PASSWORD_INPUT_VERIFIED
+                echo -e "[HELK-INSTALLATION-INFO] HELK elasticsearch password set to ${ELASTICSEARCH_PASSWORD_INPUT}"
+                # *********** Validating Password Input ***************
+                if [[ "$ELASTICSEARCH_PASSWORD_INPUT" == "$ELASTICSEARCH_PASSWORD_INPUT_VERIFIED" ]]; then
+                    break
+                else
+                    echo -e "${RED}Error...${STD}"
+                    echo "[HELK-INSTALLATION-INFO] Your password values do not match.."
+                fi
             fi
         done
         export ELASTIC_PASSWORD=$ELASTICSEARCH_PASSWORD_INPUT

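The interactive prompts in helk_install.sh now use read -t 30 with a pre-filled default (-e -i). When the 30-second timer expires, read exits with a status greater than 128 (128 + SIGALRM, i.e. 142 on Linux), which is what the READ_INPUT check uses to accept the default without further prompting; otherwise the script falls back to the old prompt-and-verify flow. A small self-contained sketch of the mechanism with arbitrary prompt text and a 5-second timeout:

# Demonstrates the timeout pattern used above; values are illustrative only.
read -t 5 -p "Set password: " -e -i "defaultpassword" PASSWORD_INPUT
READ_INPUT=$?
PASSWORD_INPUT=${PASSWORD_INPUT:-"defaultpassword"}
if [ $READ_INPUT = 142 ]; then
    # read timed out (128 + 14/SIGALRM); fall back to the pre-filled default
    echo -e "\nPassword set to ${PASSWORD_INPUT} (timed out, default accepted)"
else
    echo "Password set to ${PASSWORD_INPUT}"
fi
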
@@ -217,14 +225,22 @@ set_elasticsearch_password(){
 set_kibana_ui_password(){
     if [[ -z "$KIBANA_UI_PASSWORD_INPUT" ]]; then
         while true; do
-            read -p "[HELK-INSTALLATION-INFO] Set HELK Kibana UI Password: " KIBANA_UI_PASSWORD_INPUT
-            read -p "[HELK-INSTALLATION-INFO] Verify HELK Kibana UI Password: " KIBANA_UI_PASSWORD_INPUT_VERIFIED
-            # *********** Validating Password Input ***************
-            if [[ "$KIBANA_UI_PASSWORD_INPUT" == "$KIBANA_UI_PASSWORD_INPUT_VERIFIED" ]]; then
+            read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK Kibana UI Password: " -e -i "hunting" KIBANA_UI_PASSWORD_INPUT
+            READ_INPUT=$?
+            KIBANA_UI_PASSWORD_INPUT=${KIBANA_UI_PASSWORD_INPUT:-"hunting"}
+            if [ $READ_INPUT = 142 ]; then
+                echo -e "\n[HELK-INSTALLATION-INFO] HELK Kibana UI password set to ${KIBANA_UI_PASSWORD_INPUT}"
                 break
             else
-                echo -e "${RED}Error...${STD}"
-                echo "[HELK-INSTALLATION-INFO] Your password values do not match.."
+                read -p "[HELK-INSTALLATION-INFO] Verify HELK Kibana UI Password: " KIBANA_UI_PASSWORD_INPUT_VERIFIED
+                echo -e "[HELK-INSTALLATION-INFO] HELK Kibana UI password set to ${KIBANA_UI_PASSWORD_INPUT}"
+                # *********** Validating Password Input ***************
+                if [[ "$KIBANA_UI_PASSWORD_INPUT" == "$KIBANA_UI_PASSWORD_INPUT_VERIFIED" ]]; then
+                    break
+                else
+                    echo -e "${RED}Error...${STD}"
+                    echo "[HELK-INSTALLATION-INFO] Your password values do not match.."
+                fi
             fi
         done
     fi

@@ -263,7 +279,13 @@ set_network(){
         # *********** Accepting Defaults or Allowing user to set the HELK IP ***************
         local ip_choice
         read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK IP. Default value is your current IP: " -e -i ${HOST_IP} ip_choice
+        READ_INPUT=$?
         HOST_IP="${ip_choice:-$HOST_IP}"
+        if [ $READ_INPUT = 142 ]; then
+            echo -e "\n[HELK-INSTALLATION-INFO] HELK IP set to ${HOST_IP}"
+        else
+            echo "[HELK-INSTALLATION-INFO] HELK IP set to ${HOST_IP}"
+        fi
     fi
 }

@@ -288,17 +310,24 @@ set_helk_subscription(){
         # *********** Accepting Defaults or Allowing user to set HELK subscription ***************
         while true; do
             local subscription_input
-            read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK elastic subscription (basic or trial). Default value is basic: " -e -i "basic" subscription_input
+            read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK elastic subscription (basic or trial): " -e -i "basic" subscription_input
+            READ_INPUT=$?
             SUBSCRIPTION_CHOICE=${subscription_input:-"basic"}
-            # *********** Validating subscription Input ***************
-            case $SUBSCRIPTION_CHOICE in
-                basic) break;;
-                trial) break;;
-                *)
-                    echo -e "${RED}Error...${STD}"
-                    echo "[HELK-INSTALLATION-ERROR] Not a valid subscription. Valid Options: basic or trial"
-                ;;
-            esac
+            if [ $READ_INPUT = 142 ]; then
+                echo -e "\n[HELK-INSTALLATION-INFO] HELK elastic subscription set to ${SUBSCRIPTION_CHOICE}"
+                break
+            else
+                echo "[HELK-INSTALLATION-INFO] HELK elastic subscription set to ${SUBSCRIPTION_CHOICE}"
+                # *********** Validating subscription Input ***************
+                case $SUBSCRIPTION_CHOICE in
+                    basic) break;;
+                    trial) break;;
+                    *)
+                        echo -e "${RED}Error...${STD}"
+                        echo "[HELK-INSTALLATION-ERROR] Not a valid subscription. Valid Options: basic or trial"
+                    ;;
+                esac
+            fi
         done
     fi
 }

@@ -317,23 +346,29 @@ set_helk_build(){
             echo " "

             local CONFIG_CHOICE
-            read -p "Enter build choice [ 1 - 2] " CONFIG_CHOICE
-            case $CONFIG_CHOICE in
-                1) HELK_BUILD='helk-kibana-analysis';break ;;
-                2) HELK_BUILD='helk-kibana-notebook-analysis';break;;
-                *)
-                    echo -e "${RED}Error...${STD}"
-                    echo "[HELK-INSTALLATION-ERROR] Not a valid build"
-                ;;
-            esac
+            read -t 30 -p "Enter build choice [ 1 - 2]: " -e -i "1" CONFIG_CHOICE
+            READ_INPUT=$?
+            HELK_BUILD=${CONFIG_CHOICE:-"helk-kibana-analysis"}
+            if [ $READ_INPUT = 142 ]; then
+                echo -e "\n[HELK-INSTALLATION-INFO] HELK build set to ${HELK_BUILD}"
+                break
+            else
+                echo "[HELK-INSTALLATION-INFO] HELK build set to ${HELK_BUILD}"
+                case $CONFIG_CHOICE in
+                    1) HELK_BUILD='helk-kibana-analysis';break ;;
+                    2) HELK_BUILD='helk-kibana-notebook-analysis';break;;
+                    *)
+                        echo -e "${RED}Error...${STD}"
+                        echo "[HELK-INSTALLATION-ERROR] Not a valid build"
+                    ;;
+                esac
+            fi
         done
     fi
 }

 # *********** Install and set up pre-requirements ***************
 prepare_helk(){
-    echo "[HELK-INSTALLATION-INFO] HELK IP set to ${HOST_IP}"
-    echo "[HELK-INSTALLATION-INFO] HELK elastic subscription set to ${SUBSCRIPTION_CHOICE}"
     if [ "$SYSTEM_KERNEL" == "Linux" ]; then
         # *********** Check if curl is installed ***************
         if ! [ -x "$(command -v curl)" ]; then