diff --git a/docker/helk-kibana-analysis-basic.yml b/docker/helk-kibana-analysis-basic.yml index 72e316b..077b6aa 100644 --- a/docker/helk-kibana-analysis-basic.yml +++ b/docker/helk-kibana-analysis-basic.yml @@ -100,7 +100,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -108,7 +108,7 @@ services: networks: helk: helk-ksql-server: - image: confluentinc/cp-ksql-server:5.0.1 + image: confluentinc/cp-ksql-server:5.1.0 container_name: helk-ksql-server restart: always depends_on: @@ -127,7 +127,7 @@ services: networks: helk: helk-ksql-cli: - image: confluentinc/cp-ksql-cli:5.0.1 + image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - helk-kafka-broker diff --git a/docker/helk-kibana-analysis-trial.yml b/docker/helk-kibana-analysis-trial.yml index 389a869..160dce7 100644 --- a/docker/helk-kibana-analysis-trial.yml +++ b/docker/helk-kibana-analysis-trial.yml @@ -105,7 +105,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -113,7 +113,7 @@ services: networks: helk: helk-ksql-server: - image: confluentinc/cp-ksql-server:5.0.1 + image: confluentinc/cp-ksql-server:5.1.0 container_name: helk-ksql-server restart: always depends_on: @@ -132,7 +132,7 @@ services: networks: helk: helk-ksql-cli: - image: confluentinc/cp-ksql-cli:5.0.1 + image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - helk-kafka-broker diff --git a/docker/helk-kibana-notebook-analysis-basic.yml b/docker/helk-kibana-notebook-analysis-basic.yml index 42527b2..093d4f3 100644 --- a/docker/helk-kibana-notebook-analysis-basic.yml +++ 
b/docker/helk-kibana-notebook-analysis-basic.yml @@ -140,7 +140,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -148,7 +148,7 @@ services: networks: helk: helk-ksql-server: - image: confluentinc/cp-ksql-server:5.0.1 + image: confluentinc/cp-ksql-server:5.1.0 container_name: helk-ksql-server restart: always depends_on: @@ -167,7 +167,7 @@ services: networks: helk: helk-ksql-cli: - image: confluentinc/cp-ksql-cli:5.0.1 + image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - helk-kafka-broker diff --git a/docker/helk-kibana-notebook-analysis-trial.yml b/docker/helk-kibana-notebook-analysis-trial.yml index ac3c636..0e1efa6 100644 --- a/docker/helk-kibana-notebook-analysis-trial.yml +++ b/docker/helk-kibana-notebook-analysis-trial.yml @@ -145,7 +145,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -153,7 +153,7 @@ services: networks: helk: helk-ksql-server: - image: confluentinc/cp-ksql-server:5.0.1 + image: confluentinc/cp-ksql-server:5.1.0 container_name: helk-ksql-server restart: always depends_on: @@ -172,7 +172,7 @@ services: networks: helk: helk-ksql-cli: - image: confluentinc/cp-ksql-cli:5.0.1 + image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - helk-kafka-broker diff --git a/docker/helk-kibana/scripts/basic/kibana-setup.sh b/docker/helk-kibana/scripts/basic/kibana-setup.sh index 6fa47e6..84b05a4 100755 --- a/docker/helk-kibana/scripts/basic/kibana-setup.sh +++ b/docker/helk-kibana/scripts/basic/kibana-setup.sh @@ -33,7 +33,7 @@ while [[ -z $(grep "Server running at http://$KIBANA" 
/usr/share/kibana/config/k done # *********** Creating Kibana index-patterns *************** -declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past") +declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past" "sysmon-join-*") echo "[+++] Creating Kibana Index Patterns..." for index in ${!index_patterns[@]}; do diff --git a/docker/helk-kibana/scripts/trial/kibana-setup.sh b/docker/helk-kibana/scripts/trial/kibana-setup.sh index f97f8e5..995345b 100755 --- a/docker/helk-kibana/scripts/trial/kibana-setup.sh +++ b/docker/helk-kibana/scripts/trial/kibana-setup.sh @@ -34,7 +34,7 @@ while [[ -z $(grep "Server running at http://$KIBANA" /usr/share/kibana/config/k done # *********** Creating Kibana index-patterns *************** -declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past") +declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" 
"logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past" "sysmon-join-*") echo "[+++] Creating Kibana Index Patterns..." for index in ${!index_patterns[@]}; do diff --git a/docker/helk-ksql/sysmon-join.commands b/docker/helk-ksql/sysmon-join.commands new file mode 100644 index 0000000..7691dfb --- /dev/null +++ b/docker/helk-ksql/sysmon-join.commands @@ -0,0 +1,166 @@ +CREATE STREAM WINLOGBEAT_STREAM \ +(source_name VARCHAR, \ +type VARCHAR, \ +task VARCHAR, \ +log_name VARCHAR, \ +computer_name VARCHAR, \ +event_data STRUCT< \ + UtcTime VARCHAR, \ + ProcessGuid VARCHAR, \ + ProcessId INTEGER, \ + Image VARCHAR, \ + FileVersion VARCHAR, \ + Description VARCHAR, \ + Product VARCHAR, \ + Company VARCHAR, \ + CommandLine VARCHAR, \ + CurrentDirectory VARCHAR, \ + User VARCHAR, \ + LogonGuid VARCHAR, \ + LogonId VARCHAR, \ + TerminalSessionId INTEGER, \ + IntegrityLevel VARCHAR, \ + Hashes VARCHAR, \ + ParentProcessGuid VARCHAR, \ + ParentProcessId INTEGER, \ + ParentImage VARCHAR, \ + ParentCommandLine VARCHAR, \ + Protocol VARCHAR, \ + Initiated VARCHAR, \ + SourceIsIpv6 VARCHAR, \ + SourceIp VARCHAR, \ + SourceHostname VARCHAR, \ + SourcePort INTEGER, \ + SourcePortName VARCHAR, \ + DestinationIsIpv6 VARCHAR, \ + DestinationIp VARCHAR, \ + DestinationHostname VARCHAR, \ + DestinationPort INTEGER, \ + DestinationPortName VARCHAR>, \ +event_id INTEGER) \ +WITH (KAFKA_TOPIC='winlogbeat', VALUE_FORMAT='JSON'); + +CREATE STREAM WINLOGBEAT_STREAM_REKEY \ +WITH (VALUE_FORMAT='JSON', PARTITIONS=1, TIMESTAMP='event_date_creation') \ +AS SELECT \ +STRINGTOTIMESTAMP(event_data->UtcTime, 'yyyy-MM-dd HH:mm:ss.SSS') AS event_date_creation, \ +event_data->ProcessGuid AS process_guid, \ +event_data->ProcessId AS process_id, \ +event_data->Image AS 
process_path, \ +event_data->FileVersion AS file_version, \ +event_data->Description AS file_description, \ +event_data->Company AS file_company, \ +event_data->CommandLine AS process_command_line, \ +event_data->CurrentDirectory AS process_current_directory, \ +event_data->User AS user_account, \ +event_data->LogonGuid AS user_logon_guid, \ +event_data->LogonId AS user_logon_id, \ +event_data->TerminalSessionId AS user_session_id, \ +event_data->IntegrityLevel AS process_integrity_level, \ +event_data->Hashes AS hashes, \ +event_data->ParentProcessGuid AS parent_process_guid, \ +event_data->ParentProcessId AS parent_process_id, \ +event_data->ParentImage AS parent_process_path, \ +event_data->ParentCommandLine AS parent_process_command_line, \ +event_data->Protocol AS network_protocol, \ +event_data->Initiated AS network_connection_initiated, \ +event_data->SourceIsIpv6 AS src_is_ipv6, \ +event_data->SourceIp AS src_ip_addr, \ +event_data->SourceHostname AS src_host_name, \ +event_data->SourcePort AS src_port, \ +event_data->SourcePortName AS src_port_name, \ +event_data->DestinationIsIpv6 AS dst_is_ipv6, \ +event_data->DestinationIp AS dst_ip_addr, \ +event_data->DestinationHostname AS dst_host_name, \ +event_data->DestinationPort AS dst_port, \ +event_data->DestinationPortName AS dst_port_name, \ +event_id, \ +source_name, \ +log_name \ +FROM WINLOGBEAT_STREAM WHERE source_name='Microsoft-Windows-Sysmon' PARTITION BY process_guid; + +CREATE STREAM SYSMON_PROCESS_CREATE WITH (VALUE_FORMAT='JSON', PARTITIONS=1, TIMESTAMP='event_date_creation') \ +AS SELECT \ +event_date_creation, \ +process_guid, \ +process_id, \ +process_path, \ +file_version, \ +file_description, \ +file_company, \ +process_command_line, \ +process_current_directory, \ +user_account, \ +user_logon_guid, \ +user_logon_id, \ +user_session_id, \ +process_integrity_level, \ +hashes, \ +parent_process_guid, \ +parent_process_id, \ +parent_process_path, \ +parent_process_command_line, \ +event_id, \ 
+source_name, \ +log_name \ +FROM WINLOGBEAT_STREAM_REKEY WHERE event_id=1; + +CREATE STREAM SYSMON_NETWORK_CONNECT WITH (VALUE_FORMAT='JSON', PARTITIONS=1, TIMESTAMP='event_date_creation') \ +AS SELECT \ +event_date_creation, \ +process_guid, \ +process_id, \ +process_path, \ +user_account, \ +network_protocol, \ +network_connection_initiated, \ +src_is_ipv6, \ +src_ip_addr, \ +src_host_name, \ +src_port, \ +src_port_name, \ +dst_is_ipv6, \ +dst_ip_addr, \ +dst_host_name, \ +dst_port, \ +dst_port_name, \ +event_id, \ +source_name, \ +log_name \ +FROM WINLOGBEAT_STREAM_REKEY WHERE event_id=3; + +CREATE TABLE SYSMON_PROCESS_CREATE_TABLE \ +(event_date_creation VARCHAR, \ +process_guid VARCHAR, \ +process_id INTEGER, \ +process_path VARCHAR, \ +file_version VARCHAR, \ +file_description VARCHAR, \ +file_company VARCHAR, \ +process_command_line VARCHAR, \ +process_current_directory VARCHAR, \ +user_account VARCHAR, \ +user_logon_guid VARCHAR, \ +user_logon_id VARCHAR, \ +user_session_id INTEGER, \ +process_integrity_level VARCHAR, \ +hashes VARCHAR, \ +parent_process_guid VARCHAR, \ +parent_process_id INTEGER, \ +parent_process_path VARCHAR, \ +parent_process_command_line VARCHAR, \ +event_id INTEGER, \ +source_name VARCHAR, \ +log_name VARCHAR) \ +WITH (KAFKA_TOPIC='SYSMON_PROCESS_CREATE', VALUE_FORMAT='JSON', KEY='process_guid'); + +CREATE STREAM SYSMON_JOIN WITH (PARTITIONS=1) AS \ +SELECT N.EVENT_DATE_CREATION, N.PROCESS_GUID, N.PROCESS_ID, N.PROCESS_PATH, N.USER_ACCOUNT, \ +N.NETWORK_PROTOCOL, N.NETWORK_CONNECTION_INITIATED, N.SRC_IS_IPV6, N.SRC_IP_ADDR, \ +N.SRC_HOST_NAME, N.SRC_PORT, N.SRC_PORT_NAME, N.DST_IS_IPV6, N.DST_IP_ADDR, N.DST_HOST_NAME, \ +N.DST_PORT, N.DST_PORT_NAME, N.SOURCE_NAME, N.LOG_NAME, \ +P.PROCESS_COMMAND_LINE, P.HASHES, P.PARENT_PROCESS_PATH, P.PARENT_PROCESS_COMMAND_LINE, \ +P.USER_LOGON_GUID, P.USER_LOGON_ID, P.USER_SESSION_ID, P.PROCESS_CURRENT_DIRECTORY, \ +P.PROCESS_INTEGRITY_LEVEL, P.PARENT_PROCESS_GUID, P.PARENT_PROCESS_ID \ +FROM 
SYSMON_NETWORK_CONNECT N INNER JOIN SYSMON_PROCESS_CREATE_TABLE P \ +ON N.PROCESS_GUID = P.PROCESS_GUID; \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/0002-kafka-input.conf b/docker/helk-logstash/pipeline/0002-kafka-input.conf index a38004d..cd9071c 100644 --- a/docker/helk-logstash/pipeline/0002-kafka-input.conf +++ b/docker/helk-logstash/pipeline/0002-kafka-input.conf @@ -6,7 +6,7 @@ input { kafka { bootstrap_servers => "helk-kafka-broker:9092" - topics => ["winlogbeat"] + topics => ["winlogbeat", "SYSMON_JOIN"] decorate_events => true codec => "json" auto_offset_reset => "earliest" diff --git a/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf b/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf index 7673095..fcb4456 100644 --- a/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf +++ b/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf @@ -70,6 +70,7 @@ filter { "ParentCommandLine" => "process_parent_command_line" "IntegrityLevel" => "process_integrity_level" "LogonGuid" => "user_logon_guid" + "LogonId" => "user_logon_id" "ParentProcessGuid" => "process_parent_guid" "ParentProcessId" => "process_parent_id" "TerminalSessionId" => "user_session_id" diff --git a/docker/helk-logstash/pipeline/9956-attack-output.conf b/docker/helk-logstash/pipeline/9956-attack-output.conf index 643d33a..459ecd3 100644 --- a/docker/helk-logstash/pipeline/9956-attack-output.conf +++ b/docker/helk-logstash/pipeline/9956-attack-output.conf @@ -13,4 +13,4 @@ output { #password => 'elasticpassword' } } -} +} \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/9957-winevent-sysmon-join-output.conf b/docker/helk-logstash/pipeline/9957-winevent-sysmon-join-output.conf new file mode 100644 index 0000000..3ecba15 --- /dev/null +++ b/docker/helk-logstash/pipeline/9957-winevent-sysmon-join-output.conf @@ -0,0 +1,15 @@ +# HELK sysmon-join output conf file +# HELK build Stage: Alpha +# Author: Roberto 
Rodriguez (@Cyb3rWard0g) +# License: GPL-3.0 + +output { + if [@metadata][kafka][topic] == "SYSMON_JOIN" { + elasticsearch { + hosts => ["helk-elasticsearch:9200"] + index => "sysmon-join-%{+YYYY.MM.dd}" + user => 'elastic' + #password => 'elasticpassword' + } + } +} \ No newline at end of file diff --git a/docker/helk-logstash/scripts/basic/logstash-entrypoint.sh b/docker/helk-logstash/scripts/basic/logstash-entrypoint.sh index 90ae930..3662827 100755 --- a/docker/helk-logstash/scripts/basic/logstash-entrypoint.sh +++ b/docker/helk-logstash/scripts/basic/logstash-entrypoint.sh @@ -14,8 +14,8 @@ echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch URL to $ELASTICSE # ********* Setting LS_JAVA_OPTS *************** if [[ -z "$LS_JAVA_OPTS" ]]; then - LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/1024/2}' /proc/meminfo) - LS_JAVA_OPTS="-Xms${LS_MEMORY}g -Xmx${LS_MEMORY}g" + LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/4}' /proc/meminfo) + export LS_JAVA_OPTS="-Xms${LS_MEMORY}m -Xmx${LS_MEMORY}m" fi echo "[HELK-DOCKER-INSTALLATION-INFO] Setting LS_JAVA_OPTS to $LS_JAVA_OPTS" diff --git a/docker/helk_install.sh b/docker/helk_install.sh index d7ac5b1..dca03be 100755 --- a/docker/helk_install.sh +++ b/docker/helk_install.sh @@ -197,14 +197,22 @@ install_docker_compose(){ set_elasticsearch_password(){ if [[ -z "$ELASTICSEARCH_PASSWORD_INPUT" ]] && [[ $SUBSCRIPTION_CHOICE == "trial" ]]; then while true; do - read -p "[HELK-INSTALLATION-INFO] Set HELK Elasticsearch Password: " ELASTICSEARCH_PASSWORD_INPUT - read -p "[HELK-INSTALLATION-INFO] Verify HELK Elasticsearch Password: " ELASTICSEARCH_PASSWORD_INPUT_VERIFIED - # *********** Validating Password Input *************** - if [[ "$ELASTICSEARCH_PASSWORD_INPUT" == "$ELASTICSEARCH_PASSWORD_INPUT_VERIFIED" ]]; then + read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK Elasticsearch Password: " -e -i "elasticpassword" ELASTICSEARCH_PASSWORD_INPUT + READ_INPUT=$? 
+ ELASTICSEARCH_PASSWORD_INPUT=${ELASTICSEARCH_PASSWORD_INPUT:-"elasticpassword"} + if [ $READ_INPUT = 142 ]; then + echo -e "\n[HELK-INSTALLATION-INFO] HELK elasticsearch password set to ${ELASTICSEARCH_PASSWORD_INPUT}" break else - echo -e "${RED}Error...${STD}" - echo "[HELK-INSTALLATION-INFO] Your password values do not match.." + read -p "[HELK-INSTALLATION-INFO] Verify HELK Elasticsearch Password: " ELASTICSEARCH_PASSWORD_INPUT_VERIFIED + echo -e "[HELK-INSTALLATION-INFO] HELK elasticsearch password set to ${ELASTICSEARCH_PASSWORD_INPUT}" + # *********** Validating Password Input *************** + if [[ "$ELASTICSEARCH_PASSWORD_INPUT" == "$ELASTICSEARCH_PASSWORD_INPUT_VERIFIED" ]]; then + break + else + echo -e "${RED}Error...${STD}" + echo "[HELK-INSTALLATION-INFO] Your password values do not match.." + fi fi done export ELASTIC_PASSWORD=$ELASTICSEARCH_PASSWORD_INPUT @@ -217,14 +225,22 @@ set_elasticsearch_password(){ set_kibana_ui_password(){ if [[ -z "$KIBANA_UI_PASSWORD_INPUT" ]]; then while true; do - read -p "[HELK-INSTALLATION-INFO] Set HELK Kibana UI Password: " KIBANA_UI_PASSWORD_INPUT - read -p "[HELK-INSTALLATION-INFO] Verify HELK Kibana UI Password: " KIBANA_UI_PASSWORD_INPUT_VERIFIED - # *********** Validating Password Input *************** - if [[ "$KIBANA_UI_PASSWORD_INPUT" == "$KIBANA_UI_PASSWORD_INPUT_VERIFIED" ]]; then + read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK Kibana UI Password: " -e -i "hunting" KIBANA_UI_PASSWORD_INPUT + READ_INPUT=$? + KIBANA_UI_PASSWORD_INPUT=${KIBANA_UI_PASSWORD_INPUT:-"hunting"} + if [ $READ_INPUT = 142 ]; then + echo -e "\n[HELK-INSTALLATION-INFO] HELK Kibana UI password set to ${KIBANA_UI_PASSWORD_INPUT}" break else - echo -e "${RED}Error...${STD}" - echo "[HELK-INSTALLATION-INFO] Your password values do not match.." 
+ read -p "[HELK-INSTALLATION-INFO] Verify HELK Kibana UI Password: " KIBANA_UI_PASSWORD_INPUT_VERIFIED + echo -e "[HELK-INSTALLATION-INFO] HELK Kibana UI password set to ${KIBANA_UI_PASSWORD_INPUT}" + # *********** Validating Password Input *************** + if [[ "$KIBANA_UI_PASSWORD_INPUT" == "$KIBANA_UI_PASSWORD_INPUT_VERIFIED" ]]; then + break + else + echo -e "${RED}Error...${STD}" + echo "[HELK-INSTALLATION-INFO] Your password values do not match.." + fi fi done fi @@ -263,7 +279,13 @@ set_network(){ # *********** Accepting Defaults or Allowing user to set the HELK IP *************** local ip_choice read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK IP. Default value is your current IP: " -e -i ${HOST_IP} ip_choice + READ_INPUT=$? HOST_IP="${ip_choice:-$HOST_IP}" + if [ $READ_INPUT = 142 ]; then + echo -e "\n[HELK-INSTALLATION-INFO] HELK IP set to ${HOST_IP}" + else + echo "[HELK-INSTALLATION-INFO] HELK IP set to ${HOST_IP}" + fi fi } @@ -288,17 +310,24 @@ set_helk_subscription(){ # *********** Accepting Defaults or Allowing user to set HELK subscription *************** while true; do local subscription_input - read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK elastic subscription (basic or trial). Default value is basic: " -e -i "basic" subscription_input + read -t 30 -p "[HELK-INSTALLATION-INFO] Set HELK elastic subscription (basic or trial): " -e -i "basic" subscription_input + READ_INPUT=$? SUBSCRIPTION_CHOICE=${subscription_input:-"basic"} - # *********** Validating subscription Input *************** - case $SUBSCRIPTION_CHOICE in - basic) break;; - trial) break;; - *) - echo -e "${RED}Error...${STD}" - echo "[HELK-INSTALLATION-ERROR] Not a valid subscription. 
Valid Options: basic or trial" - ;; - esac + if [ $READ_INPUT = 142 ]; then + echo -e "\n[HELK-INSTALLATION-INFO] HELK elastic subscription set to ${SUBSCRIPTION_CHOICE}" + break + else + echo "[HELK-INSTALLATION-INFO] HELK elastic subscription set to ${SUBSCRIPTION_CHOICE}" + # *********** Validating subscription Input *************** + case $SUBSCRIPTION_CHOICE in + basic) break;; + trial) break;; + *) + echo -e "${RED}Error...${STD}" + echo "[HELK-INSTALLATION-ERROR] Not a valid subscription. Valid Options: basic or trial" + ;; + esac + fi done fi } @@ -317,23 +346,29 @@ set_helk_build(){ echo " " local CONFIG_CHOICE - read -p "Enter build choice [ 1 - 2] " CONFIG_CHOICE - case $CONFIG_CHOICE in - 1) HELK_BUILD='helk-kibana-analysis';break ;; - 2) HELK_BUILD='helk-kibana-notebook-analysis';break;; - *) - echo -e "${RED}Error...${STD}" - echo "[HELK-INSTALLATION-ERROR] Not a valid build" - ;; - esac + read -t 30 -p "Enter build choice [ 1 - 2]: " -e -i "1" CONFIG_CHOICE + READ_INPUT=$? + HELK_BUILD=${CONFIG_CHOICE:-"helk-kibana-analysis"} + if [ $READ_INPUT = 142 ]; then + echo -e "\n[HELK-INSTALLATION-INFO] HELK build set to ${HELK_BUILD}" + break + else + echo "[HELK-INSTALLATION-INFO] HELK build set to ${HELK_BUILD}" + case $CONFIG_CHOICE in + 1) HELK_BUILD='helk-kibana-analysis';break ;; + 2) HELK_BUILD='helk-kibana-notebook-analysis';break;; + *) + echo -e "${RED}Error...${STD}" + echo "[HELK-INSTALLATION-ERROR] Not a valid build" + ;; + esac + fi done fi } # *********** Install and set up pre-requirements *************** prepare_helk(){ - echo "[HELK-INSTALLATION-INFO] HELK IP set to ${HOST_IP}" - echo "[HELK-INSTALLATION-INFO] HELK elastic subscription set to ${SUBSCRIPTION_CHOICE}" if [ "$SYSTEM_KERNEL" == "Linux" ]; then # *********** Check if curl is installed *************** if ! [ -x "$(command -v curl)" ]; then