From c7086ab9c62f60968bd91d88558bd11ccfd9c98e Mon Sep 17 00:00:00 2001
From: Roberto Rodriguez
Date: Thu, 31 Jan 2019 11:29:49 -0500
Subject: [PATCH] [HOT FIX] 01312019 helk

ELK Updated to version 6.5.4

helk-logstash
fix https://github.com/Cyb3rWard0g/HELK/issues/156
+ Pipeline Updated
++ More security events
++ Reduced regex complexity to split process paths to process names
++ Enabled Kafka output again for Win Security and Win Sysmon logs
++ Added more win security conversion events

helk-elastalert
fix https://github.com/Cyb3rWard0g/HELK/issues/157
fix https://github.com/Cyb3rWard0g/HELK/issues/159

ELK:
+ Consolidated ELK scripts to one per container instead of trial and basic

helk-sigma
+ Updated own fork

helk-jupyter
+ Updated Elastic ES-Hadoop to 6.5.4

helk-jupyter
+ jupyterlab-manager widgets
+ Updated pandas 0.24.0
+ Updated altair 2.3.0
---
 .../rules/cobalt_strike_msagent.yml           |   10 -
 .../rules/cobalt_strike_rundll32.yml          |   10 -
 docker/helk-elastalert/rules/psexec_psh.yml   |   10 -
 docker/helk-elastalert/rules/whoami.yml       |   10 -
 .../scripts/elastalert-entrypoint.sh          |   41 +-
 docker/helk-elastalert/scripts/pull-sigma.sh  |    8 +-
 docker/helk-elasticsearch/Dockerfile          |    2 +-
 .../scripts/basic/elasticsearch-entrypoint.sh |   21 -
 .../{trial => }/elasticsearch-entrypoint.sh   |   19 +-
 docker/helk-jupyter/Dockerfile                |    8 +-
 docker/helk-jupyter/spark/spark-defaults.conf |    2 +-
 docker/helk-kibana-analysis-basic.yml         |   24 +-
 docker/helk-kibana-analysis-trial.yml         |   34 +-
 .../helk-kibana-notebook-analysis-basic.yml   |   32 +-
 .../helk-kibana-notebook-analysis-trial.yml   |   42 +-
 docker/helk-kibana/Dockerfile                 |    2 +-
 .../scripts/basic/kibana-entrypoint.sh        |   41 -
 .../helk-kibana/scripts/kibana-entrypoint.sh  |   82 +
 docker/helk-kibana/scripts/kibana-setup.sh    |  142 ++
 .../scripts/trial/kibana-entrypoint.sh        |   89 -
 docker/helk-logstash/Dockerfile               |    8 +-
 docker/helk-logstash/config/logstash.yml      |    6 +-
 .../pipeline/0002-kafka-input.conf            |   14 +-
 ...
=> 1010-winevent-winlogbeats-filter.conf} | 13 +- ...t-cleanup-lowercasing-windows-filter.conf} | 4 +- .../1523-winevent-process-name-split.conf | 16 +- .../pipeline/1531-winevent-sysmon-filter.conf | 2 +- .../1532-winevent-security-filter.conf | 49 +- .../pipeline/1544-winevent-cleanup-other.conf | 27 +- .../1545-winevent-security-conversions.conf | 301 ++- .../2511-winevent-powershell-filter.conf | 2 +- .../pipeline/9950-winevent-sysmon-output.conf | 5 + .../9951-winevent-security-output.conf | 5 + .../pipeline/9958-osquery-output.conf | 1 + .../scripts/basic/logstash-entrypoint.sh | 43 - .../scripts/logstash-entrypoint.sh | 99 + .../scripts/trial/logstash-entrypoint.sh | 68 - docker/helk-zeppelin/Dockerfile | 70 - docker/helk-zeppelin/conf/interpreter.json | 1658 ----------------- docker/helk-zeppelin/spark-defaults.conf | 38 - docker/helk-zeppelin/zeppelin-env.sh.template | 90 - docker/helk_install.sh | 15 +- docker/helk_update.sh | 1 - 43 files changed, 838 insertions(+), 2326 deletions(-) delete mode 100644 docker/helk-elastalert/rules/cobalt_strike_msagent.yml delete mode 100644 docker/helk-elastalert/rules/cobalt_strike_rundll32.yml delete mode 100644 docker/helk-elastalert/rules/psexec_psh.yml delete mode 100644 docker/helk-elastalert/rules/whoami.yml delete mode 100755 docker/helk-elasticsearch/scripts/basic/elasticsearch-entrypoint.sh rename docker/helk-elasticsearch/scripts/{trial => }/elasticsearch-entrypoint.sh (51%) delete mode 100755 docker/helk-kibana/scripts/basic/kibana-entrypoint.sh create mode 100755 docker/helk-kibana/scripts/kibana-entrypoint.sh create mode 100755 docker/helk-kibana/scripts/kibana-setup.sh delete mode 100755 docker/helk-kibana/scripts/trial/kibana-entrypoint.sh rename docker/helk-logstash/pipeline/{1010-winevent-remove-winlogbeats-prepend-of-eventdata.conf => 1010-winevent-winlogbeats-filter.conf} (69%) rename docker/helk-logstash/pipeline/{1522-winevent-cleanup-lowercasing-windows-is-case-insensitive.conf => 1522-winevent-cleanup-lowercasing-windows-filter.conf} (97%) delete mode 100755 docker/helk-logstash/scripts/basic/logstash-entrypoint.sh create mode 100755 docker/helk-logstash/scripts/logstash-entrypoint.sh delete mode 100755 docker/helk-logstash/scripts/trial/logstash-entrypoint.sh delete mode 100644 docker/helk-zeppelin/Dockerfile delete mode 100644 docker/helk-zeppelin/conf/interpreter.json delete mode 100644 docker/helk-zeppelin/spark-defaults.conf delete mode 100644 docker/helk-zeppelin/zeppelin-env.sh.template diff --git a/docker/helk-elastalert/rules/cobalt_strike_msagent.yml b/docker/helk-elastalert/rules/cobalt_strike_msagent.yml deleted file mode 100644 index f2162287..00000000 --- a/docker/helk-elastalert/rules/cobalt_strike_msagent.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: "cobalt_strike_msagent" -index: logs-endpoint-winevent-* -filter: - - query: - query_string: - query: "pipe_name:msagent_*" -type: any -alert: -- slack -slack_webhook_url: "YOUR OWN SLACK WEBHOOK URL" \ No newline at end of file diff --git a/docker/helk-elastalert/rules/cobalt_strike_rundll32.yml b/docker/helk-elastalert/rules/cobalt_strike_rundll32.yml deleted file mode 100644 index f61bff19..00000000 --- a/docker/helk-elastalert/rules/cobalt_strike_rundll32.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: "cobalt_strike_rundll32" -index: logs-endpoint-winevent-* -filter: - - query: - query_string: - query: "process_command_line.keyword:\"C:\\Windows\\system32\\rundll32.exe\""" -type: any -alert: -- slack -slack_webhook_url: "YOUR OWN SLACK WEBHOOK URL" \ No newline at 
end of file diff --git a/docker/helk-elastalert/rules/psexec_psh.yml b/docker/helk-elastalert/rules/psexec_psh.yml deleted file mode 100644 index 62aac771..00000000 --- a/docker/helk-elastalert/rules/psexec_psh.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: "psexec_psh" -index: logs-endpoint-winevent-* -filter: - - query: - query_string: - query: "registry_key_value:(COMSPEC OR powershell.exe) OR service_image_path:(COMSPEC OR powershell.exe)" -type: any -alert: -- slack -slack_webhook_url: "YOUR OWN SLACK WEBHOOK URL" \ No newline at end of file diff --git a/docker/helk-elastalert/rules/whoami.yml b/docker/helk-elastalert/rules/whoami.yml deleted file mode 100644 index 783b46bc..00000000 --- a/docker/helk-elastalert/rules/whoami.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: "whoami" -index: logs-endpoint-winevent-* -filter: - - query: - query_string: - query: "process_command_line: whoami" -type: any -alert: -- slack -slack_webhook_url: "YOUR OWN SLACK WEBHOOK URL" \ No newline at end of file diff --git a/docker/helk-elastalert/scripts/elastalert-entrypoint.sh b/docker/helk-elastalert/scripts/elastalert-entrypoint.sh index e28292d7..b2e5f122 100755 --- a/docker/helk-elastalert/scripts/elastalert-entrypoint.sh +++ b/docker/helk-elastalert/scripts/elastalert-entrypoint.sh @@ -12,35 +12,33 @@ if [[ -z "$ES_HOST" ]]; then ES_HOST=helk-elasticsearch fi -echo "[HELK-DOCKER-INSTALLATION-INFO] Setting Elasticsearch server name to $ES_HOST" +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Setting Elasticsearch server name to $ES_HOST" if [[ -z "$ES_PORT" ]]; then ES_PORT=9200 fi -echo "[HELK-DOCKER-INSTALLATION-INFO] Setting Elasticsearch server port to $ES_PORT" +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Setting Elasticsearch server port to $ES_PORT" -if [[ "$ELASTIC_USERNAME" ]] && [[ "$ES_PASSWORD" ]]; then +if [[ -n "$ELASTIC_PASSWORD" ]]; then + if [[ -z "$ELASTIC_USERNAME" ]]; then + ELASTIC_USERNAME=elastic + fi echo "es_username: $ELASTIC_USERNAME" >> $ESALERT_HOME/config.yaml echo "es_password: $ELASTIC_PASSWORD" >> $ESALERT_HOME/config.yaml - echo "[HELK-DOCKER-INSTALLATION-INFO] Setting Elasticsearch username to $ELASTIC_USERNAME" - echo "[HELK-DOCKER-INSTALLATION-INFO] Setting Elasticsearch password to $ELASTIC_PASSWORD" + echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Setting Elasticsearch username to $ELASTIC_USERNAME" + echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Setting Elasticsearch password to $ELASTIC_PASSWORD" ELASTICSEARCH_ACCESS=http://$ELASTIC_USERNAME:"$ELASTIC_PASSWORD"@$ES_HOST:$ES_PORT - if [[ "$KIBANA_HOST" ]] && [[ "$KIBANA_PORT" ]]; then - KIBANA=$KIBANA_HOST:$KIBANA_PORT - else - exit 1 - fi else ELASTICSEARCH_ACCESS=http://$ES_HOST:$ES_PORT fi # *********** Update Elastalert Config ****************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Updating Elastalert main config.." +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Updating Elastalert main config.." sed -i "s/^es_host\:.*$/es_host\: ${ES_HOST}/g" $ESALERT_HOME/config.yaml sed -i "s/^es_port\:.*$/es_port\: ${ES_PORT}/g" $ESALERT_HOME/config.yaml # *********** Check if Elasticsearch is up *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." 
until curl -s $ES_HOST:$ES_PORT -o /dev/null; do sleep 1 done @@ -48,29 +46,24 @@ done # *********** Creating Elastalert Status Index *************** response_code=$(curl -s -o /dev/null -w "%{http_code}" $ELASTICSEARCH_ACCESS/elastalert_status) if [[ $response_code == 404 ]]; then - echo "[HELK-DOCKER-INSTALLATION-INFO] Creating Elastalert index.." - if [[ "$ELASTIC_PASSWORD" ]]; then - # *********** Waiting for Kibana port to be up *************** - echo "[++] Checking to see if kibana port is up..." - until curl -s $KIBANA -o /dev/null; do - sleep 1 - done - elastalert-create-index --host $ES_HOST --port $ES_PORT --username $ELASTIC_USERNAME--password $ELASTIC_PASSWORD --no-auth --no-ssl --url-prefix '' --old-index '' + echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Creating Elastalert index.." + if [[ -n "$ELASTIC_PASSWORD" ]]; then + elastalert-create-index --host $ES_HOST --port $ES_PORT --username $ELASTIC_USERNAME --password $ELASTIC_PASSWORD --no-auth --no-ssl --url-prefix '' --old-index '' else elastalert-create-index --host $ES_HOST --port $ES_PORT --no-auth --no-ssl --url-prefix '' --old-index '' fi else - echo "[HELK-DOCKER-INSTALLATION-INFO] Elastalert index already exists" + echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Elastalert index already exists" fi # *********** Transform SIGMA Rules to Elastalert Signatures ************* -echo "[HELK-DOCKER-INSTALLATION-INFO] Executing pull-sigma.sh script.." +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Executing pull-sigma.sh script.." /etc/elastalert/pull-sigma.sh # *********** Setting Slack Integration ************* rule_counter=0 if [[ "$SLACK_WEBHOOK_URL" ]]; then - echo "[HELK-DOCKER-INSTALLATION-INFO] Setting Slack webhook url to $SLACK_WEBHOOK_URL.." + echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Setting Slack webhook url to $SLACK_WEBHOOK_URL.." for er in $ESALERT_HOME/rules/*; do priority=$(sed -n -e 's/^priority: //p' $er) if [[ $priority = "1" ]]; then @@ -96,5 +89,5 @@ if [[ "$SLACK_WEBHOOK_URL" ]]; then echo " " fi -echo "[HELK-DOCKER-INSTALLATION-INFO] Starting Elastalert.." +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Starting Elastalert.." exec "$@" \ No newline at end of file diff --git a/docker/helk-elastalert/scripts/pull-sigma.sh b/docker/helk-elastalert/scripts/pull-sigma.sh index 2fe5b1c3..deca065b 100644 --- a/docker/helk-elastalert/scripts/pull-sigma.sh +++ b/docker/helk-elastalert/scripts/pull-sigma.sh @@ -10,7 +10,7 @@ cd $ESALERT_SIGMA_HOME # ******* Check if Elastalert rules folder has SIGMA rules ************ -echo "[+++] Checking if Elastalert rules folder has SIGMA rules.." +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Checking if Elastalert rules folder has SIGMA rules.." if ls $ESALERT_HOME/rules/ | grep -v '^helk_' >/dev/null 2>&1; then echo "[+++++] SIGMA rules available in rules folder.." SIGMA_RULES_AVAILABLE=YES @@ -19,11 +19,11 @@ else fi # ******* Check if local SIGMA repo needs update ************* -echo "[+++] Fetch updates for SIGMA remote.." +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Fetch updates for SIGMA remote.." git remote update # Reference: https://stackoverflow.com/a/3278427 -echo "[+++] Checking to see if local SIGMA repo is up to date or not.." +echo "[HELK-ELASTALERT-DOCKER-INSTALLATION-INFO] Checking to see if local SIGMA repo is up to date or not.." 
UPSTREAM=${1:-'@{u}'} LOCAL=$(git rev-parse @) REMOTE=$(git rev-parse "$UPSTREAM") @@ -85,7 +85,7 @@ find $ESALERT_HOME/rules/ -type f -name 'sigma_sysmon_powershell_suspicious_para # ******** Deleting Empty Files *********** echo " " -echo "\Removing empty files.." +echo "Removing empty files.." echo "-------------------------" rule_counter=0 for ef in $ESALERT_HOME/rules/* ; do diff --git a/docker/helk-elasticsearch/Dockerfile b/docker/helk-elasticsearch/Dockerfile index 9273358a..5b203dc1 100644 --- a/docker/helk-elasticsearch/Dockerfile +++ b/docker/helk-elasticsearch/Dockerfile @@ -6,6 +6,6 @@ # References: # https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html -FROM docker.elastic.co/elasticsearch/elasticsearch:6.5.3 +FROM docker.elastic.co/elasticsearch/elasticsearch:6.5.4 LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" LABEL description="Dockerfile base for the HELK Elasticsearch." \ No newline at end of file diff --git a/docker/helk-elasticsearch/scripts/basic/elasticsearch-entrypoint.sh b/docker/helk-elasticsearch/scripts/basic/elasticsearch-entrypoint.sh deleted file mode 100755 index c0d8e0b7..00000000 --- a/docker/helk-elasticsearch/scripts/basic/elasticsearch-entrypoint.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -# HELK script: elasticsearch-entrypoint.sh -# HELK script description: sets elasticsearch configs and starts elasticsearch -# HELK build Stage: Alpha -# Author: Roberto Rodriguez (@Cyb3rWard0g) -# License: GPL-3.0 - -# *********** Looking for ES *************** -if [[ -z "$ES_JAVA_OPTS" ]]; then - ES_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/1024/2}' /proc/meminfo) - if [ $ES_MEMORY -gt 31 ]; then - ES_MEMORY=31 - fi - export ES_JAVA_OPTS="-Xms${ES_MEMORY}g -Xmx${ES_MEMORY}g" -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting ES_JAVA_OPTS to $ES_JAVA_OPTS" - -# ********** Starting Elasticsearch ***************** -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.." 
-/usr/local/bin/docker-entrypoint.sh \ No newline at end of file diff --git a/docker/helk-elasticsearch/scripts/trial/elasticsearch-entrypoint.sh b/docker/helk-elasticsearch/scripts/elasticsearch-entrypoint.sh similarity index 51% rename from docker/helk-elasticsearch/scripts/trial/elasticsearch-entrypoint.sh rename to docker/helk-elasticsearch/scripts/elasticsearch-entrypoint.sh index 6fed88b2..0084e50f 100755 --- a/docker/helk-elasticsearch/scripts/trial/elasticsearch-entrypoint.sh +++ b/docker/helk-elasticsearch/scripts/elasticsearch-entrypoint.sh @@ -6,7 +6,7 @@ # Author: Roberto Rodriguez (@Cyb3rWard0g) # License: GPL-3.0 -# *********** Looking for ES *************** +# *********** Setting ES_JAVA_OPTS *************** if [[ -z "$ES_JAVA_OPTS" ]]; then ES_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/1024/2}' /proc/meminfo) if [ $ES_MEMORY -gt 31 ]; then @@ -16,11 +16,20 @@ if [[ -z "$ES_JAVA_OPTS" ]]; then fi echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting ES_JAVA_OPTS to $ES_JAVA_OPTS" -# *********** HELK ES Password *************** -if [[ -z "$ELASTIC_PASSWORD" ]]; then - export ELASTIC_PASSWORD=elasticpassword +# ******** Checking License Type *************** +ENVIRONMENT_VARIABLES=$(env) +XPACK_LICENSE_TYPE="$(echo $ENVIRONMENT_VARIABLES | grep -oE 'xpack.license.self_generated.type=[^ ]*' | sed s/.*=//)" + +# ******** Set Trial License Variables *************** +if [[ $XPACK_LICENSE_TYPE == "trial" ]]; then + # *********** HELK ES Password *************** + if [[ -z "$ELASTIC_PASSWORD" ]]; then + export ELASTIC_PASSWORD=elasticpassword + fi + echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elastic password to $ELASTIC_PASSWORD" fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elastic password to $ELASTIC_PASSWORD" + +echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elastic license to $XPACK_LICENSE_TYPE" # ********** Starting Elasticsearch ***************** echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.." 
diff --git a/docker/helk-jupyter/Dockerfile b/docker/helk-jupyter/Dockerfile index 2efa1e60..68307401 100644 --- a/docker/helk-jupyter/Dockerfile +++ b/docker/helk-jupyter/Dockerfile @@ -11,7 +11,7 @@ ENV DEBIAN_FRONTEND noninteractive # *********** Setting Environment Variables *************** ENV JUPYTER_DIR=/opt/helk/jupyter -ENV ESHADOOP_VERSION=6.5.3 +ENV ESHADOOP_VERSION=6.5.4 ENV POSTGRESQL_VERSION=42.2.5 # *********** Installing Prerequisites *************** @@ -23,11 +23,12 @@ RUN apt-get update -qq \ libxml2-dev libxslt1-dev zlib1g-dev postgresql \ # ********** Install PIP Packages ************ && sudo pip3 install --upgrade pip \ - && pip3 install pandas==0.23.4 \ - altair==2.2.2 \ + && pip3 install pandas==0.24.0 \ + altair==2.3.0 \ jupyter==1.0.0 \ jupyterlab==0.35.4 \ jupyterhub==0.9.4 \ + ipywidgets==7.4.2 \ # *********** Setting Jupyter Hub & Jupyter ********************** && curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash - \ && apt-get install -y --no-install-recommends nodejs \ @@ -35,6 +36,7 @@ RUN apt-get update -qq \ && jupyter labextension install @jupyterlab/hub-extension@0.12.0 \ && jupyter labextension install @jupyterlab/celltags@0.1.4 \ && jupyter labextension install @mflevine/jupyterlab_html@0.1.4 \ + && jupyter labextension install @jupyter-widgets/jupyterlab-manager@0.38.1 \ && bash -c 'mkdir -pv /opt/helk/{es-hadoop,jupyter,jupyterhub}' \ && mkdir -v /usr/local/share/jupyter/kernels/pyspark3 \ && mkdir -v /var/log/spark \ diff --git a/docker/helk-jupyter/spark/spark-defaults.conf b/docker/helk-jupyter/spark/spark-defaults.conf index 1e9cd757..228ff1c8 100644 --- a/docker/helk-jupyter/spark/spark-defaults.conf +++ b/docker/helk-jupyter/spark/spark-defaults.conf @@ -22,7 +22,7 @@ spark.executor.logs.rolling.maxRetainedFiles 20 # Set the strategy of rolling of executor logs. spark.executor.logs.rolling.strategy spark.executor.logs.rolling.time.interval # Comma-separated list of jars to include on the driver and executor classpaths. Globs are allowed. -spark.jars /opt/helk/es-hadoop/elasticsearch-hadoop-6.5.3.jar +spark.jars /opt/helk/es-hadoop/elasticsearch-hadoop-6.5.4.jar # Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths. # The coordinates should be groupId:artifactId:version. 
spark.jars.packages graphframes:graphframes:0.6.0-spark2.3-s_2.11,org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.0 diff --git a/docker/helk-kibana-analysis-basic.yml b/docker/helk-kibana-analysis-basic.yml index 352c98fe..1f7617aa 100644 --- a/docker/helk-kibana-analysis-basic.yml +++ b/docker/helk-kibana-analysis-basic.yml @@ -2,14 +2,14 @@ version: '3.5' services: helk-elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:6.5.3 + image: docker.elastic.co/elasticsearch/elasticsearch:6.5.4 container_name: helk-elasticsearch secrets: - source: elasticsearch.yml target: /usr/share/elasticsearch/config/elasticsearch.yml volumes: - esdata:/usr/share/elasticsearch/data - - ./helk-elasticsearch//scripts/basic:/usr/share/elasticsearch/scripts + - ./helk-elasticsearch//scripts:/usr/share/elasticsearch/scripts entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh environment: - cluster.name=helk-cluster @@ -28,7 +28,7 @@ services: networks: helk: helk-logstash: - image: docker.elastic.co/logstash/logstash:6.5.3 + image: docker.elastic.co/logstash/logstash:6.5.4 container_name: helk-logstash secrets: - source: logstash.yml @@ -37,24 +37,24 @@ services: - ./helk-logstash/pipeline:/usr/share/logstash/pipeline - ./helk-logstash/output_templates:/usr/share/logstash/output_templates - ./helk-logstash/enrichments/cti:/usr/share/logstash/cti - - ./helk-logstash/scripts/basic:/usr/share/logstash/scripts + - ./helk-logstash/scripts:/usr/share/logstash/scripts entrypoint: /usr/share/logstash/scripts/logstash-entrypoint.sh ports: - "5044:5044" restart: always depends_on: - - helk-zookeeper + - helk-kibana networks: helk: helk-kibana: - image: docker.elastic.co/kibana/kibana:6.5.3 + image: docker.elastic.co/kibana/kibana:6.5.4 container_name: helk-kibana secrets: - source: kibana.yml target: /usr/share/kibana/config/kibana.yml volumes: - ./helk-kibana/dashboards:/usr/share/kibana/dashboards - - ./helk-kibana/scripts/basic:/usr/share/kibana/scripts + - ./helk-kibana/scripts:/usr/share/kibana/scripts entrypoint: /usr/share/kibana/scripts/kibana-entrypoint.sh restart: always depends_on: @@ -84,7 +84,7 @@ services: container_name: helk-zookeeper restart: always depends_on: - - helk-kibana + - helk-logstash networks: helk: helk-kafka-broker: @@ -100,7 +100,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat, winsysmon, winsecurity KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -130,7 +130,6 @@ services: image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - - helk-kafka-broker - helk-ksql-server environment: KSQL_HEAP_OPTS: -Xmx1g @@ -139,12 +138,11 @@ services: networks: helk: helk-elastalert: - image: cyb3rward0g/helk-elastalert:0.2.0 + image: cyb3rward0g/helk-elastalert:0.2.1 container_name: helk-elastalert restart: always depends_on: - - helk-elasticsearch - - helk-kibana + - helk-logstash environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 diff --git a/docker/helk-kibana-analysis-trial.yml b/docker/helk-kibana-analysis-trial.yml index abd65e3f..dafae217 100644 --- a/docker/helk-kibana-analysis-trial.yml +++ b/docker/helk-kibana-analysis-trial.yml @@ -2,14 +2,14 @@ version: '3.5' services: helk-elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:6.5.3 + image: docker.elastic.co/elasticsearch/elasticsearch:6.5.4 container_name: 
helk-elasticsearch secrets: - source: elasticsearch.yml target: /usr/share/elasticsearch/config/elasticsearch.yml volumes: - esdata:/usr/share/elasticsearch/data - - ./helk-elasticsearch/scripts/trial:/usr/share/elasticsearch/scripts + - ./helk-elasticsearch/scripts:/usr/share/elasticsearch/scripts entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh environment: - cluster.name=helk-cluster @@ -29,16 +29,13 @@ services: networks: helk: helk-logstash: - image: docker.elastic.co/logstash/logstash:6.5.3 + build: helk-logstash/ container_name: helk-logstash - secrets: - - source: logstash.yml - target: /usr/share/logstash/config/logstash.yml volumes: - ./helk-logstash/pipeline:/usr/share/logstash/pipeline - ./helk-logstash/output_templates:/usr/share/logstash/output_templates - ./helk-logstash/enrichments/cti:/usr/share/logstash/cti - - ./helk-logstash/scripts/trial:/usr/share/logstash/scripts + - ./helk-logstash/scripts:/usr/share/logstash/scripts entrypoint: /usr/share/logstash/scripts/logstash-entrypoint.sh environment: - xpack.monitoring.elasticsearch.username=logstash_system @@ -48,22 +45,22 @@ services: - "5044:5044" restart: always depends_on: - - helk-zookeeper + - helk-kibana networks: helk: helk-kibana: - image: docker.elastic.co/kibana/kibana:6.5.3 + image: docker.elastic.co/kibana/kibana:6.5.4 container_name: helk-kibana secrets: - source: kibana.yml target: /usr/share/kibana/config/kibana.yml volumes: - ./helk-kibana/dashboards:/usr/share/kibana/dashboards - - ./helk-kibana/scripts/trial:/usr/share/kibana/scripts + - ./helk-kibana/scripts:/usr/share/kibana/scripts entrypoint: /usr/share/kibana/scripts/kibana-entrypoint.sh environment: KIBANA_UI_PASSWORD: ${KIBANA_UI_PASSWORD} - ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} + ELASTICSEARCH_PASSWORD: ${ELASTIC_PASSWORD} restart: always depends_on: - helk-elasticsearch @@ -89,7 +86,7 @@ services: container_name: helk-zookeeper restart: always depends_on: - - helk-kibana + - helk-logstash networks: helk: helk-kafka-broker: @@ -105,7 +102,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat, winsysmon, winsecurity KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -135,7 +132,6 @@ services: image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - - helk-kafka-broker - helk-ksql-server environment: KSQL_HEAP_OPTS: -Xmx1g @@ -144,19 +140,15 @@ services: networks: helk: helk-elastalert: - image: cyb3rward0g/helk-elastalert:0.2.0 + image: cyb3rward0g/helk-elastalert:0.2.1 container_name: helk-elastalert restart: always depends_on: - - helk-elasticsearch - - helk-kibana + - helk-logstash environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 - ELASTIC_USERNAME: elastic ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} - KIBANA_HOST: helk-kibana - KIBANA_PORT: 5601 networks: helk: @@ -171,7 +163,5 @@ volumes: secrets: elasticsearch.yml: file: ./helk-elasticsearch/config/elasticsearch.yml - logstash.yml: - file: ./helk-logstash/config/logstash.yml kibana.yml: file: ./helk-kibana/config/kibana.yml \ No newline at end of file diff --git a/docker/helk-kibana-notebook-analysis-basic.yml b/docker/helk-kibana-notebook-analysis-basic.yml index 48ad79ac..9e51932d 100644 --- a/docker/helk-kibana-notebook-analysis-basic.yml +++ b/docker/helk-kibana-notebook-analysis-basic.yml @@ -2,14 +2,14 @@ version: '3.5' services: helk-elasticsearch: - 
image: docker.elastic.co/elasticsearch/elasticsearch:6.5.3 + image: docker.elastic.co/elasticsearch/elasticsearch:6.5.4 container_name: helk-elasticsearch secrets: - source: elasticsearch.yml target: /usr/share/elasticsearch/config/elasticsearch.yml volumes: - esdata:/usr/share/elasticsearch/data - - ./helk-elasticsearch//scripts/basic:/usr/share/elasticsearch/scripts + - ./helk-elasticsearch//scripts:/usr/share/elasticsearch/scripts entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh environment: - cluster.name=helk-cluster @@ -28,7 +28,7 @@ services: networks: helk: helk-logstash: - image: docker.elastic.co/logstash/logstash:6.5.3 + image: docker.elastic.co/logstash/logstash:6.5.4 container_name: helk-logstash secrets: - source: logstash.yml @@ -37,24 +37,24 @@ services: - ./helk-logstash/pipeline:/usr/share/logstash/pipeline - ./helk-logstash/output_templates:/usr/share/logstash/output_templates - ./helk-logstash/enrichments/cti:/usr/share/logstash/cti - - ./helk-logstash/scripts/basic:/usr/share/logstash/scripts + - ./helk-logstash/scripts:/usr/share/logstash/scripts entrypoint: /usr/share/logstash/scripts/logstash-entrypoint.sh ports: - "5044:5044" restart: always depends_on: - - helk-zookeeper + - helk-kibana networks: helk: helk-kibana: - image: docker.elastic.co/kibana/kibana:6.5.3 + image: docker.elastic.co/kibana/kibana:6.5.4 container_name: helk-kibana secrets: - source: kibana.yml target: /usr/share/kibana/config/kibana.yml volumes: - ./helk-kibana/dashboards:/usr/share/kibana/dashboards - - ./helk-kibana/scripts/basic:/usr/share/kibana/scripts + - ./helk-kibana/scripts:/usr/share/kibana/scripts entrypoint: /usr/share/kibana/scripts/kibana-entrypoint.sh restart: always depends_on: @@ -81,7 +81,7 @@ services: networks: helk: helk-jupyter: - image: cyb3rward0g/helk-jupyter:0.0.8 + image: cyb3rward0g/helk-jupyter:0.0.9 container_name: helk-jupyter volumes: - ./helk-jupyter/notebooks:/opt/helk/jupyter/notebooks @@ -90,7 +90,7 @@ services: JUPYTER_USERS: hunter1, hunter2 restart: always depends_on: - - helk-elasticsearch + - helk-logstash networks: helk: helk-spark-master: @@ -103,7 +103,7 @@ services: - "8080:8080" restart: always depends_on: - - helk-elasticsearch + - helk-logstash networks: helk: helk-spark-worker: @@ -124,7 +124,7 @@ services: container_name: helk-zookeeper restart: always depends_on: - - helk-kibana + - helk-logstash networks: helk: helk-kafka-broker: @@ -140,7 +140,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat, winsysmon, winsecurity KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -170,21 +170,19 @@ services: image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - - helk-kafka-broker - helk-ksql-server environment: - KSQL_HEAP_OPTS: -Xmx1g + KSQL_HEAP_OPTS: -Xmx550m entrypoint: /bin/sh tty: true networks: helk: helk-elastalert: - image: cyb3rward0g/helk-elastalert:0.2.0 + image: cyb3rward0g/helk-elastalert:0.2.1 container_name: helk-elastalert restart: always depends_on: - - helk-elasticsearch - - helk-kibana + - helk-logstash environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 diff --git a/docker/helk-kibana-notebook-analysis-trial.yml b/docker/helk-kibana-notebook-analysis-trial.yml index 5ba010cf..d047f884 100644 --- a/docker/helk-kibana-notebook-analysis-trial.yml +++ 
b/docker/helk-kibana-notebook-analysis-trial.yml @@ -2,14 +2,14 @@ version: '3.5' services: helk-elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:6.5.3 + image: docker.elastic.co/elasticsearch/elasticsearch:6.5.4 container_name: helk-elasticsearch secrets: - source: elasticsearch.yml target: /usr/share/elasticsearch/config/elasticsearch.yml volumes: - esdata:/usr/share/elasticsearch/data - - ./helk-elasticsearch/scripts/trial:/usr/share/elasticsearch/scripts + - ./helk-elasticsearch/scripts:/usr/share/elasticsearch/scripts entrypoint: /usr/share/elasticsearch/scripts/elasticsearch-entrypoint.sh environment: - cluster.name=helk-cluster @@ -29,16 +29,13 @@ services: networks: helk: helk-logstash: - image: docker.elastic.co/logstash/logstash:6.5.3 + build: helk-logstash/ container_name: helk-logstash - secrets: - - source: logstash.yml - target: /usr/share/logstash/config/logstash.yml volumes: - ./helk-logstash/pipeline:/usr/share/logstash/pipeline - ./helk-logstash/output_templates:/usr/share/logstash/output_templates - ./helk-logstash/enrichments/cti:/usr/share/logstash/cti - - ./helk-logstash/scripts/trial:/usr/share/logstash/scripts + - ./helk-logstash/scripts:/usr/share/logstash/scripts entrypoint: /usr/share/logstash/scripts/logstash-entrypoint.sh environment: - xpack.monitoring.elasticsearch.username=logstash_system @@ -48,22 +45,22 @@ services: - "5044:5044" restart: always depends_on: - - helk-zookeeper + - helk-kibana networks: helk: helk-kibana: - image: docker.elastic.co/kibana/kibana:6.5.3 + image: docker.elastic.co/kibana/kibana:6.5.4 container_name: helk-kibana secrets: - source: kibana.yml target: /usr/share/kibana/config/kibana.yml volumes: - ./helk-kibana/dashboards:/usr/share/kibana/dashboards - - ./helk-kibana/scripts/trial:/usr/share/kibana/scripts + - ./helk-kibana/scripts:/usr/share/kibana/scripts entrypoint: /usr/share/kibana/scripts/kibana-entrypoint.sh environment: KIBANA_UI_PASSWORD: ${KIBANA_UI_PASSWORD} - ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} + ELASTICSEARCH_PASSWORD: ${ELASTIC_PASSWORD} restart: always depends_on: - helk-elasticsearch @@ -86,7 +83,7 @@ services: networks: helk: helk-jupyter: - image: cyb3rward0g/helk-jupyter:0.0.8 + image: cyb3rward0g/helk-jupyter:0.0.9 container_name: helk-jupyter volumes: - ./helk-jupyter/notebooks:/opt/helk/jupyter/notebooks @@ -95,7 +92,7 @@ services: JUPYTER_USERS: hunter1, hunter2 restart: always depends_on: - - helk-elasticsearch + - helk-logstash networks: helk: helk-spark-master: @@ -108,7 +105,7 @@ services: - "8080:8080" restart: always depends_on: - - helk-elasticsearch + - helk-logstash networks: helk: helk-spark-worker: @@ -129,7 +126,7 @@ services: container_name: helk-zookeeper restart: always depends_on: - - helk-kibana + - helk-logstash networks: helk: helk-kafka-broker: @@ -145,7 +142,7 @@ services: REPLICATION_FACTOR: 1 ADVERTISED_LISTENER: ${ADVERTISED_LISTENER} ZOOKEEPER_NAME: helk-zookeeper - KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat + KAFKA_CREATE_TOPICS: winlogbeat, SYSMON_JOIN, filebeat, winsysmon, winsecurity KAFKA_HEAP_OPTS: -Xmx1G -Xms1G LOG_RETENTION_HOURS: 4 ports: @@ -175,28 +172,23 @@ services: image: confluentinc/cp-ksql-cli:5.1.0 container_name: helk-ksql-cli depends_on: - - helk-kafka-broker - helk-ksql-server environment: - KSQL_HEAP_OPTS: -Xmx1g + KSQL_HEAP_OPTS: -Xmx550m entrypoint: /bin/sh tty: true networks: helk: helk-elastalert: - image: cyb3rward0g/helk-elastalert:0.2.0 + image: cyb3rward0g/helk-elastalert:0.2.1 container_name: helk-elastalert restart: 
always depends_on: - - helk-elasticsearch - - helk-kibana + - helk-logstash environment: ES_HOST: helk-elasticsearch ES_PORT: 9200 - ELASTIC_USERNAME: elastic ELASTIC_PASSWORD: ${ELASTIC_PASSWORD} - KIBANA_HOST: helk-kibana - KIBANA_PORT: 5601 networks: helk: @@ -211,8 +203,6 @@ volumes: secrets: elasticsearch.yml: file: ./helk-elasticsearch/config/elasticsearch.yml - logstash.yml: - file: ./helk-logstash/config/logstash.yml kibana.yml: file: ./helk-kibana/config/kibana.yml diff --git a/docker/helk-kibana/Dockerfile b/docker/helk-kibana/Dockerfile index 4d49f4c3..bf548d37 100644 --- a/docker/helk-kibana/Dockerfile +++ b/docker/helk-kibana/Dockerfile @@ -6,6 +6,6 @@ # References: # https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html -FROM docker.elastic.co/kibana/kibana:6.5.3 +FROM docker.elastic.co/kibana/kibana:6.5.4 LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" LABEL description="Dockerfile base for the HELK Kibana." \ No newline at end of file diff --git a/docker/helk-kibana/scripts/basic/kibana-entrypoint.sh b/docker/helk-kibana/scripts/basic/kibana-entrypoint.sh deleted file mode 100755 index 51d7150f..00000000 --- a/docker/helk-kibana/scripts/basic/kibana-entrypoint.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/sh - -# HELK script: kibana-entrypoint.sh -# HELK script description: Starts Kibana service -# HELK build Stage: Alpha -# Author: Roberto Rodriguez (@Cyb3rWard0g) -# License: GPL-3.0 - -# *********** Install Plugins ********************* - -# *********** Environment Variables *************** -if [[ -z "$ELASTICSEARCH_URL" ]]; then - export ELASTICSEARCH_URL="http://helk-elasticsearch:9200" -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch URL to $ELASTICSEARCH_URL" - -if [[ -z "$SERVER_HOST" ]]; then - export SERVER_HOST="helk-kibana" -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Kibana server host to $SERVER_HOST" - -if [[ -z "$SERVER_PORT" ]]; then - export SERVER_PORT="5601" -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Kibana server port to $SERVER_PORT" - - -# *********** Start Kibana services *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." -until curl -s $ELASTICSEARCH_URL -o /dev/null; do - sleep 1 -done - -echo "[HELK-DOCKER-INSTALLATION-INFO] Starting Kibana service.." -exec /usr/local/bin/kibana-docker & - -# *********** Creating Kibana Dashboards, visualizations and index-patterns *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Running helk_kibana_setup.sh script..." 
-/usr/share/kibana/scripts/kibana-setup.sh - -tail -f /usr/share/kibana/config/kibana_logs.log \ No newline at end of file diff --git a/docker/helk-kibana/scripts/kibana-entrypoint.sh b/docker/helk-kibana/scripts/kibana-entrypoint.sh new file mode 100755 index 00000000..fed476f8 --- /dev/null +++ b/docker/helk-kibana/scripts/kibana-entrypoint.sh @@ -0,0 +1,82 @@ +#!/bin/sh + +# HELK script: kibana-entrypoint.sh +# HELK script description: Starts Kibana service +# HELK build Stage: Alpha +# Author: Roberto Rodriguez (@Cyb3rWard0g) +# License: GPL-3.0 + +# *********** Install Plugins ********************* + +# *********** Environment Variables *************** +if [[ -z "$ELASTICSEARCH_URL" ]]; then + export ELASTICSEARCH_URL=http://helk-elasticsearch:9200 +fi +echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting Elasticsearch URL to $ELASTICSEARCH_URL" + +if [[ -z "$SERVER_HOST" ]]; then + export SERVER_HOST=helk-kibana +fi +echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting Kibana server host to $SERVER_HOST" + +if [[ -z "$SERVER_PORT" ]]; then + export SERVER_PORT=5601 +fi +echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting Kibana server port to $SERVER_PORT" + +# ******** Set Trial License Variables *************** +if [[ -n "$ELASTICSEARCH_PASSWORD" ]]; then + if [[ -z "$ELASTICSEARCH_USERNAME" ]]; then + export ELASTICSEARCH_USERNAME=elastic + fi + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting Elasticsearch's username to access Elasticsearch to $ELASTICSEARCH_USERNAME" + + if [[ -z "$KIBANA_USER" ]]; then + export KIBANA_USER=kibana + fi + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting Kibana's username to access Elasticsearch to $KIBANA_USER" + + if [[ -z "$KIBANA_PASSWORD" ]]; then + export KIBANA_PASSWORD=kibanapassword + fi + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting Kibana's password to access Elasticsearch to $KIBANA_PASSWORD" + + if [[ -z "$KIBANA_UI_PASSWORD" ]]; then + export KIBANA_UI_PASSWORD=hunting + fi + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting Kibana's UI password to $KIBANA_UI_PASSWORD" + + # *********** Check if Elasticsearch is up *************** + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." + until curl -s -u $ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD $ELASTICSEARCH_URL -o /dev/null; do + sleep 1 + done + + # *********** Change Kibana and Logstash password *************** + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Submitting a request to change the password of a Kibana and Logstash users .." + until curl -u $ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD -H 'Content-Type:application/json' -XPUT $ELASTICSEARCH_URL/_xpack/security/user/kibana/_password -d "{\"password\": \"$KIBANA_PASSWORD\"}" + do + sleep 1 + done + + until curl -u $ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD -H 'Content-Type:application/json' -XPUT $ELASTICSEARCH_URL/_xpack/security/user/logstash_system/_password -d "{\"password\": \"logstashpassword\"}" + do + sleep 1 + done + +else + # *********** Check if Elasticsearch is up *************** + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." + until curl -s $ELASTICSEARCH_URL -o /dev/null; do + sleep 1 + done +fi + +echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Starting Kibana service.." +exec /usr/local/bin/kibana-docker & + +# *********** Creating Kibana Dashboards, visualizations and index-patterns *************** +echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Running helk_kibana_setup.sh script..." 
+/usr/share/kibana/scripts/kibana-setup.sh + +tail -f /usr/share/kibana/config/kibana_logs.log \ No newline at end of file diff --git a/docker/helk-kibana/scripts/kibana-setup.sh b/docker/helk-kibana/scripts/kibana-setup.sh new file mode 100755 index 00000000..257e41d7 --- /dev/null +++ b/docker/helk-kibana/scripts/kibana-setup.sh @@ -0,0 +1,142 @@ +#!/bin/bash + +# HELK script: kibana-setup.sh +# HELK script description: Creates Kibana index patterns, dashboards and visualizations automatically. +# HELK build Stage: Alpha +# Author: Roberto Rodriguez (@Cyb3rWard0g) +# License: GPL-3.0 + +# References: +# https://github.com/elastic/kibana/issues/3709 (https://github.com/hobti01) +# https://explainshell.com/explain?cmd=set+-euxo%20pipefail +# https://github.com/elastic/beats-dashboards/blob/master/load.sh +# https://github.com/elastic/kibana/issues/14872 +# https://github.com/elastic/stack-docker/blob/master/docker-compose.yml +# https://stackoverflow.com/a/42377880 + +# *********** Setting Variables *************** +KIBANA_URL=http://$SERVER_HOST:$SERVER_PORT +TIME_FIELD="@timestamp" +DEFAULT_INDEX="logs-endpoint-winevent-sysmon-*" +DIR=/usr/share/kibana/dashboards + +# *********** Waiting for Kibana port to be up *************** +echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Checking to see if kibana port is up..." +until curl -s $KIBANA_URL -o /dev/null; do + sleep 1 +done + +# *********** Waiting for Kibana server to be running *************** +echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Checking to see if kibana server is running..." +while [[ -z $(grep "Server running at http://$KIBANA" /usr/share/kibana/config/kibana_logs.log) ]]; do + sleep 1 +done + +# ******** Set Trial License Variables *************** +if [[ -n "$ELASTICSEARCH_PASSWORD" ]] && [[ -n "$ELASTICSEARCH_USERNAME" ]]; then + # *********** Creating Kibana index-patterns *************** + declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past" "sysmon-join-*") + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Creating Kibana Index Patterns..." + for index in ${!index_patterns[@]}; do + echo "[++++++] creating kibana index ${index_patterns[${index}]}" + until curl -u $KIBANA_USER:$KIBANA_PASSWORD -X POST "$KIBANA_URL/api/saved_objects/index-pattern/${index_patterns[${index}]}" \ + -H "Content-Type: application/json" -H "kbn-xsrf: true" \ + -d"{\"attributes\":{\"title\":\"${index_patterns[${index}]}\",\"timeFieldName\":\"$TIME_FIELD\"}}" + do + sleep 1 + done + done + + # *********** Making Sysmon the default index *************** + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Making Sysmon the default index..." + until curl -u $KIBANA_USER:$KIBANA_PASSWORD -X POST -H "Content-Type: application/json" -H "kbn-xsrf: true" \ + "$KIBANA_URL/api/kibana/settings/defaultIndex" \ + -d"{\"value\":\"$DEFAULT_INDEX\"}" + do + sleep 1 + done + + # *********** Loading dashboards *************** + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Loading Dashboards..." 
+ for file in ${DIR}/*.json + do + echo "[++++++] Loading dashboard file ${file}" + until curl -u $KIBANA_USER:$KIBANA_PASSWORD -X POST "$KIBANA_URL/api/kibana/dashboards/import" -H 'kbn-xsrf: true' \ + -H 'Content-type:application/json' -d @${file} + do + sleep 1 + done + done + + # *********** Creating HELK User ********************* + echo "[HELK-KIBANA-DOCKER-INSTALLATION-INFO] Setting HELK's user password to $KIBANA_UI_PASSWORD" + curl -u $ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD -X POST "$ELASTICSEARCH_URL/_xpack/security/user/helk" -H 'Content-Type: application/json' -d" + { + \"password\" : \"$KIBANA_UI_PASSWORD\", + \"roles\" : [ \"superuser\" ], + \"full_name\" : \"The HELK\", + \"email\" : \"helk@example.com\" + } + " + + # *********** Create Roles ******************* + curl -u $ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD -X POST "$ELASTICSEARCH_URL/_xpack/security/role/hunters" -H 'Content-Type: application/json' -d' + { + "run_as": [], + "cluster": [], + "indices": [ + { + "names": [ "logs-*" ], + "privileges": [ "read" ] + } + ] + } + ' + curl -u $ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD -X POST "$ELASTICSEARCH_URL/_xpack/security/role/sysmon_hunters" -H 'Content-Type: application/json' -d' + { + "run_as": [], + "cluster": [], + "indices": [ + { + "names": [ "logs-endpoint-winevent-sysmon-*" ], + "privileges": [ "read" ] + } + ] + } + ' +else + # *********** Creating Kibana index-patterns *************** + declare -a index_patterns=("logs-endpoint-*" "logs-*" "logs-endpoint-winevent-sysmon-*" "logs-endpoint-winevent-security-*" "logs-endpoint-winevent-system-*" "logs-endpoint-winevent-application-*" "logs-endpoint-winevent-wmiactivity-*" "logs-endpoint-winevent-powershell-*" "mitre-attack-*" "elastalert_status" "elastalert_status_status" "elastalert_status_error" "elastalert_status_silence" "elastalert_status_past" "sysmon-join-*") + + echo "[+++] Creating Kibana Index Patterns..." + for index in ${!index_patterns[@]}; do + echo "[++++++] creating kibana index ${index_patterns[${index}]}" + until curl -X POST "$KIBANA_URL/api/saved_objects/index-pattern/${index_patterns[${index}]}" \ + -H "Content-Type: application/json" -H "kbn-xsrf: true" \ + -d"{\"attributes\":{\"title\":\"${index_patterns[${index}]}\",\"timeFieldName\":\"$TIME_FIELD\"}}" + do + sleep 1 + done + done + + # *********** Making Sysmon the default index *************** + echo "[++] Making Sysmon the default index..." + until curl -X POST -H "Content-Type: application/json" -H "kbn-xsrf: true" \ + "$KIBANA_URL/api/kibana/settings/defaultIndex" \ + -d"{\"value\":\"$DEFAULT_INDEX\"}" + do + sleep 1 + done + + # *********** Loading dashboards *************** + echo "[+++] Loading Dashboards..." 
+ for file in ${DIR}/*.json + do + echo "[++++++] Loading dashboard file ${file}" + until curl -X POST "$KIBANA_URL/api/kibana/dashboards/import" -H 'kbn-xsrf: true' \ + -H 'Content-type:application/json' -d @${file} + do + sleep 1 + done + done +fi \ No newline at end of file diff --git a/docker/helk-kibana/scripts/trial/kibana-entrypoint.sh b/docker/helk-kibana/scripts/trial/kibana-entrypoint.sh deleted file mode 100755 index 4a4bea6f..00000000 --- a/docker/helk-kibana/scripts/trial/kibana-entrypoint.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/sh - -# HELK script: kibana-entrypoint.sh -# HELK script description: Starts Kibana service -# HELK build Stage: Alpha -# Author: Roberto Rodriguez (@Cyb3rWard0g) -# License: GPL-3.0 - -# *********** Install Plugins ********************* - -# *********** Environment Variables *************** -if [[ -z "$ELASTICSEARCH_URL" ]]; then - export ELASTICSEARCH_URL=http://helk-elasticsearch:9200 -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch URL to $ELASTICSEARCH_URL" - -if [[ -z "$SERVER_HOST" ]]; then - export SERVER_HOST=helk-kibana -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Kibana server host to $SERVER_HOST" - -if [[ -z "$SERVER_PORT" ]]; then - export SERVER_PORT=5601 -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Kibana server port to $SERVER_PORT" - -# *********** Password for Elasticsearch Backend ******** -if [[ -z "$ELASTIC_PASSWORD" ]]; then - ELASTIC_PASSWORD=elasticpassword -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch backend password to $ELASTIC_PASSWORD" - -if [[ -z "$ELASTIC_HOST" ]]; then - ELASTIC_HOST=helk-elasticsearch -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch host name to $ELASTIC_HOST" - -if [[ -z "$ELASTIC_PORT" ]]; then - ELASTIC_PORT=9200 -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch port to $ELASTIC_PORT" - -# *********** Password used by Kibana to access Elasticsearch ******** -if [[ -z "$ELASTICSEARCH_PASSWORD" ]]; then - export ELASTICSEARCH_PASSWORD=kibanapassword -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Kibana's password to access Elasticsearch to $ELASTICSEARCH_PASSWORD" - -if [[ -z "$ELASTICSEARCH_USERNAME" ]]; then - export ELASTICSEARCH_USERNAME=kibana -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Kibana's username to access Elasticsearch to $ELASTICSEARCH_USERNAME" - -if [[ -z "$KIBANA_UI_PASSWORD" ]]; then - KIBANA_UI_PASSWORD=hunting -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Kibana's UI password to $KIBANA_UI_PASSWORD" - -if [[ -z "$ELASTICSEARCH_ACCESS" ]]; then - ELASTICSEARCH_ACCESS=http://elastic:$ELASTIC_PASSWORD@$ELASTIC_HOST:$ELASTIC_PORT -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting custom ELasticsearch URL with credentials to $ELASTICSEARCH_ACCESS" - -# *********** Check if Elasticsearch is up *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." -until curl -s $ELASTICSEARCH_URL -o /dev/null; do - sleep 1 -done - -# *********** Change Kibana and Logstash password *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Submitting a request to change the password of a Kibana and Logstash users .." 
-until curl -s -H 'Content-Type:application/json' -XPUT $ELASTICSEARCH_ACCESS/_xpack/security/user/kibana/_password -d "{\"password\": \"$ELASTICSEARCH_PASSWORD\"}" -do - sleep 2 -done - -until curl -s -H 'Content-Type:application/json' -XPUT $ELASTICSEARCH_ACCESS/_xpack/security/user/logstash_system/_password -d "{\"password\": \"logstashpassword\"}" -do - sleep 2 -done - -echo "[HELK-DOCKER-INSTALLATION-INFO] Starting Kibana service.." -exec /usr/local/bin/kibana-docker & - -# *********** Creating Kibana Dashboards, visualizations and index-patterns *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Running helk_kibana_setup.sh script..." -/usr/share/kibana/scripts/kibana-setup.sh - -tail -f /usr/share/kibana/config/kibana_logs.log \ No newline at end of file diff --git a/docker/helk-logstash/Dockerfile b/docker/helk-logstash/Dockerfile index b4cef0cf..84742408 100644 --- a/docker/helk-logstash/Dockerfile +++ b/docker/helk-logstash/Dockerfile @@ -5,8 +5,10 @@ # References: # https://cyberwardog.blogspot.com/2017/02/setting-up-pentesting-i-mean-threat_98.html -# https://github.com/spujadas/elk-docker/blob/master/Dockerfile -FROM docker.elastic.co/logstash/logstash:6.5.3 +FROM docker.elastic.co/logstash/logstash:6.5.4 LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" -LABEL description="Dockerfile base for the HELK Logstash." \ No newline at end of file +LABEL description="Dockerfile base for the HELK Logstash." + +RUN mv /usr/share/logstash/config/logstash.yml /usr/share/logstash/config/logstash.yml.backup +COPY --chown=logstash:logstash config/logstash.yml /usr/share/logstash/config/logstash.yml \ No newline at end of file diff --git a/docker/helk-logstash/config/logstash.yml b/docker/helk-logstash/config/logstash.yml index 7b8aac1b..c05f53f1 100644 --- a/docker/helk-logstash/config/logstash.yml +++ b/docker/helk-logstash/config/logstash.yml @@ -43,7 +43,7 @@ # How many events to retrieve from inputs before sending to filters+workers # # pipeline.batch.size: 125 -pipeline.batch.size: 500 +pipeline.batch.size: 1000 # # How long to wait before dispatching an undersized batch to filters+workers # Value is in milliseconds. 
@@ -75,11 +75,11 @@ pipeline.batch.size: 500 # Periodically check if the configuration has changed and reload the pipeline # This can also be triggered manually through the SIGHUP signal # -# config.reload.automatic: false +config.reload.automatic: true # # How often to check if the pipeline configuration has changed (in seconds) # -# config.reload.interval: 3s +config.reload.interval: 60s # # Show fully compiled configuration as debug LS_SETTINGS_DIR message # NOTE: --LS_SETTINGS_DIR.level must be 'debug' diff --git a/docker/helk-logstash/pipeline/0002-kafka-input.conf b/docker/helk-logstash/pipeline/0002-kafka-input.conf index 023bf837..7bf65f1a 100644 --- a/docker/helk-logstash/pipeline/0002-kafka-input.conf +++ b/docker/helk-logstash/pipeline/0002-kafka-input.conf @@ -9,13 +9,17 @@ input { topics => ["winlogbeat", "SYSMON_JOIN","filebeat"] decorate_events => true codec => "json" - auto_offset_reset => "earliest" - ############################# HELK Optimizing Latency ############################# + auto_offset_reset => "latest" + ############################# HELK Optimizing Throughput & Latency ############################# fetch_min_bytes => "1" - request_timeout_ms => "305000" + request_timeout_ms => "40000" + ############################# HELK Optimizing Durability ############################# + enable_auto_commit => "false" ############################# HELK Optimizing Availability ############################# - session_timeout_ms => "10000" - max_poll_records => "500" + connections_max_idle_ms => "540000" + session_timeout_ms => "30000" max_poll_interval_ms => "300000" + ############################# + max_poll_records => "250" } } \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/1010-winevent-remove-winlogbeats-prepend-of-eventdata.conf b/docker/helk-logstash/pipeline/1010-winevent-winlogbeats-filter.conf similarity index 69% rename from docker/helk-logstash/pipeline/1010-winevent-remove-winlogbeats-prepend-of-eventdata.conf rename to docker/helk-logstash/pipeline/1010-winevent-winlogbeats-filter.conf index 1b0b8693..7956bbb5 100644 --- a/docker/helk-logstash/pipeline/1010-winevent-remove-winlogbeats-prepend-of-eventdata.conf +++ b/docker/helk-logstash/pipeline/1010-winevent-winlogbeats-filter.conf @@ -1,11 +1,12 @@ -# HELK winevent-remove-winlogbeats-prepend-of-eventdata filter conf +# HELK winevent-winlogbeats filter conf # HELK build Stage: Alpha -# Author: Nate Guagenti (@neu5ron) +# Author: Nate Guagenti (@neu5ron), Roberto Rodriguez (@Cyb3rWard0g) # License: GPL-3.0 filter { # Use the following to get rid of the prepended "event_data" nest that (elastic) winlogbeats adds to windows logs if [type] == "wineventlog" and [beat] { + mutate { add_field => { "z_logstash_pipeline" => "1010" } } ruby { code => " eventdata = event.get('event_data') @@ -29,5 +30,13 @@ filter { #" #tag_on_exception => "_rubyexception_1010" } + mutate { + add_field => { + "beat_hostname" => "%{[beat][hostname]}" + "beat_version" => "%{[beat][version]}" + "beat_name" => "%{[beat][name]}" + } + remove_field => [ "[beat]" ] + } } } \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/1522-winevent-cleanup-lowercasing-windows-is-case-insensitive.conf b/docker/helk-logstash/pipeline/1522-winevent-cleanup-lowercasing-windows-filter.conf similarity index 97% rename from docker/helk-logstash/pipeline/1522-winevent-cleanup-lowercasing-windows-is-case-insensitive.conf rename to docker/helk-logstash/pipeline/1522-winevent-cleanup-lowercasing-windows-filter.conf index 
5627dfcd..7f265c71 100644 --- a/docker/helk-logstash/pipeline/1522-winevent-cleanup-lowercasing-windows-is-case-insensitive.conf +++ b/docker/helk-logstash/pipeline/1522-winevent-cleanup-lowercasing-windows-filter.conf @@ -1,8 +1,10 @@ -# HELK winevent-cleanup-lowercasing-windows-is-case-sensitive filter conf +# HELK winevent-cleanup-lowercasing-windows filter conf # HELK build Stage: Alpha # Author: Nate Guagenti (@neu5ron) # License: GPL-3.0 +# Lowercasing windows is case insensitive + filter { if [event_id] { mutate { add_field => { "z_logstash_pipeline" => "1522" } } diff --git a/docker/helk-logstash/pipeline/1523-winevent-process-name-split.conf b/docker/helk-logstash/pipeline/1523-winevent-process-name-split.conf index f1359d3c..f91be32d 100644 --- a/docker/helk-logstash/pipeline/1523-winevent-process-name-split.conf +++ b/docker/helk-logstash/pipeline/1523-winevent-process-name-split.conf @@ -7,7 +7,7 @@ filter { if [event_id] { if [Image] { mutate { add_field => { "z_logstash_pipeline" => "1523_1" } } - if [Image] =~ /^(\w*$)|^(\w*\..*$)/ { + if [Image] !~ /.*\\.*/ { mutate { copy => {"Image" => "process_name"} } } else { @@ -19,7 +19,7 @@ filter { } if [Application] { mutate { add_field => { "z_logstash_pipeline" => "1523_2" } } - if [Application] =~ /^(\w*$)|^(\w*\..*$)/ { + if [Application] !~ /.*\\.*/ { mutate { copy => {"Application" => "process_name"} } } else { @@ -31,7 +31,7 @@ filter { } if [NewProcessName] { mutate { add_field => { "z_logstash_pipeline" => "1523_3" } } - if [NewProcessName] =~ /^(\w*$)|^(\w*\..*$)/ { + if [NewProcessName] !~ /.*\\.*/ { mutate { copy => {"NewProcessName" => "process_name"} } } else { @@ -43,7 +43,7 @@ filter { } if [ProcessName] { mutate { add_field => { "z_logstash_pipeline" => "1523_4" } } - if [ProcessName] =~ /^(\w*$)|^(\w*\..*$)/ { + if [ProcessName] !~ /.*\\.*/ { mutate { copy => {"ProcessName" => "process_name"} } } else { @@ -55,7 +55,7 @@ filter { } if [ParentProcessName] { mutate { add_field => { "z_logstash_pipeline" => "1523_5" } } - if [ParentProcessName] =~ /^(\w*$)|^(\w*\..*$)/ { + if [ParentProcessName] !~ /.*\\.*/ { mutate { copy => {"ParentProcessName" => "process_parent_name"} } } else { @@ -67,7 +67,7 @@ filter { } if [ParentImage] { mutate { add_field => { "z_logstash_pipeline" => "1523_6" } } - if [ParentImage] =~ /^(\w*$)|^(\w*\..*$)/ { + if [ParentImage] !~ /.*\\.*/ { mutate { copy => {"ParentImage" => "process_parent_name"} } } else { @@ -79,7 +79,7 @@ filter { } if [TargetImage] { mutate { add_field => { "z_logstash_pipeline" => "1523_7" } } - if [SourceImage] =~ /^(\w*$)|^(\w*\..*$)/ { + if [SourceImage] !~ /.*\\.*/ { mutate { copy => {"SourceImage" => "process_name"} } } else { @@ -88,7 +88,7 @@ filter { tag_on_failure => [ "_SourceImage__grokparsefailure", "_grokparsefailure", "_parsefailure" ] } } - if [TargetImage] =~ /^(\w*$)|^(\w*\..*$)/ { + if [TargetImage] !~ /.*\\.*/ { mutate { copy => {"TargetImage" => "target_process_name"} } } else{ diff --git a/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf b/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf index fcb4456f..8cf8303e 100644 --- a/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf +++ b/docker/helk-logstash/pipeline/1531-winevent-sysmon-filter.conf @@ -70,7 +70,7 @@ filter { "ParentCommandLine" => "process_parent_command_line" "IntegrityLevel" => "process_integrity_level" "LogonGuid" => "user_logon_guid" - "LogonIdd" => "user_logon_id" + "LogonId" => "user_logon_id" "ParentProcessGuid" => "process_parent_guid" 
"ParentProcessId" => "process_parent_id" "TerminalSessionId" => "user_session_id" diff --git a/docker/helk-logstash/pipeline/1532-winevent-security-filter.conf b/docker/helk-logstash/pipeline/1532-winevent-security-filter.conf index 86fef8f7..e57f2f76 100644 --- a/docker/helk-logstash/pipeline/1532-winevent-security-filter.conf +++ b/docker/helk-logstash/pipeline/1532-winevent-security-filter.conf @@ -225,7 +225,26 @@ filter { } } } - if [event_id] == 4661 or [event_id] == 4662 or [event_id] == 4663 { + if [event_id] == 4659 { + mutate { + rename => { + "SubjectDomainName" => "user_domain" + "SubjectLogonId" => "user_logon_id" + "SubjectUserName" => "user_name" + "SubjectUserSid" => "user_sid" + "ProcessId" => "process_id" + "ProcessName" => "process_path" + "ObjectName" => "object_name" + "ObjectServer" => "object_server" + "ObjectType" => "object_type" + "HandleId" => "object_access_handle_id" + "TransactionId" => "object_transaction_guid" + "AccessList" => "object_access_list_requested" + } + } + } + if [event_id] == 4660 or [event_id] == 4661 or [event_id] == 4662 or [event_id] == 4663 { + # https://docs.microsoft.com/en-us/windows/security/threat-protection/auditing/event-4660 # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-4661.md # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-4662.md # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-4663.md @@ -341,6 +360,7 @@ filter { rename => { "NewProcessId" => "process_id" "NewProcessName" => "process_path" + "ParentProcessName" => "process_parent_path" "CommandLine" => "process_command_line" "SubjectDomainName" => "user_domain" "SubjectLogonId" => "user_logon_id" @@ -557,7 +577,7 @@ filter { } } } - if [event_id] == 4768 or [event_id] == 4769 { + if [event_id] == 4768 or [event_id] == 4769 or [event_id] == 4770 or [event_id] == 4771 { # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-4768.md # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-4769.md # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-4771.md @@ -566,7 +586,7 @@ filter { "TargetDomainName" => "user_domain" "TargetUserName" => "user_name" "IpPort" => "src_port" - "PreAuthType" => "service_ticket_preauthtype" + "PreAuthType" => "service_ticket_preauth_type" "LogonGuid" => "user_logon_guid" "ServiceName" => "service_ticket_name" "ServiceSid" => "service_ticket_id" @@ -645,6 +665,7 @@ filter { "SubjectUserSid" => "user_sid" "ProcessId" => "process_id" "ProcessName" => "process_path" + "ParentProcessName" => "parent_process_path" "HandleId" => "object_access_handle_id" "NewSd" => "object_new_sddl" "ObjectName" => "object_name" @@ -683,7 +704,7 @@ filter { } } } - if [event_id] == 5136 { + if [event_id] == 5136 or [event_id] == 5137 { # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-5136.md mutate { rename => { @@ -748,6 +769,17 @@ filter { } } } + if [event_id] == 5379 { + mutate { + rename => { + "SubjectDomainName" => "user_domain" + "SubjectLogonId" => "user_logon_id" + "SubjectUserName" => "user_name" + "SubjectUserSid" => "user_sid" + "ReadOperation" => "object_operation_type" + } + } + } if [event_id] == 5447 { # 
https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-5447.md mutate { @@ -785,6 +817,15 @@ filter { } } } + if [event_id] == 6144 { + # https://docs.microsoft.com/en-us/windows/security/threat-protection/auditing/event-6144 + mutate { + rename => { + "ErrorCode" => "error_code" + "GPOList" => "gpo_list" + } + } + } if [event_id] == 6416 { # https://github.com/MicrosoftDocs/windows-itpro-docs/blob/master/windows/security/threat-protection/auditing/event-6416.md mutate { diff --git a/docker/helk-logstash/pipeline/1544-winevent-cleanup-other.conf b/docker/helk-logstash/pipeline/1544-winevent-cleanup-other.conf index bac451df..5d97c3a0 100644 --- a/docker/helk-logstash/pipeline/1544-winevent-cleanup-other.conf +++ b/docker/helk-logstash/pipeline/1544-winevent-cleanup-other.conf @@ -6,33 +6,36 @@ filter { if [event_id] { if [user_logon_guid] { - mutate { add_field => { "z_logstash_pipeline" => "1544" } } + mutate { add_field => { "z_logstash_pipeline" => "1544_1" } } mutate { gsub => [ "user_logon_guid", "[{}]", "" ] } } if [provider_guid] { - mutate { add_field => { "z_logstash_pipeline" => "1544" } } + mutate { add_field => { "z_logstash_pipeline" => "1544_2" } } mutate { gsub => [ "provider_guid", "[{}]", "" ] } } if [process_guid] { - mutate { add_field => { "z_logstash_pipeline" => "1544" } } + mutate { add_field => { "z_logstash_pipeline" => "1544_3" } } mutate { gsub => [ "process_guid", "[{}]", "" ] } } if [process_parent_guid] { - mutate { add_field => { "z_logstash_pipeline" => "1544" } } + mutate { add_field => { "z_logstash_pipeline" => "1544_4" } } mutate { gsub => [ "process_parent_guid", "[{}]", "" ] } } if [target_process_guid] { - mutate { add_field => { "z_logstash_pipeline" => "1544" } } + mutate { add_field => { "z_logstash_pipeline" => "1544_5" } } mutate { gsub => [ "target_process_guid", "[{}]", "" ] } } if [user] { - mutate { add_field => { "z_logstash_pipeline" => "1544" } } - mutate { - remove_field => [ - "message", - "user" - ] - } + mutate { add_field => { "z_logstash_pipeline" => "1544_6" } } + mutate { remove_field => [ "user" ] } + } + if [message] { + mutate { add_field => { "z_logstash_pipeline" => "1544_7" } } + mutate { remove_field => [ "message" ] } + } + if [host] { + mutate { add_field => { "z_logstash_pipeline" => "1544_8" } } + mutate { remove_field => [ "host" ] } } } } \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/1545-winevent-security-conversions.conf b/docker/helk-logstash/pipeline/1545-winevent-security-conversions.conf index b168acac..c8b5d66f 100644 --- a/docker/helk-logstash/pipeline/1545-winevent-security-conversions.conf +++ b/docker/helk-logstash/pipeline/1545-winevent-security-conversions.conf @@ -5,8 +5,8 @@ filter { if [log_name] == "Security" { - if [event_id] == 4624 and [impersonation_level] { - mutate { add_field => { "z_logstash_pipeline" => "1545" } } + if [event_id] == 4624 { + mutate { add_field => { "z_logstash_pipeline" => "1545_1" } } translate { field => "[impersonation_level]" destination => "[impersonation_level_value]" @@ -27,6 +27,303 @@ filter { } fallback => "Unknown" } + translate { + field => "[logon_elevated_token]" + destination => "[logon_elevated_token_value]" + dictionary => { + "%%1842" => "Yes" + "%%1843" => "No" + } + fallback => "Unknown" + } + translate { + field => "[logon_virtual_account]" + destination => "[logon_virtual_account_value]" + dictionary => { + "%%1842" => "Yes" + "%%1843" => "No" + } + fallback => "Unknown" + 
} + } + if [event_status] or [event_sub_status] { + if [event_id] == 4625 { + mutate { add_field => { "z_logstash_pipeline" => "1545_2" } } + translate { + field => "[event_status]" + destination => "[event_status_value]" + dictionary => { + "0xc000005e" => "There are currently no logon servers available to service the logon request" + "0xc0000064" => "User logon with misspelled or bad user account" + "0xc000006a" => "User logon with misspelled or bad password" + "0xc000006d" => "This is either due to a bad username or authentication information" + "0xc000006e" => "Unknown user name or bad password" + "0xc000006f" => "User logon outside authorized hours" + "0xc0000070" => "User logon from unauthorized workstation" + "0xc0000071" => "User logon with expired password" + "0xc0000072" => "User logon to account disabled by administrator" + "0xc00000dc" => "Indicates the Sam Server was in the wrong state to perform the desired operation" + "0xc0000133" => "Clocks between DC and other computer too far out of sync" + "0xc000015b" => "The user has not been granted the requested logon type (aka logon right) at this machine" + "0xc000018c" => "The logon request failed because the trust relationship between the primary domain and the trusted domain failed" + "0xc0000192" => "An attempt was made to logon, but the Netlogon service was not started" + "0xc0000193" => "User logon with expired account" + "0xc0000224" => "User is required to change password at next logon" + "0xc0000225" => "Evidently a bug in Windows and not a risk" + "0xc0000234" => "User logon with account locked" + "0xc00002ee" => "Failure Reason: An Error occurred during Logon" + "0xc0000413" => "Logon Failure: The machine you are logging onto is protected by an authentication firewall. The specified account is not allowed to authenticate to the machine" + "0x0" => "Status OK" + } + fallback => "Unknown" + } + translate { + field => "[event_sub_status]" + destination => "[event_sub_status_value]" + dictionary => { + "0xc000005e" => "There are currently no logon servers available to service the logon request" + "0xc0000064" => "User logon with misspelled or bad user account" + "0xc000006a" => "User logon with misspelled or bad password" + "0xc000006d" => "This is either due to a bad username or authentication information" + "0xc000006e" => "Unknown user name or bad password" + "0xc000006f" => "User logon outside authorized hours" + "0xc0000070" => "User logon from unauthorized workstation" + "0xc0000071" => "User logon with expired password" + "0xc0000072" => "User logon to account disabled by administrator" + "0xc00000dc" => "Indicates the Sam Server was in the wrong state to perform the desired operation" + "0xc0000133" => "Clocks between DC and other computer too far out of sync" + "0xc000015b" => "The user has not been granted the requested logon type (aka logon right) at this machine" + "0xc000018c" => "The logon request failed because the trust relationship between the primary domain and the trusted domain failed" + "0xc0000192" => "An attempt was made to logon, but the Netlogon service was not started" + "0xc0000193" => "User logon with expired account" + "0xc0000224" => "User is required to change password at next logon" + "0xc0000225" => "Evidently a bug in Windows and not a risk" + "0xc0000234" => "User logon with account locked" + "0xc00002ee" => "Failure Reason: An Error occurred during Logon" + "0xc0000413" => "Logon Failure: The machine you are logging onto is protected by an authentication firewall. 
The specified account is not allowed to authenticate to the machine" + "0x0" => "Status OK" + } + fallback => "Unknown" + } + mutate { copy => { "event_status_value" => "event_status" } } + } + if [event_id] == 4662 and [object_server] == "DS" { + mutate { add_field => { "z_logstash_pipeline" => "1545_3" } } + mutate { gsub => [ "object_type", "%{}", "" ] } + translate { + field => "[object_type]" + destination => "[object_type_value]" + dictionary => { + "f30e3bc2-9ff0-11d1-b603-0000f80367c1" => "Group-Policy-Container" + "19195a5b-6da0-11d0-afd3-00c04fd930c9" => "Domain-DNS" + "bf967aa5-0de6-11d0-a285-00aa003049e2" => "Organizational-Unit" + "bf967a8b-0de6-11d0-a285-00aa003049e2" => "Container" + "bf967a86-0de6-11d0-a285-00aa003049e2" => "Computer" + "2628a46a-a6ad-4ae0-b854-2b12d9fe6f9e" => "Account" + "bf967a9c-0de6-11d0-a285-00aa003049e2" => "Group" + } + fallback => "Unknown" + } + } + if [event_id] == 4688 { + mutate { add_field => { "z_logstash_pipeline" => "1545_4" } } + translate { + field => "[object_type]" + destination => "[object_type_value]" + dictionary => { + "f30e3bc2-9ff0-11d1-b603-0000f80367c1" => "Group-Policy-Container" + "19195a5b-6da0-11d0-afd3-00c04fd930c9" => "Domain-DNS" + "bf967aa5-0de6-11d0-a285-00aa003049e2" => "Organizational-Unit" + "bf967a8b-0de6-11d0-a285-00aa003049e2" => "Container" + "bf967a86-0de6-11d0-a285-00aa003049e2" => "Computer" + "2628a46a-a6ad-4ae0-b854-2b12d9fe6f9e" => "Account" + "bf967a9c-0de6-11d0-a285-00aa003049e2" => "Group" + } + fallback => "Unknown" + } + } + if [event_id] == 4776 { + mutate { add_field => { "z_logstash_pipeline" => "1545_5" } } + translate { + field => "[event_status]" + destination => "[event_status_value]" + dictionary => { + "0xc0000064" => "The username you typed does not exist. 
Bad username" + "0xc000006a" => "Account logon with misspelled or bad password" + "0xc000006d" => "Generic logon failure Some of the potential causes for this: An invalid username and/or password was used LAN Manager Authentication Level mismatch between the source and target computers" + "0xc000006f" => "Account logon outside authorized hours" + "0xc0000070" => "Account logon from unauthorized workstation" + "0xc0000071" => "Account logon with expired password" + "0xc0000072" => "Account logon to account disabled by administrator" + "0xc0000193" => "Account logon with expired account" + "0xc0000224" => "Account logon with Change Password at Next Logon flagged" + "0xc0000234" => "Account logon with account locked" + "0xc0000371" => "The local account store does not contain secret material for the specified account" + "0x0" => "No errors" + } + fallback => "Unknown" + } + } + } + if [event_id] == 5061 { + mutate { add_field => { "z_logstash_pipeline" => "1545_6" } } + translate { + field => "[key_type]" + destination => "[key_type_value]" + dictionary => { + "%%2500" => "User key" + } + fallback => "Unknown" + } + translate { + field => "[key_operation]" + destination => "[key_operation_value]" + dictionary => { + "%%2480" => "Open key" + } + fallback => "Unknown" + } + } + if [event_id] == 5152 or [event_id] == 5154 or [event_id] == 5156 or [event_id] == 5158 or [event_id] == 5157 { + mutate { add_field => { "z_logstash_pipeline" => "1545_7" } } + translate { + field => "[network_layer_name]" + destination => "[network_layer_name_value]" + dictionary => { + "%%14608" => "Resource Assignment" + "%%14609" => "Listen" + "%%14610" => "Receive/Accept" + "%%14611" => "Connect" + } + fallback => "Unknown" + } + translate { + field => "[network_direction]" + destination => "[network_direction_value]" + dictionary => { + "%%14592" => "Inbound" + "%%14593" => "Outbound" + } + fallback => "Unknown" + } + } + if [object_access_list_requested] { + if [event_id] == 4659 { + mutate { add_field => { "z_logstash_pipeline" => "1545_8" } } + translate { + field => "[object_access_list_requested]" + destination => "[object_access_list_requested_value]" + dictionary => { + "%%1537" => "DELETE" + } + fallback => "Unknown" + } + } + } + if [event_id] == 4768 or [event_id] == 4769 or [event_id] == 4770 or [event_id] == 4771 { + mutate { add_field => { "z_logstash_pipeline" => "1545_9" } } + translate { + field => "[ticket_encryption_type]" + destination => "[ticket_encryption_type_value]" + dictionary => { + "0x1" => "DES-CBC-CRC" + "0x3" => "DES-CBC-MD5" + "0x11" => "AES128-CTS-HMAC-SHA1-96" + "0x12" => "AES256-CTS-HMAC-SHA1-96" + "0x17" => "RC4-HMAC" + "0x18" => "RC4-HMAC-EXP" + "0xFFFFFFFF" => "Failure Event" + "0xffffffff" => "Failure Event" + } + fallback => "Unknown" + } + translate { + field => "[ticket_options]" + destination => "[ticket_options_type_value]" + dictionary => { + "0x40810010" => "Forwardable, Renewable, Canonicalize, Renewable-ok" + "0x40810000" => "Forwardable, Renewable, Canonicalize" + "0x60810010" => "Forwardable, Forwarded, Renewable, Canonicalize, Renewable-ok" + "0x40800000" => "Forwardable, Renewable" + "0x10002" => "Proxiable, Unused" + } + fallback => "Unknown" + } + if [service_ticket_preauth_type] { + translate { + field => "[service_ticket_preauth_type]" + destination => "[service_ticket_preauth_type_value]" + dictionary => { + "0" => "Without Pre-Authentication" + "2" => "PA-ENC-TIMESTAMP" + "11" => "PA-ETYPE-INFO" + "15" => "PA-PK-AS-REP_OLD" + "17" => "PA-PK-AS-REP" + "19" 
=> "PA-ETYPE-INFO2" + "20" => "PA-SVR-REFERRAL-INFO" + "138" => "PA-ENCRYPTED-CHALLENGE" + "-" => "Failure Event" + } + fallback => "Unknown" + } + } + if [ticket_failure_code] { + translate { + field => "[ticket_failure_code]" + destination => "[ticket_failure_code_value]" + dictionary => { + "0x10" => "KDC_ERR_PADATA_TYPE_NOSUPP" + "0x17" => "KDC_ERR_KEY_EXPIRED" + "0x18" => "KDC_ERR_PREAUTH_FAILED" + } + fallback => "Unknown" + } + } + } + if [event_id] == 5058 or [event_id] == 5059 or [event_id] == 5061 { + mutate { add_field => { "z_logstash_pipeline" => "1545_10" } } + translate { + field => "[key_type]" + destination => "[key_type_value]" + dictionary => { + "%%2500" => "User key" + } + fallback => "Unknown" + } + translate { + field => "[key_operation]" + destination => "[key_operation_value]" + dictionary => { + "%%2480" => "Open Key" + "%%2458" => "Read persisted key from file" + "%%2464" => "Export of persistent cryptographic key" + } + fallback => "Unknown" + } + } + if [dsobject_domain_type] { + if [event_id] == 5136 or [event_id] == 5137 { + mutate { add_field => { "z_logstash_pipeline" => "1545_11" } } + translate { + field => "[dsobject_domain_type]" + destination => "[dsobject_domain_type_value]" + dictionary => { + "%%14676" => "Active Directory Domain Services" + } + fallback => "Unknown" + } + } + } + if [event_id] == 5379 { + mutate { add_field => { "z_logstash_pipeline" => "1545_12" } } + translate { + field => "[object_operation_type]" + destination => "[object_operation_type_value]" + dictionary => { + "%%8100" => "Enumerate Credentials" + } + fallback => "Unknown" + } } } } \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/2511-winevent-powershell-filter.conf b/docker/helk-logstash/pipeline/2511-winevent-powershell-filter.conf index 5ecc5ad2..ac40302c 100644 --- a/docker/helk-logstash/pipeline/2511-winevent-powershell-filter.conf +++ b/docker/helk-logstash/pipeline/2511-winevent-powershell-filter.conf @@ -204,4 +204,4 @@ filter { } } } -} +} \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/9950-winevent-sysmon-output.conf b/docker/helk-logstash/pipeline/9950-winevent-sysmon-output.conf index d155dd41..37eb02e3 100644 --- a/docker/helk-logstash/pipeline/9950-winevent-sysmon-output.conf +++ b/docker/helk-logstash/pipeline/9950-winevent-sysmon-output.conf @@ -12,5 +12,10 @@ output { user => 'elastic' #password => 'elasticpassword' } + kafka { + bootstrap_servers => "helk-kafka-broker:9092" + codec => "json" + topic_id => "winsysmon" + } } } \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/9951-winevent-security-output.conf b/docker/helk-logstash/pipeline/9951-winevent-security-output.conf index 5c6e452f..3fca7dcd 100644 --- a/docker/helk-logstash/pipeline/9951-winevent-security-output.conf +++ b/docker/helk-logstash/pipeline/9951-winevent-security-output.conf @@ -12,5 +12,10 @@ output { user => 'elastic' #password => 'elasticpassword' } + kafka { + bootstrap_servers => "helk-kafka-broker:9092" + codec => "json" + topic_id => "winsecurity" + } } } \ No newline at end of file diff --git a/docker/helk-logstash/pipeline/9958-osquery-output.conf b/docker/helk-logstash/pipeline/9958-osquery-output.conf index 620bf0c9..0454f1c6 100644 --- a/docker/helk-logstash/pipeline/9958-osquery-output.conf +++ b/docker/helk-logstash/pipeline/9958-osquery-output.conf @@ -10,6 +10,7 @@ output { index => "logs-endpoint-osquery-%{+YYYY.MM.dd}" document_id => "%{[@metadata][log_hash]}" user => 'elastic' + #password => 
'elasticpassword' } } } diff --git a/docker/helk-logstash/scripts/basic/logstash-entrypoint.sh b/docker/helk-logstash/scripts/basic/logstash-entrypoint.sh deleted file mode 100755 index 36628275..00000000 --- a/docker/helk-logstash/scripts/basic/logstash-entrypoint.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash - -# HELK script: logstash-entrypoint.sh -# HELK script description: Pushes output templates to ES and starts Logstash -# HELK build Stage: Alpha -# Author: Roberto Rodriguez (@Cyb3rWard0g) -# License: GPL-3.0 - -# *********** Environment Variables *************** -if [[ -z "$ELASTICSEARCH_URL" ]]; then - export ELASTICSEARCH_URL="http://helk-elasticsearch:9200" -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch URL to $ELASTICSEARCH_URL" - -# ********* Setting LS_JAVA_OPTS *************** -if [[ -z "$LS_JAVA_OPTS" ]]; then - LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/4}' /proc/meminfo) - export LS_JAVA_OPTS="-Xms${LS_MEMORY}m -Xmx${LS_MEMORY}m" -fi -echo "[HELK-DOCKER-INSTALLATION-INFO] Setting LS_JAVA_OPTS to $LS_JAVA_OPTS" - -# *********** Looking for ES *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." -until curl -s $ELASTICSEARCH_URL -o /dev/null; do - sleep 1 -done - -echo "[HELK-DOCKER-INSTALLATION-INFO] Uploading templates to elasticsearch.." -DIR=/usr/share/logstash/output_templates -for file in ${DIR}/*.json -do - template_name=$(echo $file | sed -r ' s/^.*\/[0-9]+\-//'); - echo "[HELK-DOCKER-INSTALLATION-INFO] Uploading $template_name template to elasticsearch.."; - curl -s -H 'Content-Type: application/json' -XPUT "$ELASTICSEARCH_URL/_template/$template_name" -d@${file}; -done - -# ********** Install Plugin ***************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Installing Logstash plugins.." -logstash-plugin install logstash-filter-prune - -# ********** Starting Logstash ***************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.." 
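The kafka outputs re-enabled in 9950/9951 above publish the enriched Sysmon and Security events back to the broker on the winsysmon and winsecurity topics. A throwaway consumer is an easy way to confirm both topics are flowing; this is a minimal sketch assuming the kafka-python package is installed and helk-kafka-broker:9092 is resolvable (e.g., from inside the compose network):

```python
from kafka import KafkaConsumer

# Quick check that the re-enabled 9950/9951 outputs are producing.
# Offset/commit settings mirror the tuning in 0002-kafka-input.conf:
# start at the latest offsets and never auto-commit.
consumer = KafkaConsumer(
    "winsysmon",
    "winsecurity",
    bootstrap_servers="helk-kafka-broker:9092",
    auto_offset_reset="latest",
    enable_auto_commit=False,
    consumer_timeout_ms=10000,  # stop iterating after 10s of silence
)

for record in consumer:
    print(record.topic, record.value[:120])
```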
-/usr/local/bin/docker-entrypoint \ No newline at end of file diff --git a/docker/helk-logstash/scripts/logstash-entrypoint.sh b/docker/helk-logstash/scripts/logstash-entrypoint.sh new file mode 100755 index 00000000..5da3c65e --- /dev/null +++ b/docker/helk-logstash/scripts/logstash-entrypoint.sh @@ -0,0 +1,99 @@ +#!/bin/bash + +# HELK script: logstash-entrypoint.sh +# HELK script description: Pushes output templates to ES and starts Logstash +# HELK build Stage: Alpha +# Author: Roberto Rodriguez (@Cyb3rWard0g) +# License: GPL-3.0 + +# *********** Environment Variables *************** +DIR=/usr/share/logstash/output_templates + +if [[ -z "$ELASTICSEARCH_URL" ]]; then + export ELASTICSEARCH_URL="http://helk-elasticsearch:9200" +fi +echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Setting Elasticsearch URL to $ELASTICSEARCH_URL" + +# ******** Set Trial License Variables *************** +if [[ -n "$ELASTIC_PASSWORD" ]]; then + if [[ -z "$ELASTIC_USERNAME" ]]; then + ELASTIC_USERNAME=elastic + fi + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Setting Elasticsearch's username to access Elasticsearch to $ELASTIC_USERNAME" + + if [[ -z "$ELASTIC_HOST" ]]; then + ELASTIC_HOST=helk-elasticsearch + fi + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Setting Elasticsearch host name to $ELASTIC_HOST" + + if [[ -z "$ELASTIC_PORT" ]]; then + ELASTIC_PORT=9200 + fi + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Setting Elasticsearch port to $ELASTIC_PORT" + + # ****** Updating Pipeline configs *********** + for config in /usr/share/logstash/pipeline/*-output.conf + do + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Updating pipeline config $config..." + sed -i "s/#password \=>.*$/password \=> \'${ELASTIC_PASSWORD}\'/g" ${config} + done + + # *********** Check if Elasticsearch is up *************** + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." + until curl -s -u $ELASTIC_USERNAME:$ELASTIC_PASSWORD $ELASTICSEARCH_URL -o /dev/null; do + sleep 1 + done + +else + # *********** Check if Elasticsearch is up *************** + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." + until curl -s $ELASTICSEARCH_URL -o /dev/null; do + sleep 1 + done + +fi + +# ********** Uploading templates to Elasticsearch ******* +echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Uploading templates to elasticsearch.." +for file in ${DIR}/*.json; do + template_name=$(echo $file | sed -r ' s/^.*\/[0-9]+\-//') + while true; do + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Uploading $template_name template to elasticsearch.."
+ if [[ -n "$ELASTIC_PASSWORD" ]]; then + STATUS=$(curl -s -o /dev/null -w '%{http_code}' -u $ELASTIC_USERNAME:$ELASTIC_PASSWORD $ELASTICSEARCH_URL) + if [ $STATUS -eq 200 ]; then + curl -u $ELASTIC_USERNAME:$ELASTIC_PASSWORD -X POST $ELASTICSEARCH_URL/_template/$template_name -H 'Content-Type: application/json' -d@${file} + break + else + sleep 1 + fi + else + STATUS=$(curl -s -o /dev/null -w '%{http_code}' $ELASTICSEARCH_URL) + if [ $STATUS -eq 200 ]; then + curl -X POST $ELASTICSEARCH_URL/_template/$template_name -H 'Content-Type: application/json' -d@${file} + break + else + sleep 1 + fi + fi + done +done + +# ********* Setting LS_JAVA_OPTS *************** +if [[ -z "$LS_JAVA_OPTS" ]]; then + LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/4}' /proc/meminfo) + export LS_JAVA_OPTS="-Xms${LS_MEMORY}m -Xmx${LS_MEMORY}m" +fi +echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Setting LS_JAVA_OPTS to $LS_JAVA_OPTS" + +# ********** Install Plugin ***************** +echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Installing Logstash plugins.." +if logstash-plugin list 'prune'; then + echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Plugin Prune is already installed" +else + logstash-plugin install logstash-filter-prune +fi + +# ********** Starting Logstash ***************** +echo "[HELK-LOGSTASH-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.." +/usr/local/bin/docker-entrypoint \ No newline at end of file diff --git a/docker/helk-logstash/scripts/trial/logstash-entrypoint.sh b/docker/helk-logstash/scripts/trial/logstash-entrypoint.sh deleted file mode 100755 index ff8eeac6..00000000 --- a/docker/helk-logstash/scripts/trial/logstash-entrypoint.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/bash - -# HELK script: logstash-entrypoint.sh -# HELK script description: Pushes output templates to ES and starts Logstash -# HELK build Stage: Alpha -# Author: Roberto Rodriguez (@Cyb3rWard0g) -# License: GPL-3.0 - -# *********** Password for Elasticsearch Backend ******** -if [[ -z "$ELASTIC_PASSWORD" ]]; then - ELASTIC_PASSWORD=elasticpassword -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch backend password to $ELASTIC_PASSWORD" - -if [[ -z "$ELASTIC_HOST" ]]; then - ELASTIC_HOST=helk-elasticsearch -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch host name to $ELASTIC_HOST" - -if [[ -z "$ELASTIC_PORT" ]]; then - ELASTIC_PORT=9200 -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting Elasticsearch port to $ELASTIC_PORT" - -if [[ -z "$ELASTICSEARCH_ACCESS" ]]; then - ELASTICSEARCH_ACCESS=http://elastic:$ELASTIC_PASSWORD@$ELASTIC_HOST:$ELASTIC_PORT -fi -echo "[HELK-ES-DOCKER-INSTALLATION-INFO] Setting custom ELasticsearch URL with credentials to $ELASTICSEARCH_ACCESS" - -# ********* Setting LS_JAVA_OPTS *************** -if [[ -z "$LS_JAVA_OPTS" ]]; then - LS_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/4}' /proc/meminfo) - export LS_JAVA_OPTS="-Xms${LS_MEMORY}m -Xmx${LS_MEMORY}m" -fi -echo "[HELK-DOCKER-INSTALLATION-INFO] Setting LS_JAVA_OPTS to $LS_JAVA_OPTS" - -# *********** Looking for ES *************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Waiting for elasticsearch URI to be accessible.." -until curl -s $ELASTICSEARCH_ACCESS -o /dev/null; do - sleep 1 -done - -echo "[HELK-DOCKER-INSTALLATION-INFO] Uploading templates to elasticsearch.." 
-DIR=/usr/share/logstash/output_templates -for file in ${DIR}/*.json -do - template_name=$(echo $file | sed -r ' s/^.*\/[0-9]+\-//'); - echo "[HELK-DOCKER-INSTALLATION-INFO] Uploading $template_name template to elasticsearch.."; - curl -s -H 'Content-Type: application/json' -XPUT $ELASTICSEARCH_ACCESS/_template/$template_name -d@${file}; - sleep 1 -done - -# ****** Updating Pipeline configs *********** -for config in /usr/share/logstash/pipeline/*-output.conf -do - echo "[HELK-LOGSTASH-INSTALLATION-INFO] Updating pipeline config $config..." - sed -i "s/#password \=>.*$/password \=> \'${ELASTIC_PASSWORD}\'/g" ${config} -done - -# ********** Install Plugin ***************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Installing Logstash plugins.." -logstash-plugin install logstash-filter-prune - -# ********** Starting Logstash ***************** -echo "[HELK-DOCKER-INSTALLATION-INFO] Running docker-entrypoint script.." -/usr/local/bin/docker-entrypoint - - diff --git a/docker/helk-zeppelin/Dockerfile b/docker/helk-zeppelin/Dockerfile deleted file mode 100644 index 7e153ef3..00000000 --- a/docker/helk-zeppelin/Dockerfile +++ /dev/null @@ -1,70 +0,0 @@ -# HELK script: HELK Zeppelin Dockerfile -# HELK build Stage: Alpha -# Author: Roberto Rodriguez (@Cyb3rWard0g) -# License: GPL-3.0 -# Reference: -# https://github.com/apache/zeppelin/blob/master/scripts/docker/zeppelin/bin/Dockerfile -# https://hub.docker.com/r/apache/zeppelin/~/dockerfile/ - -FROM cyb3rward0g/helk-spark-base:2.3.1 -LABEL maintainer="Roberto Rodriguez @Cyb3rWard0g" -LABEL description="Dockerfile base for the HELK Zeppelin." - -ENV DEBIAN_FRONTEND noninteractive - -USER root -# *********** Installing Prerequisites *************** -ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 -RUN apt-get -y update && \ - apt-get install -y git openjdk-8-jdk \ - libfontconfig git build-essential chrpath \ - libssl-dev libxft-dev libfreetype6 libfreetype6-dev \ - libfontconfig1 libfontconfig1-dev python3-pip && \ - rm -rf /var/lib/apt/lists/* - -# *********** Upgrading PIP *************** -RUN pip3 install --upgrade pip - -RUN apt-get -y update && \ - apt-get install -y gfortran && \ - # numerical/algebra packages - apt-get install -y libblas-dev libatlas-dev liblapack-dev && \ - # font, image for matplotlib - apt-get install -y libpng-dev libfreetype6-dev libxft-dev && \ - # for tkinter - apt-get install -y python-tk libxml2-dev libxslt-dev zlib1g-dev && \ - pip3 install numpy && \ - pip3 install matplotlib - -# *********** Creating the right directories *************** -RUN bash -c 'mkdir -pv /opt/helk/{es-hadoop,zeppelin}' - -# ************** Install Zeppelin ********************* -ENV Z_VERSION=0.8.0 -ENV SPARK_CYPHER_VERSION=0.1.6 -ENV Z_HOME="/opt/helk/zeppelin" -ENV ZEPPELIN_PORT=9090 - -RUN wget -qO- http://archive.apache.org/dist/zeppelin/zeppelin-${Z_VERSION}/zeppelin-${Z_VERSION}-bin-all.tgz | sudo tar xvz -C ${Z_HOME} --strip-components=1 - -# *********** Install CAPS *************** -RUN wget https://github.com/opencypher/cypher-for-apache-spark/releases/download/${SPARK_CYPHER_VERSION}/spark-cypher-${SPARK_CYPHER_VERSION}-cluster.jar -P ${Z_HOME} - -# *********** Download ES-Hadoop *************** -ENV ESHADOOP_VERSION=6.3.1 -RUN wget https://artifacts.elastic.co/downloads/elasticsearch-hadoop/elasticsearch-hadoop-${ESHADOOP_VERSION}.zip -P /opt/helk/es-hadoop/ \ - && unzip -j /opt/helk/es-hadoop/*.zip -d /opt/helk/es-hadoop/ \ - && rm /opt/helk/es-hadoop/*.zip - -# *********** Add Spark User rights to Zeppelin Folder *************** 
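Both the old trial entrypoint and the new consolidated one inject the Elasticsearch password by rewriting the commented #password => placeholder in every *-output.conf. A small Python equivalent of that sed expression, run against a hypothetical config fragment:

```python
import re

# Hypothetical fragment; the real files live under
# /usr/share/logstash/pipeline/*-output.conf
line = "    #password => 'elasticpassword'"
elastic_password = "changeme"  # stand-in for $ELASTIC_PASSWORD

# Equivalent of: sed -i "s/#password \=>.*$/password \=> '${ELASTIC_PASSWORD}'/g"
patched = re.sub(r"#password =>.*$", f"password => '{elastic_password}'", line)
print(patched)  # "    password => 'changeme'"
```

Keeping the placeholder commented out by default means a basic-license deployment never ships a hardcoded password in its pipeline configs.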
-RUN chown -R ${SPARK_USER}:${SPARK_USER} ${Z_HOME} /opt/helk/es-hadoop - -USER $SPARK_UID - -# *********** Add Custom SPark Conf ****** -ADD spark-defaults.conf ${SPARK_HOME}/conf/ - -EXPOSE 9090 - -WORKDIR ${Z_HOME} -CMD ["bin/zeppelin.sh"] \ No newline at end of file diff --git a/docker/helk-zeppelin/conf/interpreter.json b/docker/helk-zeppelin/conf/interpreter.json deleted file mode 100644 index 9186b7ad..00000000 --- a/docker/helk-zeppelin/conf/interpreter.json +++ /dev/null @@ -1,1658 +0,0 @@ -{ - "interpreterSettings": { - "ignite": { - "id": "ignite", - "name": "ignite", - "group": "ignite", - "properties": { - "ignite.config.url": { - "name": "ignite.config.url", - "value": "", - "type": "url" - }, - "ignite.peerClassLoadingEnabled": { - "name": "ignite.peerClassLoadingEnabled", - "value": true, - "type": "checkbox" - }, - "ignite.clientMode": { - "name": "ignite.clientMode", - "value": true, - "type": "checkbox" - }, - "ignite.jdbc.url": { - "name": "ignite.jdbc.url", - "value": "jdbc:ignite:cfg://default-ignite-jdbc.xml", - "type": "string" - }, - "ignite.addresses": { - "name": "ignite.addresses", - "value": "127.0.0.1:47500..47509", - "type": "textarea" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "ignite", - "class": "org.apache.zeppelin.ignite.IgniteInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": false - } - }, - { - "name": "ignitesql", - "class": "org.apache.zeppelin.ignite.IgniteSqlInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "python": { - "id": "python", - "name": "python", - "group": "python", - "properties": { - "zeppelin.python": { - "name": "zeppelin.python", - "value": "python", - "type": "string" - }, - "zeppelin.ipython.launch.timeout": { - "name": "zeppelin.ipython.launch.timeout", - "value": "30000", - "type": "number" - }, - "zeppelin.python.useIPython": { - "name": "zeppelin.python.useIPython", - "value": true, - "type": "checkbox" - }, - "zeppelin.ipython.grpc.message_size": { - "name": "zeppelin.ipython.grpc.message_size", - "value": "33554432", - "type": "number" - }, - "zeppelin.python.maxResult": { - "name": "zeppelin.python.maxResult", - "value": "1000", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "python", - "class": "org.apache.zeppelin.python.PythonInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionSupport": true - } - }, - { - "name": "ipython", - "class": "org.apache.zeppelin.python.IPythonInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "sql", - "class": "org.apache.zeppelin.python.PythonInterpreterPandasSql", - "defaultInterpreter": false, - "editor": { - "language": "sql", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": false - } - }, - { - "name": "conda", - "class": "org.apache.zeppelin.python.PythonCondaInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sh", - "editOnDblClick": false, - "completionSupport": false - } - }, - { - "name": "docker", - "class": 
"org.apache.zeppelin.python.PythonDockerInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sh", - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "sap": { - "id": "sap", - "name": "sap", - "group": "sap", - "properties": { - "universe.password": { - "name": "universe.password", - "value": "", - "type": "password" - }, - "universe.api.url": { - "name": "universe.api.url", - "value": "http://localhost:6405/biprws", - "type": "url" - }, - "universe.authType": { - "name": "universe.authType", - "value": "secEnterprise", - "type": "string" - }, - "universe.user": { - "name": "universe.user", - "value": "", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "universe", - "class": "org.apache.zeppelin.sap.UniverseInterpreter", - "defaultInterpreter": true, - "editor": { - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "jdbc": { - "id": "jdbc", - "name": "jdbc", - "group": "jdbc", - "properties": { - "default.url": { - "name": "default.url", - "value": "jdbc:postgresql://localhost:5432/", - "type": "string" - }, - "default.driver": { - "name": "default.driver", - "value": "org.postgresql.Driver", - "type": "string" - }, - "zeppelin.jdbc.principal": { - "name": "zeppelin.jdbc.principal", - "value": "", - "type": "string" - }, - "default.completer.ttlInSeconds": { - "name": "default.completer.ttlInSeconds", - "value": "120", - "type": "number" - }, - "default.password": { - "name": "default.password", - "value": "", - "type": "password" - }, - "default.completer.schemaFilters": { - "name": "default.completer.schemaFilters", - "value": "", - "type": "textarea" - }, - "default.splitQueries": { - "name": "default.splitQueries", - "value": false, - "type": "checkbox" - }, - "default.user": { - "name": "default.user", - "value": "gpadmin", - "type": "string" - }, - "zeppelin.jdbc.concurrent.max_connection": { - "name": "zeppelin.jdbc.concurrent.max_connection", - "value": "10", - "type": "number" - }, - "common.max_count": { - "name": "common.max_count", - "value": "1000", - "type": "number" - }, - "default.precode": { - "name": "default.precode", - "value": "", - "type": "textarea" - }, - "zeppelin.jdbc.auth.type": { - "name": "zeppelin.jdbc.auth.type", - "value": "", - "type": "string" - }, - "default.statementPrecode": { - "name": "default.statementPrecode", - "value": "", - "type": "string" - }, - "zeppelin.jdbc.concurrent.use": { - "name": "zeppelin.jdbc.concurrent.use", - "value": true, - "type": "checkbox" - }, - "zeppelin.jdbc.keytab.location": { - "name": "zeppelin.jdbc.keytab.location", - "value": "", - "type": "string" - }, - "zeppelin.jdbc.interpolation": { - "name": "zeppelin.jdbc.interpolation", - "value": false, - "type": "checkbox" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "sql", - "class": "org.apache.zeppelin.jdbc.JDBCInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sql", - "editOnDblClick": false, - "completionSupport": true - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - 
"owners": [], - "isUserImpersonate": false - } - }, - "lens": { - "id": "lens", - "name": "lens", - "group": "lens", - "properties": { - "lens.server.base.url": { - "name": "lens.server.base.url", - "value": "http://\u003chostname\u003e:\u003cport\u003e/lensapi", - "type": "url" - }, - "zeppelin.lens.maxThreads": { - "name": "zeppelin.lens.maxThreads", - "value": "10", - "type": "number" - }, - "zeppelin.lens.maxResults": { - "name": "zeppelin.lens.maxResults", - "value": "1000", - "type": "number" - }, - "lens.client.dbname": { - "name": "lens.client.dbname", - "value": "default", - "type": "string" - }, - "lens.query.enable.persistent.resultset": { - "name": "lens.query.enable.persistent.resultset", - "value": false, - "type": "checkbox" - }, - "zeppelin.lens.run.concurrent": { - "name": "zeppelin.lens.run.concurrent", - "value": true, - "type": "checkbox" - }, - "lens.session.cluster.user": { - "name": "lens.session.cluster.user", - "value": "default", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "lens", - "class": "org.apache.zeppelin.lens.LensInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "pig": { - "id": "pig", - "name": "pig", - "group": "pig", - "properties": { - "zeppelin.pig.execType": { - "name": "zeppelin.pig.execType", - "value": "mapreduce", - "type": "string" - }, - "SPARK_JAR": { - "name": "SPARK_JAR", - "value": "", - "type": "textarea" - }, - "zeppelin.pig.includeJobStats": { - "name": "zeppelin.pig.includeJobStats", - "value": false, - "type": "checkbox" - }, - "zeppelin.pig.maxResult": { - "name": "zeppelin.pig.maxResult", - "value": "1000", - "type": "number" - }, - "SPARK_MASTER": { - "name": "SPARK_MASTER", - "value": "local", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "script", - "class": "org.apache.zeppelin.pig.PigInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "pig", - "editOnDblClick": false - } - }, - { - "name": "query", - "class": "org.apache.zeppelin.pig.PigQueryInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "pig", - "editOnDblClick": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "flink": { - "id": "flink", - "name": "flink", - "group": "flink", - "properties": { - "port": { - "name": "port", - "value": "6123", - "type": "number" - }, - "host": { - "name": "host", - "value": "local", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "flink", - "class": "org.apache.zeppelin.flink.FlinkInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "scala", - "editOnDblClick": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "angular": { - "id": "angular", - "name": "angular", - "group": "angular", - "properties": {}, - "status": "READY", - "interpreterGroup": [ - { - "name": "angular", - "class": "org.apache.zeppelin.angular.AngularInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": true, - 
"completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "livy": { - "id": "livy", - "name": "livy", - "group": "livy", - "properties": { - "livy.spark.executor.instances": { - "name": "livy.spark.executor.instances", - "value": "", - "type": "number" - }, - "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": { - "name": "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout", - "value": "", - "type": "string" - }, - "zeppelin.livy.concurrentSQL": { - "name": "zeppelin.livy.concurrentSQL", - "value": false, - "type": "checkbox" - }, - "zeppelin.livy.url": { - "name": "zeppelin.livy.url", - "value": "http://localhost:8998", - "type": "url" - }, - "zeppelin.livy.pull_status.interval.millis": { - "name": "zeppelin.livy.pull_status.interval.millis", - "value": "1000", - "type": "number" - }, - "livy.spark.executor.memory": { - "name": "livy.spark.executor.memory", - "value": "", - "type": "string" - }, - "zeppelin.livy.restart_dead_session": { - "name": "zeppelin.livy.restart_dead_session", - "value": false, - "type": "checkbox" - }, - "livy.spark.dynamicAllocation.enabled": { - "name": "livy.spark.dynamicAllocation.enabled", - "value": false, - "type": "checkbox" - }, - "zeppelin.livy.maxLogLines": { - "name": "zeppelin.livy.maxLogLines", - "value": "1000", - "type": "number" - }, - "livy.spark.dynamicAllocation.minExecutors": { - "name": "livy.spark.dynamicAllocation.minExecutors", - "value": "", - "type": "number" - }, - "livy.spark.executor.cores": { - "name": "livy.spark.executor.cores", - "value": "", - "type": "number" - }, - "zeppelin.livy.session.create_timeout": { - "name": "zeppelin.livy.session.create_timeout", - "value": "120", - "type": "number" - }, - "zeppelin.livy.spark.sql.maxResult": { - "name": "zeppelin.livy.spark.sql.maxResult", - "value": "1000", - "type": "number" - }, - "livy.spark.driver.cores": { - "name": "livy.spark.driver.cores", - "value": "", - "type": "number" - }, - "livy.spark.jars.packages": { - "name": "livy.spark.jars.packages", - "value": "", - "type": "textarea" - }, - "zeppelin.livy.spark.sql.field.truncate": { - "name": "zeppelin.livy.spark.sql.field.truncate", - "value": true, - "type": "checkbox" - }, - "livy.spark.driver.memory": { - "name": "livy.spark.driver.memory", - "value": "", - "type": "string" - }, - "zeppelin.livy.displayAppInfo": { - "name": "zeppelin.livy.displayAppInfo", - "value": true, - "type": "checkbox" - }, - "zeppelin.livy.principal": { - "name": "zeppelin.livy.principal", - "value": "", - "type": "string" - }, - "zeppelin.livy.keytab": { - "name": "zeppelin.livy.keytab", - "value": "", - "type": "textarea" - }, - "livy.spark.dynamicAllocation.maxExecutors": { - "name": "livy.spark.dynamicAllocation.maxExecutors", - "value": "", - "type": "number" - }, - "livy.spark.dynamicAllocation.initialExecutors": { - "name": "livy.spark.dynamicAllocation.initialExecutors", - "value": "", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "spark", - "class": "org.apache.zeppelin.livy.LivySparkInterpreter", - "defaultInterpreter": true, - "editor": { - "language": "scala", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "sql", - "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sql", - "editOnDblClick": false, - 
"completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "pyspark", - "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "pyspark3", - "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "sparkr", - "class": "org.apache.zeppelin.livy.LivySparkRInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "r", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "shared", - "class": "org.apache.zeppelin.livy.LivySharedInterpreter", - "defaultInterpreter": false - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "perNote": "shared", - "perUser": "scoped", - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "groovy": { - "id": "groovy", - "name": "groovy", - "group": "groovy", - "properties": { - "GROOVY_CLASSES": { - "name": "GROOVY_CLASSES", - "value": "", - "type": "textarea" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "groovy", - "class": "org.apache.zeppelin.groovy.GroovyInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "neo4j": { - "id": "neo4j", - "name": "neo4j", - "group": "neo4j", - "properties": { - "neo4j.auth.user": { - "name": "neo4j.auth.user", - "value": "", - "type": "string" - }, - "neo4j.auth.password": { - "name": "neo4j.auth.password", - "value": "", - "type": "string" - }, - "neo4j.auth.type": { - "name": "neo4j.auth.type", - "value": "BASIC", - "type": "string" - }, - "neo4j.max.concurrency": { - "name": "neo4j.max.concurrency", - "value": "50", - "type": "string" - }, - "neo4j.url": { - "name": "neo4j.url", - "value": "bolt://localhost:7687", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "neo4j", - "class": "org.apache.zeppelin.graph.neo4j.Neo4jCypherInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "file": { - "id": "file", - "name": "file", - "group": "file", - "properties": { - "hdfs.url": { - "name": "hdfs.url", - "value": "http://localhost:50070/webhdfs/v1/", - "type": "url" - }, - "hdfs.user": { - "name": "hdfs.user", - "value": "hdfs", - "type": "string" - }, - "hdfs.maxlength": { - "name": "hdfs.maxlength", - "value": "1000", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "hdfs", - "class": "org.apache.zeppelin.file.HDFSFileInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": true - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "elasticsearch": { - "id": 
"elasticsearch", - "name": "elasticsearch", - "group": "elasticsearch", - "properties": { - "elasticsearch.cluster.name": { - "name": "elasticsearch.cluster.name", - "value": "elasticsearch", - "type": "string" - }, - "elasticsearch.basicauth.username": { - "name": "elasticsearch.basicauth.username", - "value": "", - "type": "string" - }, - "elasticsearch.client.type": { - "name": "elasticsearch.client.type", - "value": "transport", - "type": "string" - }, - "elasticsearch.result.size": { - "name": "elasticsearch.result.size", - "value": "10", - "type": "number" - }, - "elasticsearch.port": { - "name": "elasticsearch.port", - "value": "9300", - "type": "number" - }, - "elasticsearch.host": { - "name": "elasticsearch.host", - "value": "localhost", - "type": "string" - }, - "elasticsearch.basicauth.password": { - "name": "elasticsearch.basicauth.password", - "value": "", - "type": "password" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "elasticsearch", - "class": "org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": true - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "sh": { - "id": "sh", - "name": "sh", - "group": "sh", - "properties": { - "zeppelin.shell.interpolation": { - "name": "zeppelin.shell.interpolation", - "value": false, - "type": "checkbox" - }, - "zeppelin.shell.principal": { - "name": "zeppelin.shell.principal", - "value": "", - "type": "string" - }, - "shell.working.directory.user.home": { - "name": "shell.working.directory.user.home", - "value": false, - "type": "checkbox" - }, - "zeppelin.shell.auth.type": { - "name": "zeppelin.shell.auth.type", - "value": "", - "type": "string" - }, - "zeppelin.shell.keytab.location": { - "name": "zeppelin.shell.keytab.location", - "value": "", - "type": "string" - }, - "shell.command.timeout.millisecs": { - "name": "shell.command.timeout.millisecs", - "value": "60000", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "sh", - "class": "org.apache.zeppelin.shell.ShellInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sh", - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "spark": { - "id": "spark", - "name": "spark", - "group": "spark", - "properties": { - "spark.executor.memory": { - "name": "spark.executor.memory", - "value": "1g", - "type": "string" - }, - "zeppelin.spark.sql.interpolation": { - "name": "zeppelin.spark.sql.interpolation", - "value": false, - "type": "checkbox" - }, - "zeppelin.spark.concurrentSQL": { - "name": "zeppelin.spark.concurrentSQL", - "value": false, - "type": "checkbox" - }, - "zeppelin.R.knitr": { - "name": "zeppelin.R.knitr", - "value": true, - "type": "checkbox" - }, - "zeppelin.R.cmd": { - "name": "zeppelin.R.cmd", - "value": "R", - "type": "string" - }, - "spark.app.name": { - "name": "spark.app.name", - "value": "Zeppelin", - "type": "string" - }, - "zeppelin.R.image.width": { - "name": "zeppelin.R.image.width", - "value": "100%", - "type": "number" - }, - "zeppelin.spark.importImplicit": { - "name": "zeppelin.spark.importImplicit", - "value": true, - "type": "checkbox" - }, - 
"zeppelin.dep.additionalRemoteRepository": { - "name": "zeppelin.dep.additionalRemoteRepository", - "value": "spark-packages,http://dl.bintray.com/spark-packages/maven,false;", - "type": "textarea" - }, - "zeppelin.spark.maxResult": { - "name": "zeppelin.spark.maxResult", - "value": "1000", - "type": "number" - }, - "master": { - "name": "master", - "value": "local[*]", - "type": "string" - }, - "zeppelin.pyspark.python": { - "name": "zeppelin.pyspark.python", - "value": "/usr/bin/python3", - "type": "string" - }, - "args": { - "name": "args", - "value": "", - "type": "textarea" - }, - "zeppelin.spark.enableSupportedVersionCheck": { - "name": "zeppelin.spark.enableSupportedVersionCheck", - "value": true, - "type": "checkbox" - }, - "zeppelin.spark.useNew": { - "name": "zeppelin.spark.useNew", - "value": true, - "type": "checkbox" - }, - "zeppelin.dep.localrepo": { - "name": "zeppelin.dep.localrepo", - "value": "local-repo", - "type": "string" - }, - "zeppelin.pyspark.useIPython": { - "name": "zeppelin.pyspark.useIPython", - "value": true, - "type": "checkbox" - }, - "zeppelin.spark.sql.stacktrace": { - "name": "zeppelin.spark.sql.stacktrace", - "value": false, - "type": "checkbox" - }, - "zeppelin.spark.useHiveContext": { - "name": "zeppelin.spark.useHiveContext", - "value": true, - "type": "checkbox" - }, - "zeppelin.spark.uiWebUrl": { - "name": "zeppelin.spark.uiWebUrl", - "value": "", - "type": "string" - }, - "zeppelin.R.render.options": { - "name": "zeppelin.R.render.options", - "value": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F, fig.retina \u003d 2", - "type": "textarea" - }, - "zeppelin.spark.printREPLOutput": { - "name": "zeppelin.spark.printREPLOutput", - "value": true, - "type": "checkbox" - }, - "spark.cores.max": { - "name": "spark.cores.max", - "value": "1", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "spark", - "class": "org.apache.zeppelin.spark.SparkInterpreter", - "defaultInterpreter": true, - "editor": { - "language": "scala", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "sql", - "class": "org.apache.zeppelin.spark.SparkSqlInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sql", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "dep", - "class": "org.apache.zeppelin.spark.DepInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "scala", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "pyspark", - "class": "org.apache.zeppelin.spark.PySparkInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "name": "ipyspark", - "class": "org.apache.zeppelin.spark.IPySparkInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionSupport": true - } - }, - { - "name": "r", - "class": "org.apache.zeppelin.spark.SparkRInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "r", - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "cassandra": { - 
"id": "cassandra", - "name": "cassandra", - "group": "cassandra", - "properties": { - "cassandra.ssl.truststore.password": { - "name": "cassandra.ssl.truststore.password", - "value": "none", - "type": "password" - }, - "cassandra.pooling.new.connection.threshold.remote": { - "name": "cassandra.pooling.new.connection.threshold.remote", - "value": "100", - "type": "number" - }, - "cassandra.query.default.fetchSize": { - "name": "cassandra.query.default.fetchSize", - "value": "5000", - "type": "number" - }, - "cassandra.socket.tcp.no_delay": { - "name": "cassandra.socket.tcp.no_delay", - "value": true, - "type": "checkbox" - }, - "cassandra.ssl.enabled": { - "name": "cassandra.ssl.enabled", - "value": false, - "type": "checkbox" - }, - "cassandra.hosts": { - "name": "cassandra.hosts", - "value": "localhost", - "type": "textarea" - }, - "cassandra.credentials.username": { - "name": "cassandra.credentials.username", - "value": "none", - "type": "string" - }, - "cassandra.pooling.new.connection.threshold.local": { - "name": "cassandra.pooling.new.connection.threshold.local", - "value": "100", - "type": "number" - }, - "cassandra.socket.read.timeout.millisecs": { - "name": "cassandra.socket.read.timeout.millisecs", - "value": "12000", - "type": "number" - }, - "cassandra.load.balancing.policy": { - "name": "cassandra.load.balancing.policy", - "value": "DEFAULT", - "type": "string" - }, - "cassandra.pooling.max.request.per.connection.local": { - "name": "cassandra.pooling.max.request.per.connection.local", - "value": "1024", - "type": "number" - }, - "cassandra.cluster": { - "name": "cassandra.cluster", - "value": "Test Cluster", - "type": "string" - }, - "cassandra.pooling.heartbeat.interval.seconds": { - "name": "cassandra.pooling.heartbeat.interval.seconds", - "value": "30", - "type": "number" - }, - "cassandra.query.default.serial.consistency": { - "name": "cassandra.query.default.serial.consistency", - "value": "SERIAL", - "type": "string" - }, - "cassandra.retry.policy": { - "name": "cassandra.retry.policy", - "value": "DEFAULT", - "type": "string" - }, - "cassandra.native.port": { - "name": "cassandra.native.port", - "value": "9042", - "type": "number" - }, - "cassandra.interpreter.parallelism": { - "name": "cassandra.interpreter.parallelism", - "value": "10", - "type": "number" - }, - "cassandra.pooling.pool.timeout.millisecs": { - "name": "cassandra.pooling.pool.timeout.millisecs", - "value": "5000", - "type": "number" - }, - "cassandra.pooling.max.request.per.connection.remote": { - "name": "cassandra.pooling.max.request.per.connection.remote", - "value": "256", - "type": "number" - }, - "cassandra.compression.protocol": { - "name": "cassandra.compression.protocol", - "value": "NONE", - "type": "string" - }, - "cassandra.ssl.truststore.path": { - "name": "cassandra.ssl.truststore.path", - "value": "none", - "type": "string" - }, - "cassandra.socket.connection.timeout.millisecs": { - "name": "cassandra.socket.connection.timeout.millisecs", - "value": "5000", - "type": "number" - }, - "cassandra.query.default.consistency": { - "name": "cassandra.query.default.consistency", - "value": "ONE", - "type": "string" - }, - "cassandra.keyspace": { - "name": "cassandra.keyspace", - "value": "system", - "type": "string" - }, - "cassandra.reconnection.policy": { - "name": "cassandra.reconnection.policy", - "value": "DEFAULT", - "type": "string" - }, - "cassandra.pooling.max.connection.per.host.local": { - "name": "cassandra.pooling.max.connection.per.host.local", - "value": "8", - "type": "number" - 
}, - "cassandra.credentials.password": { - "name": "cassandra.credentials.password", - "value": "none", - "type": "password" - }, - "cassandra.protocol.version": { - "name": "cassandra.protocol.version", - "value": "4", - "type": "string" - }, - "cassandra.max.schema.agreement.wait.second": { - "name": "cassandra.max.schema.agreement.wait.second", - "value": "10", - "type": "number" - }, - "cassandra.pooling.core.connection.per.host.remote": { - "name": "cassandra.pooling.core.connection.per.host.remote", - "value": "1", - "type": "number" - }, - "cassandra.pooling.core.connection.per.host.local": { - "name": "cassandra.pooling.core.connection.per.host.local", - "value": "2", - "type": "number" - }, - "cassandra.pooling.max.connection.per.host.remote": { - "name": "cassandra.pooling.max.connection.per.host.remote", - "value": "2", - "type": "number" - }, - "cassandra.pooling.idle.timeout.seconds": { - "name": "cassandra.pooling.idle.timeout.seconds", - "value": "120", - "type": "number" - }, - "cassandra.speculative.execution.policy": { - "name": "cassandra.speculative.execution.policy", - "value": "DEFAULT", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "cassandra", - "class": "org.apache.zeppelin.cassandra.CassandraInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "md": { - "id": "md", - "name": "md", - "group": "md", - "properties": { - "markdown.parser.type": { - "name": "markdown.parser.type", - "value": "pegdown", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "md", - "class": "org.apache.zeppelin.markdown.Markdown", - "defaultInterpreter": false, - "editor": { - "language": "markdown", - "editOnDblClick": true, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "alluxio": { - "id": "alluxio", - "name": "alluxio", - "group": "alluxio", - "properties": { - "alluxio.master.port": { - "name": "alluxio.master.port", - "value": "19998", - "type": "number" - }, - "alluxio.master.hostname": { - "name": "alluxio.master.hostname", - "value": "localhost", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "alluxio", - "class": "org.apache.zeppelin.alluxio.AlluxioInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": true - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "bigquery": { - "id": "bigquery", - "name": "bigquery", - "group": "bigquery", - "properties": { - "zeppelin.bigquery.project_id": { - "name": "zeppelin.bigquery.project_id", - "value": " ", - "type": "string" - }, - "zeppelin.bigquery.sql_dialect": { - "name": "zeppelin.bigquery.sql_dialect", - "value": "", - "type": "string" - }, - "zeppelin.bigquery.max_no_of_rows": { - "name": "zeppelin.bigquery.max_no_of_rows", - "value": "100000", - "type": "number" - }, - "zeppelin.bigquery.wait_time": { - "name": "zeppelin.bigquery.wait_time", - "value": "5000", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { 
- "name": "sql", - "class": "org.apache.zeppelin.bigquery.BigQueryInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sql", - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "hbase": { - "id": "hbase", - "name": "hbase", - "group": "hbase", - "properties": { - "hbase.home": { - "name": "hbase.home", - "value": "/usr/lib/hbase/", - "type": "string" - }, - "zeppelin.hbase.test.mode": { - "name": "zeppelin.hbase.test.mode", - "value": false, - "type": "checkbox" - }, - "hbase.ruby.sources": { - "name": "hbase.ruby.sources", - "value": "lib/ruby", - "type": "string" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "hbase", - "class": "org.apache.zeppelin.hbase.HbaseInterpreter", - "defaultInterpreter": false, - "editor": { - "editOnDblClick": false, - "completionSupport": false - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - }, - "kylin": { - "id": "kylin", - "name": "kylin", - "group": "kylin", - "properties": { - "kylin.query.project": { - "name": "kylin.query.project", - "value": "learn_kylin", - "type": "textarea" - }, - "kylin.api.user": { - "name": "kylin.api.user", - "value": "ADMIN", - "type": "string" - }, - "kylin.query.limit": { - "name": "kylin.query.limit", - "value": "5000", - "type": "number" - }, - "kylin.api.url": { - "name": "kylin.api.url", - "value": "http://localhost:7070/kylin/api/query", - "type": "url" - }, - "kylin.api.password": { - "name": "kylin.api.password", - "value": "KYLIN", - "type": "password" - }, - "kylin.query.ispartial": { - "name": "kylin.query.ispartial", - "value": true, - "type": "checkbox" - }, - "kylin.query.offset": { - "name": "kylin.query.offset", - "value": "0", - "type": "number" - } - }, - "status": "READY", - "interpreterGroup": [ - { - "name": "kylin", - "class": "org.apache.zeppelin.kylin.KylinInterpreter", - "defaultInterpreter": false, - "editor": { - "language": "sql", - "editOnDblClick": false, - "completionSupport": true - } - } - ], - "dependencies": [], - "option": { - "remote": true, - "port": -1, - "isExistingProcess": false, - "setPermission": false, - "owners": [], - "isUserImpersonate": false - } - } - }, - "interpreterBindings": { - "2DKXYM99T": [ - "spark", - "md", - "angular", - "sh", - "livy", - "alluxio", - "file", - "flink", - "python", - "ignite", - "lens", - "cassandra", - "kylin", - "elasticsearch", - "jdbc", - "hbase", - "bigquery", - "pig", - "groovy", - "neo4j", - "sap" - ], - "2C2AUG798": [ - "spark", - "md", - "angular", - "sh", - "livy", - "alluxio", - "file", - "flink", - "python", - "ignite", - "lens", - "cassandra", - "kylin", - "elasticsearch", - "jdbc", - "hbase", - "bigquery", - "pig", - "groovy", - "neo4j", - "sap" - ] - }, - "interpreterRepositories": [ - { - "id": "central", - "type": "default", - "url": "http://repo1.maven.org/maven2/", - "releasePolicy": { - "enabled": true, - "updatePolicy": "daily", - "checksumPolicy": "warn" - }, - "snapshotPolicy": { - "enabled": true, - "updatePolicy": "daily", - "checksumPolicy": "warn" - }, - "mirroredRepositories": [], - "repositoryManager": false - }, - { - "id": "local", - "type": "default", - "url": "file:///opt/helk/spark/.m2/repository", - "releasePolicy": { - "enabled": true, - 
"updatePolicy": "daily", - "checksumPolicy": "warn" - }, - "snapshotPolicy": { - "enabled": true, - "updatePolicy": "daily", - "checksumPolicy": "warn" - }, - "mirroredRepositories": [], - "repositoryManager": false - } - ] -} \ No newline at end of file diff --git a/docker/helk-zeppelin/spark-defaults.conf b/docker/helk-zeppelin/spark-defaults.conf deleted file mode 100644 index 24dd6cb0..00000000 --- a/docker/helk-zeppelin/spark-defaults.conf +++ /dev/null @@ -1,38 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Default system properties included when running spark-submit. -# This is useful for setting default environmental settings. - -# Example: -# spark.master spark://master:7077 -# spark.eventLog.enabled true -# spark.eventLog.dir hdfs://namenode:8021/directory -# spark.serializer org.apache.spark.serializer.KryoSerializer -# spark.driver.memory 5g -# spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three" - -# HELK References: -# https://spark.apache.org/docs/latest/configuration.html -# https://graphframes.github.io/quick-start.html -# https://spark-packages.org/package/graphframes/graphframes -# https://spark.apache.org/docs/latest/sql-programming-guide.html#pyspark-usage-guide-for-pandas-with-apache-arrow - -spark.master spark://helk-spark-master:7077 -spark.jars /opt/helk/zeppelin/spark-cypher-0.1.6-cluster.jar,/opt/helk/es-hadoop/elasticsearch-hadoop-6.3.1.jar -spark.jars.packages graphframes:graphframes:0.5.0-spark2.1-s_2.11,org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.0,databricks:spark-sklearn:0.2.3 -spark.sql.execution.arrow.enabled true \ No newline at end of file diff --git a/docker/helk-zeppelin/zeppelin-env.sh.template b/docker/helk-zeppelin/zeppelin-env.sh.template deleted file mode 100644 index 7c4c3583..00000000 --- a/docker/helk-zeppelin/zeppelin-env.sh.template +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# export JAVA_HOME= -# export MASTER= # Spark master url. eg. spark://master_addr:7077. 
Leave empty if you want to use local mode. -# export ZEPPELIN_JAVA_OPTS # Additional jvm options. for example, export ZEPPELIN_JAVA_OPTS="-Dspark.executor.memory=8g -Dspark.cores.max=16" -# export ZEPPELIN_MEM # Zeppelin jvm mem options Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -# export ZEPPELIN_INTP_MEM # zeppelin interpreter process jvm mem options. Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m -# export ZEPPELIN_INTP_JAVA_OPTS # zeppelin interpreter process jvm options. -# export ZEPPELIN_SSL_PORT # ssl port (used when ssl environment variable is set to true) - -# export ZEPPELIN_LOG_DIR # Where log files are stored. PWD by default. -# export ZEPPELIN_PID_DIR # The pid files are stored. ${ZEPPELIN_HOME}/run by default. -# export ZEPPELIN_WAR_TEMPDIR # The location of jetty temporary directory. -# export ZEPPELIN_NOTEBOOK_DIR # Where notebook saved -# export ZEPPELIN_NOTEBOOK_HOMESCREEN # Id of notebook to be displayed in homescreen. ex) 2A94M5J1Z -# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE # hide homescreen notebook from list when this value set to "true". default "false" -# export ZEPPELIN_NOTEBOOK_S3_BUCKET # Bucket where notebook saved -# export ZEPPELIN_NOTEBOOK_S3_ENDPOINT # Endpoint of the bucket -# export ZEPPELIN_NOTEBOOK_S3_USER # User in bucket where notebook saved. For example bucket/user/notebook/2A94M5J1Z/note.json -# export ZEPPELIN_NOTEBOOK_S3_KMS_KEY_ID # AWS KMS key ID -# export ZEPPELIN_NOTEBOOK_S3_KMS_KEY_REGION # AWS KMS key region -# export ZEPPELIN_IDENT_STRING # A string representing this instance of zeppelin. $USER by default. -# export ZEPPELIN_NICENESS # The scheduling priority for daemons. Defaults to 0. -# export ZEPPELIN_INTERPRETER_LOCALREPO # Local repository for interpreter's additional dependency loading -# export ZEPPELIN_INTERPRETER_DEP_MVNREPO # Remote principal repository for interpreter's additional dependency loading -# export ZEPPELIN_HELIUM_NPM_REGISTRY # Remote Npm registry for Helium dependency loader -# export ZEPPELIN_NOTEBOOK_STORAGE # Refers to pluggable notebook storage class, can have two classes simultaneously with a sync between them (e.g. local and remote). -# export ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC # If there are multiple notebook storages, should we treat the first one as the only source of truth? -# export ZEPPELIN_NOTEBOOK_PUBLIC # Make notebook public by default when created, private otherwise - -#### Spark interpreter configuration #### - -## Use provided spark installation ## -## defining SPARK_HOME makes Zeppelin run spark interpreter process using spark-submit -## -# export SPARK_HOME # (required) When it is defined, load it instead of Zeppelin embedded Spark libraries -# export SPARK_SUBMIT_OPTIONS # (optional) extra options to pass to spark submit. eg) "--driver-memory 512M --executor-memory 1G". -# export SPARK_APP_NAME # (optional) The name of spark application. - -## Use embedded spark binaries ## -## without SPARK_HOME defined, Zeppelin still able to run spark interpreter process using embedded spark binaries. -## however, it is not encouraged when you can define SPARK_HOME -## -# Options read in YARN client mode -# export HADOOP_CONF_DIR # yarn-site.xml is located in configuration directory in HADOOP_CONF_DIR. -# Pyspark (supported with Spark 1.2.1 and above) -# To configure pyspark, you need to set spark distribution's path to 'spark.home' property in Interpreter setting screen in Zeppelin GUI -# export PYSPARK_PYTHON # path to the python command. must be the same path on the driver(Zeppelin) and all workers. 
-# export PYTHONPATH
-
-## Spark interpreter options ##
-##
-# export ZEPPELIN_SPARK_USEHIVECONTEXT        # Use HiveContext instead of SQLContext if set true. true by default.
-# export ZEPPELIN_SPARK_CONCURRENTSQL         # Execute multiple SQL concurrently if set true. false by default.
-# export ZEPPELIN_SPARK_IMPORTIMPLICIT        # Import implicits, UDF collection, and sql if set true. true by default.
-# export ZEPPELIN_SPARK_MAXRESULT             # Max number of Spark SQL result to display. 1000 by default.
-# export ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE    # Size in characters of the maximum text message to be received by websocket. Defaults to 1024000
-
-
-#### HBase interpreter configuration ####
-
-## To connect to HBase running on a cluster, either HBASE_HOME or HBASE_CONF_DIR must be set
-
-# export HBASE_HOME=                          # (require) Under which HBase scripts and configuration should be
-# export HBASE_CONF_DIR=                      # (optional) Alternatively, configuration directory can be set to point to the directory that has hbase-site.xml
-
-#### ZeppelinHub connection configuration ####
-# export ZEPPELINHUB_API_ADDRESS              # Refers to the address of the ZeppelinHub service in use
-# export ZEPPELINHUB_API_TOKEN                # Refers to the Zeppelin instance token of the user
-# export ZEPPELINHUB_USER_KEY                 # Optional, when using Zeppelin with authentication.
-
-#### Zeppelin impersonation configuration
-# export ZEPPELIN_IMPERSONATE_CMD             # Optional, when user want to run interpreter as end web user. eg) 'sudo -H -u ${ZEPPELIN_IMPERSONATE_USER} bash -c '
-# export ZEPPELIN_IMPERSONATE_SPARK_PROXY_USER #Optional, by default is true; can be set to false if you don't want to use --proxy-user option with Spark interpreter when impersonation enabled
\ No newline at end of file
diff --git a/docker/helk_install.sh b/docker/helk_install.sh
index 2b22bf9c..cbdca81a 100755
--- a/docker/helk_install.sh
+++ b/docker/helk_install.sh
@@ -27,8 +27,8 @@ check_min_requirements(){
     echo "[HELK-INSTALLATION-INFO] HELK being hosted on a $SYSTEM_KERNEL box"
     if [ "$SYSTEM_KERNEL" == "Linux" ]; then
         AVAILABLE_MEMORY=$(awk '/MemAvailable/{printf "%.f", $2/1024/1024}' /proc/meminfo)
-        AVAILABLE_DISK=$(df -m | awk '$NF=="/"{printf "%.f\t\t", $4 / 1024}') 
-        ARCHITECTURE=$(uname -m) 
+        AVAILABLE_DISK=$(df -m | awk '$NF=="/"{printf "%.f\t\t", $4 / 1024}')
+        ARCHITECTURE=$(uname -m)
         if [ "${ARCHITECTURE}" != "x86_64" ]; then
             echo "[HELK-INSTALLATION-ERROR] HELK REQUIRES AN X86_64 BASED OPERATING SYSTEM TO INSTALL"
             echo "[HELK-INSTALLATION-ERROR] Your Systems Architecture: ${ARCHITECTURE}"
@@ -304,7 +304,7 @@ build_helk(){
     export ADVERTISED_LISTENER=$HOST_IP
 
     echo "[HELK-INSTALLATION-INFO] Building & running HELK from $COMPOSE_CONFIG file.."
-    sudo -E docker-compose -f $COMPOSE_CONFIG up --build -d >> $LOGFILE 2>&1
+    docker-compose -f $COMPOSE_CONFIG up --build -d >> $LOGFILE 2>&1
     ERROR=$?
     if [ $ERROR -ne 0 ]; then
         echoerror "Could not run HELK via docker-compose file $COMPOSE_CONFIG (Error Code: $ERROR)."
@@ -429,7 +429,6 @@ prepare_helk(){
 
 get_jupyter_credentials(){
     if [[ ${HELK_BUILD} == "helk-kibana-notebook-analysis" ]]; then
-        echo "[HELK-INSTALLATION-INFO] The following credentials can be used for Jupyterhub:"
         until docker exec -ti helk-jupyter cat /opt/helk/user_credentials.txt ; do
             sleep 10
         done
@@ -443,8 +442,8 @@ show_banner(){
     echo "** HELK - THE HUNTING ELK **"
     echo "**                        **"
     echo "** Author: Roberto Rodriguez (@Cyb3rWard0g) **"
-    echo "** HELK build version: v0.1.6-alpha12132018 **"
-    echo "** HELK ELK version: 6.5.3 **"
+    echo "** HELK build version: v0.1.6-alpha01312019 **"
+    echo "** HELK ELK version: 6.5.4 **"
     echo "** License: GPL-3.0 **"
     echo "**********************************************"
     echo " "
@@ -462,8 +461,8 @@ show_final_information(){
         echo "HELK KIBANA URL: https://${HOST_IP}"
         echo "HELK KIBANA USER: helk"
         echo "HELK KIBANA PASSWORD: ${KIBANA_UI_PASSWORD_INPUT}"
-        echo "HELK JUPYTERHUB URL: http://${HOST_IP}/jupyter"
         echo "HELK SPARK MASTER UI: http://${HOST_IP}:8080"
+        echo "HELK JUPYTERHUB URL: http://${HOST_IP}/jupyter"
         get_jupyter_credentials
     elif [[ ${HELK_BUILD} == "helk-kibana-analysis" ]]; then
         echo "HELK KIBANA URL: https://${HOST_IP}"
@@ -567,4 +566,4 @@ else
         echo "[HELK-INSTALLATION-ERROR] Make sure you set the right parameters"
         usage
     fi
-fi
+fi
\ No newline at end of file
diff --git a/docker/helk_update.sh b/docker/helk_update.sh
index 99fd565c..fd38a010 100755
--- a/docker/helk_update.sh
+++ b/docker/helk_update.sh
@@ -3,7 +3,6 @@
 # HELK script: helk_update.sh
 # HELK script description: Update and Rebuild HELK
 # HELK build Stage: Alpha
-# HELK ELK version: 6.5.3
 # Author: Roberto Rodriguez (@Cyb3rWard0g)
 # Script Author: Dev Dua (@devdua)
 # License: GPL-3.0