From 2c7242c52b56e65fc55a23b418e607d15624af07 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Mon, 19 Mar 2018 10:30:06 +1000 Subject: [PATCH 01/31] Separate out java installation into script This commit separates out the installation of Java into a separate script, to be reused by both Elasticsearch and Logstash installations --- src/scripts/elasticsearch-ubuntu-install.sh | 49 +--------- src/scripts/java-ubuntu-install.sh | 99 +++++++++++++++++++++ src/settings/ubuntuSettings.json | 1 + 3 files changed, 101 insertions(+), 48 deletions(-) create mode 100644 src/scripts/java-ubuntu-install.sh diff --git a/src/scripts/elasticsearch-ubuntu-install.sh b/src/scripts/elasticsearch-ubuntu-install.sh index 231630e5..29e0157f 100755 --- a/src/scripts/elasticsearch-ubuntu-install.sh +++ b/src/scripts/elasticsearch-ubuntu-install.sh @@ -341,57 +341,10 @@ check_data_disk() fi } -# Update the oracle-java8-installer to patch download of Java 8u171 to 8u181. -# 8u171 download is now archived -# TODO: Remove this once oracle-java8-installer package is updated -install_java_package() -{ - apt-get -yq $@ install oracle-java8-installer || true \ - && pushd /var/lib/dpkg/info \ - && log "[install_java_package] update oracle-java8-installer to 8u181" \ - && sed -i 's|JAVA_VERSION=8u171|JAVA_VERSION=8u181|' oracle-java8-installer.* \ - && sed -i 's|PARTNER_URL=http://download.oracle.com/otn-pub/java/jdk/8u171-b11/512cd62ec5174c3487ac17c61aaa89e8/|PARTNER_URL=http://download.oracle.com/otn-pub/java/jdk/8u181-b13/96a7b8442fe848ef90c96a2fad6ed6d1/|' oracle-java8-installer.* \ - && sed -i 's|SHA256SUM_TGZ="b6dd2837efaaec4109b36cfbb94a774db100029f98b0d78be68c27bec0275982"|SHA256SUM_TGZ="1845567095bfbfebd42ed0d09397939796d05456290fb20a83c476ba09f991d3"|' oracle-java8-installer.* \ - && sed -i 's|J_DIR=jdk1.8.0_171|J_DIR=jdk1.8.0_181|' oracle-java8-installer.* \ - && popd \ - && log "[install_java_package] updated oracle-java8-installer" \ - && apt-get -yq $@ install oracle-java8-installer -} - 
# Install Oracle Java install_java() { - log "[install_java] adding apt repository for Java 8" - (add-apt-repository -y ppa:webupd8team/java || (sleep 15; add-apt-repository -y ppa:webupd8team/java)) - log "[install_java] updating apt-get" - (apt-get -y update || (sleep 15; apt-get -y update)) > /dev/null - log "[install_java] updated apt-get" - echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections - echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections - log "[install_java] installing Java" - (install_java_package || (sleep 15; install_java_package)) - command -v java >/dev/null 2>&1 || { sleep 15; rm /var/cache/oracle-jdk8-installer/jdk-*; apt-get install -f; } - - #if the previous did not install correctly we go nuclear, otherwise this loop will early exit - for i in $(seq 30); do - if $(command -v java >/dev/null 2>&1); then - log "[install_java] installed Java!" - return - else - sleep 5 - rm /var/cache/oracle-jdk8-installer/jdk-*; - rm -f /var/lib/dpkg/info/oracle-java8-installer* - rm /etc/apt/sources.list.d/*java* - apt-get -yq purge oracle-java8-installer* - apt-get -yq autoremove - apt-get -yq clean - (add-apt-repository -y ppa:webupd8team/java || (sleep 15; add-apt-repository -y ppa:webupd8team/java)) - apt-get -yq update - install_java_package --reinstall - log "[install_java] seeing if Java is installed after nuclear retry ${i}/30" - fi - done - command -v java >/dev/null 2>&1 || { log "[install_java] Java did not get installed properly even after a retry and a forced installation" >&2; exit 50; } + bash java-ubuntu-install.sh } # Install Elasticsearch diff --git a/src/scripts/java-ubuntu-install.sh b/src/scripts/java-ubuntu-install.sh new file mode 100644 index 00000000..9da0e202 --- /dev/null +++ b/src/scripts/java-ubuntu-install.sh @@ -0,0 +1,99 @@ +#!/bin/bash + +# License: https://github.com/elastic/azure-marketplace/blob/master/LICENSE.txt +# +# Russ Cam (Elastic) +# + +export 
DEBIAN_FRONTEND=noninteractive + +######################### +# HELP +######################### + +help() +{ + echo "This script installs Java on Ubuntu using the oracle-java8-installer apt package" + echo "" + echo "Options:" + echo " -h this help message" +} + +log() +{ + echo \[$(date +%d%m%Y-%H:%M:%S)\] \["install_java"\] "$1" + echo \[$(date +%d%m%Y-%H:%M:%S)\] \["install_java"\] "$1" >> /var/log/arm-install.log +} + +######################### +# Parameter handling +######################### + +while getopts h optname; do + log "Option $optname set with value ${OPTARG}" + case ${optname} in + h) #show help + help + exit 2 + ;; + \?) #unrecognized option - show help + echo -e \\n"Option -${BOLD}$OPTARG${NORM} not allowed." + help + exit 2 + ;; + esac +done + +# Update the oracle-java8-installer to patch download of Java 8u171 to 8u181. +# 8u171 download is now archived +# TODO: Remove this once oracle-java8-installer package is updated +install_java_package() +{ + apt-get -yq $@ install oracle-java8-installer || true \ + && pushd /var/lib/dpkg/info \ + && log "[install_java_package] update oracle-java8-installer to 8u181" \ + && sed -i 's|JAVA_VERSION=8u171|JAVA_VERSION=8u181|' oracle-java8-installer.* \ + && sed -i 's|PARTNER_URL=http://download.oracle.com/otn-pub/java/jdk/8u171-b11/512cd62ec5174c3487ac17c61aaa89e8/|PARTNER_URL=http://download.oracle.com/otn-pub/java/jdk/8u181-b13/96a7b8442fe848ef90c96a2fad6ed6d1/|' oracle-java8-installer.* \ + && sed -i 's|SHA256SUM_TGZ="b6dd2837efaaec4109b36cfbb94a774db100029f98b0d78be68c27bec0275982"|SHA256SUM_TGZ="1845567095bfbfebd42ed0d09397939796d05456290fb20a83c476ba09f991d3"|' oracle-java8-installer.* \ + && sed -i 's|J_DIR=jdk1.8.0_171|J_DIR=jdk1.8.0_181|' oracle-java8-installer.* \ + && popd \ + && log "[install_java_package] updated oracle-java8-installer" \ + && apt-get -yq $@ install oracle-java8-installer +} + +install_java() +{ + log "adding apt repository for java" + (add-apt-repository -y ppa:webupd8team/java 
|| (sleep 15; add-apt-repository -y ppa:webupd8team/java)) + log "updating apt-get" + (apt-get -y update || (sleep 15; apt-get -y update)) > /dev/null + log "updated apt-get" + echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections + echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections + log "installing java" + (install_java_package || (sleep 15; install_java_package)) + command -v java >/dev/null 2>&1 || { sleep 15; rm /var/cache/oracle-jdk8-installer/jdk-*; apt-get install -f; } + + #if the previous did not install correctly we go nuclear, otherwise this loop will early exit + for i in $(seq 30); do + if $(command -v java >/dev/null 2>&1); then + log "installed java!" + return + else + sleep 5 + rm /var/cache/oracle-jdk8-installer/jdk-*; + rm -f /var/lib/dpkg/info/oracle-java8-installer* + rm /etc/apt/sources.list.d/*java* + apt-get -yq purge oracle-java8-installer* + apt-get -yq autoremove + apt-get -yq clean + (add-apt-repository -y ppa:webupd8team/java || (sleep 15; add-apt-repository -y ppa:webupd8team/java)) + apt-get -yq update + install_java_package --reinstall + log "seeing if java is installed after nuclear retry ${i}/30" + fi + done + command -v java >/dev/null 2>&1 || { log "java did not get installed properly even after a retry and a forced installation" >&2; exit 50; } +} + +install_java diff --git a/src/settings/ubuntuSettings.json b/src/settings/ubuntuSettings.json index 9c27a47e..69393881 100644 --- a/src/settings/ubuntuSettings.json +++ b/src/settings/ubuntuSettings.json @@ -93,6 +93,7 @@ "[concat(parameters('templateBaseUrl'), 'scripts/elasticsearch-ubuntu-install.sh')]", "[concat(parameters('templateBaseUrl'), 'scripts/kibana-install.sh')]", "[concat(parameters('templateBaseUrl'), 'scripts/vm-disk-utils-0.1.sh')]" + "[concat(parameters('templateBaseUrl'), 'scripts/java-ubuntu-install.sh')]" ], "ubuntuSkus": { "5": "16.04-LTS", From 229d8619201e5d011a9a1bed7a6e718dcd82bd64 Mon Sep 17 
00:00:00 2001 From: Russ Cam Date: Mon, 19 Mar 2018 11:31:33 +1000 Subject: [PATCH 02/31] Rename Kibana script --- ...na-install.sh => kibana-ubuntu-install.sh} | 0 src/scripts/logstash-ubuntu-install.sh | 194 ++++++++++++++++++ src/settings/ubuntuSettings.json | 12 ++ 3 files changed, 206 insertions(+) rename src/scripts/{kibana-install.sh => kibana-ubuntu-install.sh} (100%) create mode 100644 src/scripts/logstash-ubuntu-install.sh diff --git a/src/scripts/kibana-install.sh b/src/scripts/kibana-ubuntu-install.sh similarity index 100% rename from src/scripts/kibana-install.sh rename to src/scripts/kibana-ubuntu-install.sh diff --git a/src/scripts/logstash-ubuntu-install.sh b/src/scripts/logstash-ubuntu-install.sh new file mode 100644 index 00000000..0f325ecb --- /dev/null +++ b/src/scripts/logstash-ubuntu-install.sh @@ -0,0 +1,194 @@ +#!/bin/bash + +# License: https://github.com/elastic/azure-marketplace/blob/master/LICENSE.txt +# +# Russ Cam (Elastic) +# + +######################### +# HELP +######################### + +help() +{ + echo "This script installs logstash on a dedicated VM in the elasticsearch ARM template cluster" + echo "Parameters:" + + echo "-h view this help content" +} + +# Custom logging with time so we can easily relate running times, also log to separate file so order is guaranteed. +# The Script extension output the stdout/err buffer in intervals with duplicates. +log() +{ + echo \[$(date +%d%m%Y-%H:%M:%S)\] "$1" + echo \[$(date +%d%m%Y-%H:%M:%S)\] "$1" >> /var/log/arm-install.log +} + +log "Begin execution of Logstash script extension on ${HOSTNAME}" +START_TIME=$SECONDS + +export DEBIAN_FRONTEND=noninteractive + +if service --status-all | grep -Fq 'logstash'; then + log "Logstash already installed." 
+ exit 0 +fi + +######################### +# Parameter handling +######################### + +#Script Parameters +CLUSTER_NAME="elasticsearch" +LOGSTASH_VERSION="6.2.1" +ES_VERSION="6.2.1" +#Default internal load balancer ip +ELASTICSEARCH_URL="http://10.0.0.4:9200" +INSTALL_XPACK=0 +USER_LOGSTASH_PWD="changeme" + +#Loop through options passed +while getopts :n:v:e:u:S:C:K:P:m:lh optname; do + log "Option $optname set" + case $optname in + n) #set cluster name + CLUSTER_NAME="${OPTARG}" + ;; + v) #logstash version number + LOGSTASH_VERSION="${OPTARG}" + ;; + e) #elasticsearch version number + ES_VERSION="${OPTARG}" + ;; + u) #elasticsearch url + ELASTICSEARCH_URL="${OPTARG}" + ;; + S) #security logstash pwd + USER_LOGSTASH_PWD="${OPTARG}" + ;; + l) #install X-Pack + INSTALL_XPACK=1 + ;; + h) #show help + help + exit 2 + ;; + \?) #unrecognized option - show help + echo -e \\n"Option -${BOLD}$OPTARG${NORM} not allowed." + help + exit 2 + ;; + esac +done +######################### +# Parameter state changes +######################### + +log "installing logstash $LOGSTASH_VERSION for Elasticsearch $ES_VERSION cluster: $CLUSTER_NAME" +log "installing X-Pack plugins is set to: $INSTALL_XPACK" +log "Logstash will talk to Elasticsearch over $ELASTICSEARCH_URL" + +######################### +# Installation steps as functions +######################### + +download_install_deb() +{ + log "[download_install_deb] starting download of package" + local DOWNLOAD_URL="https://artifacts.elastic.co/downloads/kibana/kibana-$KIBANA_VERSION-amd64.deb" + curl -o "kibana-$KIBANA_VERSION.deb" "$DOWNLOAD_URL" + log "[download_install_deb] installing downloaded package" + dpkg -i "kibana-$KIBANA_VERSION.deb" +} + +## Security +##---------------------------------- + +configuration_and_plugins() +{ + # backup the current config + mv /etc/kibana/kibana.yml /etc/kibana/kibana.yml.bak + + log "[configuration_and_plugins] configuring kibana.yml" + local KIBANA_CONF=/etc/kibana/kibana.yml + # set 
the elasticsearch URL + echo "elasticsearch.url: \"$ELASTICSEARCH_URL\"" >> $KIBANA_CONF + echo "server.host:" $(hostname -I) >> $KIBANA_CONF + # specify kibana log location + echo "logging.dest: /var/log/kibana.log" >> $KIBANA_CONF + touch /var/log/kibana.log + chown kibana: /var/log/kibana.log + + # set logging to silent by default + echo "logging.silent: true" >> $KIBANA_CONF + + # install x-pack + if [ ${INSTALL_XPACK} -ne 0 ]; then + echo "elasticsearch.username: kibana" >> $KIBANA_CONF + echo "elasticsearch.password: $USER_KIBANA_PWD" >> $KIBANA_CONF + + install_pwgen + local ENCRYPTION_KEY=$(pwgen 64 1) + echo "xpack.security.encryptionKey: \"$ENCRYPTION_KEY\"" >> $KIBANA_CONF + ENCRYPTION_KEY=$(pwgen 64 1) + echo "xpack.reporting.encryptionKey: \"$ENCRYPTION_KEY\"" >> $KIBANA_CONF + log "[configuration_and_plugins] x-pack security encryption key generated" + + log "[configuration_and_plugins] installing x-pack plugin" + /usr/share/kibana/bin/kibana-plugin install x-pack + log "[configuration_and_plugins] installed x-pack plugin" + fi + + # configure HTTPS if cert and private key supplied + if [[ -n "${SSL_CERT}" && -n "${SSL_KEY}" ]]; then + mkdir -p /etc/kibana/ssl + log "[configuration_and_plugins] save kibana cert blob to file" + echo ${SSL_CERT} | base64 -d | tee /etc/kibana/ssl/kibana.crt + log "[configuration_and_plugins] save kibana key blob to file" + echo ${SSL_KEY} | base64 -d | tee /etc/kibana/ssl/kibana.key + + log "[configuration_and_plugins] configuring encrypted communication" + + if dpkg --compare-versions "$KIBANA_VERSION" ">=" "5.3.0"; then + echo "server.ssl.enabled: true" >> $KIBANA_CONF + echo "server.ssl.key: /etc/kibana/ssl/kibana.key" >> $KIBANA_CONF + echo "server.ssl.certificate: /etc/kibana/ssl/kibana.crt" >> $KIBANA_CONF + + if [[ -n "${SSL_PASSPHRASE}" ]]; then + echo "server.ssl.keyPassphrase: \"$SSL_PASSPHRASE\"" >> $KIBANA_CONF + fi + else + echo "server.ssl.key: /etc/kibana/ssl/kibana.key" >> $KIBANA_CONF + echo 
"server.ssl.cert: /etc/kibana/ssl/kibana.crt" >> $KIBANA_CONF + fi + + log "[configuration_and_plugins] configured encrypted communication" + fi +} + +install_start_service() +{ + log "[install_start_service] configuring service for kibana to run at start" + update-rc.d kibana defaults 95 10 + log "[install_start_service] starting kibana!" + service kibana start +} + +######################### +# Installation sequence +######################### + +log "[apt-get] updating apt-get" +(apt-get -y update || (sleep 15; apt-get -y update)) > /dev/null +log "[apt-get] updated apt-get" + +log "[install_sequence] Starting installation" +download_install_deb +configuration_and_plugins +install_start_service +log "[install_sequence] Finished installation" + +ELAPSED_TIME=$(($SECONDS - $START_TIME)) +PRETTY=$(printf '%dh:%dm:%ds\n' $(($ELAPSED_TIME/3600)) $(($ELAPSED_TIME%3600/60)) $(($ELAPSED_TIME%60))) +log "End execution of Kibana script extension in ${PRETTY}" diff --git a/src/settings/ubuntuSettings.json b/src/settings/ubuntuSettings.json index 69393881..8546f084 100644 --- a/src/settings/ubuntuSettings.json +++ b/src/settings/ubuntuSettings.json @@ -156,6 +156,18 @@ "protectedSettings": { "commandToExecute": "[concat('bash kibana-install.sh -', variables('installPluginsShortOpt'), 'n \"', parameters('esSettings').clusterName, '\" -v \"', parameters('esSettings').version, '\" -u \"', concat(if(equals(parameters('networkSettings').https, 'Yes'), 'https', 'http'), '://', parameters('topologySettings').vNetLoadBalancerIp, ':9200') ,'\" -S \"', parameters('esSettings').securityKibanaPwd, '\" -C \"', parameters('topologySettings').kibanaCertBlob, '\" -K \"', parameters('topologySettings').kibanaKeyBlob, '\" -P \"', parameters('topologySettings').kibanaKeyPassphrase, '\" -Y \"', replace(parameters('topologySettings').kibanaYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', 
parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword, '\" -U \"', variables('kibanaDomainName'), '\"')]" } + }, + "logstash": { + "publisher": "Microsoft.Azure.Extensions", + "type": "CustomScript", + "typeHandlerVersion": "2.0", + "autoUpgradeMinorVersion": true, + "settings": { + "fileUris": "[variables('ubuntuScripts')]" + }, + "protectedSettings": { + "commandToExecute": "[concat('bash logstash-ubuntu-install.sh -', variables('installPluginsShortOpt'), 'n \"', parameters('esSettings').clusterName, '\" -v \"', parameters('esSettings').logstashVersion, '\" -e \"', parameters('esSettings').version, '\" -u \"', concat('http://', parameters('topologySettings').vNetLoadBalancerIp, ':9200') ,'\" -S \"', parameters('esSettings').securityLogstashPwd, '\"')]" + } } } } From fd8d9e75e25f14376bfd7456d4b43cc2b8e665e6 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Mon, 19 Mar 2018 17:59:23 +1000 Subject: [PATCH 03/31] Start of add logstash to template Work in Progress --- src/mainTemplate.json | 86 +++++++++++++ src/scripts/logstash-ubuntu-install.sh | 165 ++++++++++++++----------- 2 files changed, 182 insertions(+), 69 deletions(-) diff --git a/src/mainTemplate.json b/src/mainTemplate.json index b0b5cc04..532b1098 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -350,6 +350,92 @@ "description": "Additional configuration for Kibana yaml configuration file. Each line must be separated by a newline character e.g. 
server.ssl.enabled: true\nkibana.defaultAppId: \"home\"" } }, + "logstash": { + "type": "string", + "defaultValue": "No", + "allowedValues": [ + "Yes", + "No" + ], + "metadata": { + "description": "Provision machines with Logstash" + } + }, + "vmSizeLogstash": { + "type": "string", + "defaultValue": "Standard_A2", + "allowedValues": [ + "Standard_A2", + "Standard_A3", + "Standard_A4", + "Standard_A5", + "Standard_A6", + "Standard_A7", + "Standard_D1", + "Standard_D2", + "Standard_D3", + "Standard_D4", + "Standard_D11", + "Standard_D12", + "Standard_D13", + "Standard_D14", + "Standard_D1_v2", + "Standard_D2_v2", + "Standard_D3_v2", + "Standard_D4_v2", + "Standard_D5_v2", + "Standard_D11_v2", + "Standard_D12_v2", + "Standard_D13_v2", + "Standard_D14_v2", + "Standard_D15_v2", + "Standard_DS1", + "Standard_DS2", + "Standard_DS3", + "Standard_DS4", + "Standard_DS11", + "Standard_DS12", + "Standard_DS13", + "Standard_DS14", + "Standard_DS1_v2", + "Standard_DS2_v2", + "Standard_DS3_v2", + "Standard_DS4_v2", + "Standard_DS5_v2", + "Standard_DS11_v2", + "Standard_DS12_v2", + "Standard_DS13_v2", + "Standard_DS14_v2", + "Standard_DS15_v2", + "Standard_F1", + "Standard_F2", + "Standard_F4", + "Standard_F8", + "Standard_F16", + "Standard_F1s", + "Standard_F2s", + "Standard_F4s", + "Standard_F8s", + "Standard_F16s" + ], + "metadata": { + "description": "Size of the Logstash nodes" + } + }, + "logstashKeystorePassword": { + "type": "securestring", + "defaultValue": "", + "metadata": { + "description": "Password for the Logstash keystore." + } + }, + "logstashAdditionalYaml": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "Additional configuration for Logstash yaml configuration file. Each line must be separated by a newline character e.g. 
pipeline.batch.size: 125\npipeline.batch.delay: 50" + } + }, "jumpbox": { "type": "string", "defaultValue": "No", diff --git a/src/scripts/logstash-ubuntu-install.sh b/src/scripts/logstash-ubuntu-install.sh index 0f325ecb..eabe88f9 100644 --- a/src/scripts/logstash-ubuntu-install.sh +++ b/src/scripts/logstash-ubuntu-install.sh @@ -5,6 +5,8 @@ # Russ Cam (Elastic) # +export DEBIAN_FRONTEND=noninteractive + ######################### # HELP ######################### @@ -13,6 +15,7 @@ help() { echo "This script installs logstash on a dedicated VM in the elasticsearch ARM template cluster" echo "Parameters:" + # TODO: Add parameters here echo "-h view this help content" } @@ -28,7 +31,16 @@ log() log "Begin execution of Logstash script extension on ${HOSTNAME}" START_TIME=$SECONDS -export DEBIAN_FRONTEND=noninteractive +######################### +# Preconditions +######################### + +if [ "${UID}" -ne 0 ]; +then + log "Script executed without root permissions" + echo "You must be root to run this program." >&2 + exit 3 +fi if service --status-all | grep -Fq 'logstash'; then log "Logstash already installed." 
@@ -40,27 +52,21 @@ fi ######################### #Script Parameters -CLUSTER_NAME="elasticsearch" LOGSTASH_VERSION="6.2.1" -ES_VERSION="6.2.1" -#Default internal load balancer ip ELASTICSEARCH_URL="http://10.0.0.4:9200" INSTALL_XPACK=0 +INSTALL_ADDITIONAL_PLUGINS="" USER_LOGSTASH_PWD="changeme" +LOGSTASH_KEYSTORE_PWD="changeme" +LOGSTASH_CONF_FILE="" #Loop through options passed -while getopts :n:v:e:u:S:C:K:P:m:lh optname; do +while getopts :v:u:S:c:K:L:h optname; do log "Option $optname set" case $optname in - n) #set cluster name - CLUSTER_NAME="${OPTARG}" - ;; v) #logstash version number LOGSTASH_VERSION="${OPTARG}" ;; - e) #elasticsearch version number - ES_VERSION="${OPTARG}" - ;; u) #elasticsearch url ELASTICSEARCH_URL="${OPTARG}" ;; @@ -70,6 +76,14 @@ while getopts :n:v:e:u:S:C:K:P:m:lh optname; do l) #install X-Pack INSTALL_XPACK=1 ;; + L) #install additional plugins + INSTALL_ADDITIONAL_PLUGINS="${OPTARG}" + ;; + c) #logstash configuration + LOGSTASH_CONF_FILE="${OPTARG}" + ;; + K) #logstash keystore password + LOGSTASH_KEYSTORE_PWD="${OPTARG}" h) #show help help exit 2 @@ -85,9 +99,8 @@ done # Parameter state changes ######################### -log "installing logstash $LOGSTASH_VERSION for Elasticsearch $ES_VERSION cluster: $CLUSTER_NAME" +log "installing logstash $LOGSTASH_VERSION" log "installing X-Pack plugins is set to: $INSTALL_XPACK" -log "Logstash will talk to Elasticsearch over $ELASTICSEARCH_URL" ######################### # Installation steps as functions @@ -95,84 +108,98 @@ log "Logstash will talk to Elasticsearch over $ELASTICSEARCH_URL" download_install_deb() { - log "[download_install_deb] starting download of package" - local DOWNLOAD_URL="https://artifacts.elastic.co/downloads/kibana/kibana-$KIBANA_VERSION-amd64.deb" - curl -o "kibana-$KIBANA_VERSION.deb" "$DOWNLOAD_URL" - log "[download_install_deb] installing downloaded package" - dpkg -i "kibana-$KIBANA_VERSION.deb" + log "[download_install_deb] starting download of package" + local 
DOWNLOAD_URL="https://artifacts.elastic.co/downloads/logstash/logstash-$LOGSTASH_VERSION.deb" + curl -o "logstash-$LOGSTASH_VERSION.deb" "$DOWNLOAD_URL" + log "[download_install_deb] installing downloaded package" + dpkg -i "logstash-$LOGSTASH_VERSION.deb" } ## Security ##---------------------------------- +add_keystore_or_env_var() +{ + if dpkg --compare-versions "$LOGSTASH_VERSION" ">=" "6.2.0"; then + log "[configuration_and_plugins] adding $1 to logstash keystore" + echo "$2" | bin/logstash-keystore add $1 + log "[configuration_and_plugins] added $1 logstash keystore" + else + log "[add_keystore_or_env_var] adding environment variable for $1" + set +o history + export $1="$2" + set -o history + log "[add_keystore_or_env_var] added environment variable for $1" + fi +} + +install_additional_plugins() +{ + SKIP_PLUGINS="x-pack" + log "[install_additional_plugins] installing additional plugins" + for PLUGIN in $(echo $INSTALL_ADDITIONAL_PLUGINS | tr ";" "\n") + do + if [[ $SKIP_PLUGINS =~ $PLUGIN ]]; then + log "[install_additional_plugins] skipping plugin $PLUGIN" + else + log "[install_additional_plugins] installing plugin $PLUGIN" + /usr/share/logstash/bin/logstash-plugin install $PLUGIN + log "[install_additional_plugins] installed plugin $PLUGIN" + fi + done + log "[install_additional_plugins] installed additional plugins" +} + configuration_and_plugins() { # backup the current config - mv /etc/kibana/kibana.yml /etc/kibana/kibana.yml.bak + local LOGSTASH_CONF=/etc/logstash/logstash.yml + mv "$LOGSTASH_CONF" "$LOGSTASH_CONF.bak" - log "[configuration_and_plugins] configuring kibana.yml" - local KIBANA_CONF=/etc/kibana/kibana.yml - # set the elasticsearch URL - echo "elasticsearch.url: \"$ELASTICSEARCH_URL\"" >> $KIBANA_CONF - echo "server.host:" $(hostname -I) >> $KIBANA_CONF - # specify kibana log location - echo "logging.dest: /var/log/kibana.log" >> $KIBANA_CONF - touch /var/log/kibana.log - chown kibana: /var/log/kibana.log + log 
"[configuration_and_plugins] configuring logstash.yml" - # set logging to silent by default - echo "logging.silent: true" >> $KIBANA_CONF + mkdir -p /var/log/logstash + chown -R logstash: /var/log/logstash + + # logstash conf file + if [[ -n "$LOGSTASH_CONF_FILE" ]]; then + echo "$LOGSTASH_CONF_FILE" > /etc/logstash/conf.d/logstash.conf + fi + + # logstash keystore + if dpkg --compare-versions "$LOGSTASH_VERSION" ">=" "6.2.0"; then + set +o history + export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" + set -o history + log "[configuration_and_plugins] creating logstash keystore" + bin/logstash-keystore create + log "[configuration_and_plugins] created logstash keystore" + fi + + add_keystore_or_env_var 'LOGSTASH_SYSTEM_PASS' "$USER_LOGSTASH_PWD" + add_keystore_or_env_var 'ELASTICSEARCH_URL' "$ELASTICSEARCH_URL" # install x-pack if [ ${INSTALL_XPACK} -ne 0 ]; then - echo "elasticsearch.username: kibana" >> $KIBANA_CONF - echo "elasticsearch.password: $USER_KIBANA_PWD" >> $KIBANA_CONF - - install_pwgen - local ENCRYPTION_KEY=$(pwgen 64 1) - echo "xpack.security.encryptionKey: \"$ENCRYPTION_KEY\"" >> $KIBANA_CONF - ENCRYPTION_KEY=$(pwgen 64 1) - echo "xpack.reporting.encryptionKey: \"$ENCRYPTION_KEY\"" >> $KIBANA_CONF - log "[configuration_and_plugins] x-pack security encryption key generated" + echo "xpack.monitoring.elasticsearch.username: logstash_system" >> $LOGSTASH_CONF + echo 'xpack.monitoring.elasticsearch.password: ${LOGSTASH_SYSTEM_PASS}' >> $LOGSTASH_CONF log "[configuration_and_plugins] installing x-pack plugin" - /usr/share/kibana/bin/kibana-plugin install x-pack + /usr/share/logstash/bin/logstash-plugin install x-pack log "[configuration_and_plugins] installed x-pack plugin" fi - # configure HTTPS if cert and private key supplied - if [[ -n "${SSL_CERT}" && -n "${SSL_KEY}" ]]; then - mkdir -p /etc/kibana/ssl - log "[configuration_and_plugins] save kibana cert blob to file" - echo ${SSL_CERT} | base64 -d | tee /etc/kibana/ssl/kibana.crt - log 
"[configuration_and_plugins] save kibana key blob to file" - echo ${SSL_KEY} | base64 -d | tee /etc/kibana/ssl/kibana.key - - log "[configuration_and_plugins] configuring encrypted communication" - - if dpkg --compare-versions "$KIBANA_VERSION" ">=" "5.3.0"; then - echo "server.ssl.enabled: true" >> $KIBANA_CONF - echo "server.ssl.key: /etc/kibana/ssl/kibana.key" >> $KIBANA_CONF - echo "server.ssl.certificate: /etc/kibana/ssl/kibana.crt" >> $KIBANA_CONF - - if [[ -n "${SSL_PASSPHRASE}" ]]; then - echo "server.ssl.keyPassphrase: \"$SSL_PASSPHRASE\"" >> $KIBANA_CONF - fi - else - echo "server.ssl.key: /etc/kibana/ssl/kibana.key" >> $KIBANA_CONF - echo "server.ssl.cert: /etc/kibana/ssl/kibana.crt" >> $KIBANA_CONF - fi - - log "[configuration_and_plugins] configured encrypted communication" + # install additional plugins + if [[ -n "$INSTALL_ADDITIONAL_PLUGINS" ]]; then + install_additional_plugins fi } install_start_service() { - log "[install_start_service] configuring service for kibana to run at start" - update-rc.d kibana defaults 95 10 - log "[install_start_service] starting kibana!" - service kibana start + log "[install_start_service] starting logstash" + systemctl start logstash.service + log "[install_start_service] started logstash!" 
} ######################### @@ -191,4 +218,4 @@ log "[install_sequence] Finished installation" ELAPSED_TIME=$(($SECONDS - $START_TIME)) PRETTY=$(printf '%dh:%dm:%ds\n' $(($ELAPSED_TIME/3600)) $(($ELAPSED_TIME%3600/60)) $(($ELAPSED_TIME%60))) -log "End execution of Kibana script extension in ${PRETTY}" +log "End execution of Logstash script extension in ${PRETTY}" From 025034bda72cc9a55994e8fb37586e57102149a5 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 7 Sep 2018 16:24:31 +1000 Subject: [PATCH 04/31] Add common Logstash configuration WIP - untested --- src/mainTemplate.json | 28 ++ ...tu-install.sh => elasticsearch-install.sh} | 0 ...java-ubuntu-install.sh => java-install.sh} | 0 ...na-ubuntu-install.sh => kibana-install.sh} | 2 +- src/scripts/logstash-install.sh | 314 ++++++++++++++++++ src/scripts/logstash-ubuntu-install.sh | 221 ------------ src/settings/ubuntuSettings.json | 15 +- 7 files changed, 351 insertions(+), 229 deletions(-) rename src/scripts/{elasticsearch-ubuntu-install.sh => elasticsearch-install.sh} (100%) mode change 100755 => 100644 rename src/scripts/{java-ubuntu-install.sh => java-install.sh} (100%) rename src/scripts/{kibana-ubuntu-install.sh => kibana-install.sh} (99%) create mode 100644 src/scripts/logstash-install.sh delete mode 100644 src/scripts/logstash-ubuntu-install.sh diff --git a/src/mainTemplate.json b/src/mainTemplate.json index 532b1098..de54e088 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -422,6 +422,20 @@ "description": "Size of the Logstash nodes" } }, + "logstashHeapSize": { + "type": "int", + "defaultValue": 0, + "metadata": { + "description": "The size, in megabytes, of memory to allocate to Logstash for the JVM heap. If unspecified, will default to 1GB" + } + }, + "logstashConf": { + "type": "securestring", + "defaultValue": "", + "metadata": { + "description": "base 64 Logstash configuration." 
+ } + }, "logstashKeystorePassword": { "type": "securestring", "defaultValue": "", @@ -429,6 +443,13 @@ "description": "Password for the Logstash keystore." } }, + "logstashAdditionalPlugins": { + "type": "string", + "defaultValue": "", + "metadata": { + "description": "Additional Logstash plugins to install. Each plugin must be separated by a semicolon. e.g. azure_event_hubs;http_poller" + } + }, "logstashAdditionalYaml": { "type": "string", "defaultValue": "", @@ -1608,6 +1629,13 @@ "kibanaCertBlob": "[parameters('kibanaCertBlob')]", "kibanaHttps": "[variables('kibanaHttps')]", "kibanaYaml": "[parameters('kibanaAdditionalYaml')]", + "vmSizeLogstash": "[parameters('vmSizeLogstash')]", + "logstash": "[parameters('logstash')]", + "logstashHeapSize": "[parameters('logstashHeapSize')]", + "logstashConf": "[parameters('logstashConf')]", + "logstashPlugins": "[parameters('logstashAdditionalPlugins')]", + "logstashYaml": "[parameters('logstashAdditionalYaml')]", + "logstashKeystorePwd": "[parameters('logstashKeystorePassword')]", "jumpbox": "[parameters('jumpbox')]", "dataNodeStorageSettings": { "accountType": "[variables('resolvedStorageAccountType')]", diff --git a/src/scripts/elasticsearch-ubuntu-install.sh b/src/scripts/elasticsearch-install.sh old mode 100755 new mode 100644 similarity index 100% rename from src/scripts/elasticsearch-ubuntu-install.sh rename to src/scripts/elasticsearch-install.sh diff --git a/src/scripts/java-ubuntu-install.sh b/src/scripts/java-install.sh similarity index 100% rename from src/scripts/java-ubuntu-install.sh rename to src/scripts/java-install.sh diff --git a/src/scripts/kibana-ubuntu-install.sh b/src/scripts/kibana-install.sh similarity index 99% rename from src/scripts/kibana-ubuntu-install.sh rename to src/scripts/kibana-install.sh index 0a179bb4..c1d7acbb 100644 --- a/src/scripts/kibana-ubuntu-install.sh +++ b/src/scripts/kibana-install.sh @@ -156,7 +156,7 @@ log "Kibana will talk to Elasticsearch over $ELASTICSEARCH_URL" 
download_kibana() { log "[download_kibana] Download Kibana $KIBANA_VERSION" - local DOWNLOAD_URL="https://artifacts.elastic.co/downloads/kibana/kibana-$KIBANA_VERSION-amd64.deb" + local DOWNLOAD_URL="https://artifacts.elastic.co/downloads/kibana/kibana-$KIBANA_VERSION-amd64.deb?ultron=msft&gambit=azure" log "[download_kibana] Download location $DOWNLOAD_URL" wget --retry-connrefused --waitretry=1 -q "$DOWNLOAD_URL" -O "kibana-$KIBANA_VERSION.deb" local EXIT_CODE=$? diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh new file mode 100644 index 00000000..a949d846 --- /dev/null +++ b/src/scripts/logstash-install.sh @@ -0,0 +1,314 @@ +#!/bin/bash + +# License: https://github.com/elastic/azure-marketplace/blob/master/LICENSE.txt +# +# Russ Cam (Elastic) +# + +export DEBIAN_FRONTEND=noninteractive + +######################### +# HELP +######################### + +help() +{ + echo "This script installs logstash on a dedicated VM in the elasticsearch ARM template cluster" + echo "Parameters:" + # TODO: Add parameters here + + echo "-h view this help content" +} + +# Custom logging with time so we can easily relate running times, also log to separate file so order is guaranteed. +# The Script extension output the stdout/err buffer in intervals with duplicates. +log() +{ + echo \[$(date +%d%m%Y-%H:%M:%S)\] "$1" + echo \[$(date +%d%m%Y-%H:%M:%S)\] "$1" >> /var/log/arm-install.log +} + +log "Begin execution of Logstash script extension on ${HOSTNAME}" +START_TIME=$SECONDS + +######################### +# Preconditions +######################### + +if [ "${UID}" -ne 0 ]; +then + log "Script executed without root permissions" + echo "You must be root to run this program." >&2 + exit 3 +fi + +if service --status-all | grep -Fq 'logstash'; then + log "Logstash already installed." 
+ + # TODO: Ability to stop, change configuration and restart + + exit 0 +fi + +######################### +# Parameter handling +######################### + +#Script Parameters +LOGSTASH_VERSION="6.2.4" +ELASTICSEARCH_URL="http://10.0.0.4:9200" +INSTALL_XPACK=0 +INSTALL_ADDITIONAL_PLUGINS="" +USER_LOGSTASH_PWD="changeme" +LOGSTASH_KEYSTORE_PWD="changeme" +LOGSTASH_CONF_FILE="" + +#Loop through options passed +while getopts :v:u:S:c:K:L:h optname; do + log "Option $optname set" + case $optname in + v) #logstash version number + LOGSTASH_VERSION="${OPTARG}" + ;; + u) #elasticsearch url + ELASTICSEARCH_URL="${OPTARG}" + ;; + S) #security logstash pwd + USER_LOGSTASH_PWD="${OPTARG}" + ;; + l) #install X-Pack + INSTALL_XPACK=1 + ;; + L) #install additional plugins + INSTALL_ADDITIONAL_PLUGINS="${OPTARG}" + ;; + c) #logstash configuration + LOGSTASH_CONF_FILE="${OPTARG}" + ;; + K) #logstash keystore password + LOGSTASH_KEYSTORE_PWD="${OPTARG}" + h) #show help + help + exit 2 + ;; + \?) #unrecognized option - show help + echo -e \\n"Option -${BOLD}$OPTARG${NORM} not allowed." + help + exit 2 + ;; + esac +done + +######################### +# Parameter state changes +######################### + +log "installing logstash $LOGSTASH_VERSION" +log "installing X-Pack plugins is set to: $INSTALL_XPACK" + +######################### +# Installation steps as functions +######################### + +# Install Oracle Java +install_java() +{ + bash java-ubuntu-install.sh +} + +# Install Logstash +install_logstash() +{ + local DOWNLOAD_URL="https://artifacts.elastic.co/downloads/logstash/logstash-$LOGSTASH_VERSION.deb?ultron=msft&gambit=azure" + + log "[install_logstash] installing Logstash $LOGSTASH_VERSION" + log "[install_logstash] download location - $DOWNLOAD_URL" + wget --retry-connrefused --waitretry=1 -q "$DOWNLOAD_URL" -O logstash.deb + local EXIT_CODE=$? 
+ if [ $EXIT_CODE -ne 0 ]; then + log "[install_logstash] error downloading Logstash $LOGSTASH_VERSION" + exit $EXIT_CODE + fi + log "[install_logstash] downloaded Logstash $LOGSTASH_VERSION" + dpkg -i logstash.deb + log "[install_logstash] installed Logstash $LOGSTASH_VERSION" +} + +## Security +##---------------------------------- + +add_keystore_or_env_var() +{ + local KEY=$1 + local VALUE="$2" + + if dpkg --compare-versions "$LOGSTASH_VERSION" ">=" "6.2.0"; then + # TODO: Should be set in /etc/sysconfig/logstash + # See https://www.elastic.co/guide/en/logstash/current/keystore.html#keystore-password + set +o history + export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" + set -o history + + # create it the keystore if it doesn't exist + if [[ ! -f /etc/logstash/logstash.keystore ]]; then + log "[configure_logstash] creating logstash keystore" + /usr/share/logstash/bin/logstash-keystore create + log "[configure_logstash] created logstash keystore" + fi + + log "[configuration_and_plugins] adding $KEY to logstash keystore" + echo "$VALUE" | /usr/share/logstash/bin/logstash-keystore add $KEY + log "[configuration_and_plugins] added $KEY logstash keystore" + else + log "[add_keystore_or_env_var] adding environment variable for $KEY" + set +o history + export $KEY="$VALUE" + set -o history + log "[add_keystore_or_env_var] added environment variable for $KEY" + fi +} + +configure_logstash() +{ + # backup the current config + local LOGSTASH_CONF=/etc/logstash/logstash.yml + mv $LOGSTASH_CONF $LOGSTASH_CONF.bak + + log "[configure_logstash] configuring logstash.yml" + + echo "node.name: \"${HOSTNAME}\"" >> $LOGSTASH_CONF + + # logstash conf file + if [[ -n "$LOGSTASH_CONF_FILE" ]]; then + local CONF_FILE=/etc/logstash/conf.d/logstash.conf + mv $CONF_FILE $CONF_FILE.bak + echo ${LOGSTASH_CONF_FILE} | base64 -d | tee $CONF_FILE + fi + + # allow values to be referenced in *.conf files + add_keystore_or_env_var 'LOGSTASH_SYSTEM_PASS' "$USER_LOGSTASH_PWD" + 
add_keystore_or_env_var 'ELASTICSEARCH_URL' "$ELASTICSEARCH_URL" + + # put data on the OS disk in a writable location + echo "path.data: /var/lib/logstash" >> $LOGSTASH_CONF + + # TODO: make configurable? + # echo "queue.type: persisted" >> $LOGSTASH_CONF + + # put log files on the OS disk in a writable location + local LOG_PATH=/var/log/logstash + mkdir -p $LOG_PATH + chown -R logstash: $LOG_PATH + echo "path.logs: $LOG_PATH" >> $LOGSTASH_CONF + echo "log.level: error" >> $LOGSTASH_CONF + + # install x-pack + if [ ${INSTALL_XPACK} -ne 0 ]; then + if dpkg --compare-versions "$LOGSTASH_VERSION" "<" "6.3.0"; then + log "[configure_logstash] installing x-pack plugin" + /usr/share/logstash/bin/logstash-plugin install x-pack + log "[configure_logstash] installed x-pack plugin" + fi + + echo "xpack.monitoring.elasticsearch.username: logstash_system" >> $LOGSTASH_CONF + # reference from env var or keystore + echo 'xpack.monitoring.elasticsearch.password: "${LOGSTASH_SYSTEM_PASS}"' >> $LOGSTASH_CONF + echo "xpack.monitoring.enabled: true" >> $LOGSTASH_CONF + fi + + # Additional yaml configuration + if [[ -n "$YAML_CONFIGURATION" ]]; then + log "[configure_logstash] include additional yaml configuration" + + local SKIP_LINES="node.name path.data path.logs " + SKIP_LINES+="xpack.monitoring.elasticsearch.username xpack.monitoring.elasticsearch.password " + SKIP_LINES+="xpack.monitoring.enabled " + local SKIP_REGEX="^\s*("$(echo $SKIP_LINES | tr " " "|" | sed 's/\./\\\./g')")" + IFS=$'\n' + for LINE in $(echo -e "$YAML_CONFIGURATION"); do + if [[ -n "$LINE" ]]; then + if [[ $LINE =~ $SKIP_REGEX ]]; then + log "[configure_logstash] Skipping line '$LINE'" + else + log "[configure_logstash] Adding line '$LINE' to $LOGSTASH_CONF" + echo "$LINE" >> $LOGSTASH_CONF + fi + fi + done + unset IFS + log "[configure_logstash] included additional yaml configuration" + log "[configure_logstash] run yaml lint on configuration" + install_yamllint + LINT=$(yamllint -d "{extends: relaxed, 
rules: {key-duplicates: {level: error}}}" $LOGSTASH_CONF; exit ${PIPESTATUS[0]}) + EXIT_CODE=$? + log "[configure_logstash] ran yaml lint (exit code $EXIT_CODE) $LINT" + if [ $EXIT_CODE -ne 0 ]; then + log "[configure_logstash] errors in yaml configuration. exiting" + exit 11 + fi + fi +} + +install_additional_plugins() +{ + SKIP_PLUGINS="x-pack" + log "[install_additional_plugins] installing additional plugins" + for PLUGIN in $(echo $INSTALL_ADDITIONAL_PLUGINS | tr ";" "\n") + do + if [[ $SKIP_PLUGINS =~ $PLUGIN ]]; then + log "[install_additional_plugins] skipping plugin $PLUGIN" + else + log "[install_additional_plugins] installing plugin $PLUGIN" + /usr/share/logstash/bin/logstash-plugin install $PLUGIN + log "[install_additional_plugins] installed plugin $PLUGIN" + fi + done + log "[install_additional_plugins] installed additional plugins" +} + +start_service() +{ + log "[start_service] starting logstash" + systemctl start logstash.service + log "[start_service] started logstash!" +} + +install_apt_package() +{ + local PACKAGE=$1 + if [ $(dpkg-query -W -f='${Status}' $PACKAGE 2>/dev/null | grep -c "ok installed") -eq 0 ]; then + log "[install_$PACKAGE] installing $PACKAGE" + (apt-get -yq install $PACKAGE || (sleep 15; apt-get -yq install $PACKAGE)) + log "[install_$PACKAGE] installed $PACKAGE" + fi +} + +install_yamllint() +{ + install_apt_package yamllint +} + +######################### +# Installation sequence +######################### + +log "[apt-get] updating apt-get" +(apt-get -y update || (sleep 15; apt-get -y update)) > /dev/null +log "[apt-get] updated apt-get" + +install_java + +install_logstash + +configuration_and_plugins + +# install additional plugins +if [[ -n "$INSTALL_ADDITIONAL_PLUGINS" ]]; then + install_additional_plugins +fi + +start_service + +ELAPSED_TIME=$(($SECONDS - $START_TIME)) +PRETTY=$(printf '%dh:%dm:%ds\n' $(($ELAPSED_TIME/3600)) $(($ELAPSED_TIME%3600/60)) $(($ELAPSED_TIME%60))) +log "End execution of Logstash script extension 
in ${PRETTY}" diff --git a/src/scripts/logstash-ubuntu-install.sh b/src/scripts/logstash-ubuntu-install.sh deleted file mode 100644 index eabe88f9..00000000 --- a/src/scripts/logstash-ubuntu-install.sh +++ /dev/null @@ -1,221 +0,0 @@ -#!/bin/bash - -# License: https://github.com/elastic/azure-marketplace/blob/master/LICENSE.txt -# -# Russ Cam (Elastic) -# - -export DEBIAN_FRONTEND=noninteractive - -######################### -# HELP -######################### - -help() -{ - echo "This script installs logstash on a dedicated VM in the elasticsearch ARM template cluster" - echo "Parameters:" - # TODO: Add parameters here - - echo "-h view this help content" -} - -# Custom logging with time so we can easily relate running times, also log to separate file so order is guaranteed. -# The Script extension output the stdout/err buffer in intervals with duplicates. -log() -{ - echo \[$(date +%d%m%Y-%H:%M:%S)\] "$1" - echo \[$(date +%d%m%Y-%H:%M:%S)\] "$1" >> /var/log/arm-install.log -} - -log "Begin execution of Logstash script extension on ${HOSTNAME}" -START_TIME=$SECONDS - -######################### -# Preconditions -######################### - -if [ "${UID}" -ne 0 ]; -then - log "Script executed without root permissions" - echo "You must be root to run this program." >&2 - exit 3 -fi - -if service --status-all | grep -Fq 'logstash'; then - log "Logstash already installed." 
- exit 0 -fi - -######################### -# Parameter handling -######################### - -#Script Parameters -LOGSTASH_VERSION="6.2.1" -ELASTICSEARCH_URL="http://10.0.0.4:9200" -INSTALL_XPACK=0 -INSTALL_ADDITIONAL_PLUGINS="" -USER_LOGSTASH_PWD="changeme" -LOGSTASH_KEYSTORE_PWD="changeme" -LOGSTASH_CONF_FILE="" - -#Loop through options passed -while getopts :v:u:S:c:K:L:h optname; do - log "Option $optname set" - case $optname in - v) #logstash version number - LOGSTASH_VERSION="${OPTARG}" - ;; - u) #elasticsearch url - ELASTICSEARCH_URL="${OPTARG}" - ;; - S) #security logstash pwd - USER_LOGSTASH_PWD="${OPTARG}" - ;; - l) #install X-Pack - INSTALL_XPACK=1 - ;; - L) #install additional plugins - INSTALL_ADDITIONAL_PLUGINS="${OPTARG}" - ;; - c) #logstash configuration - LOGSTASH_CONF_FILE="${OPTARG}" - ;; - K) #logstash keystore password - LOGSTASH_KEYSTORE_PWD="${OPTARG}" - h) #show help - help - exit 2 - ;; - \?) #unrecognized option - show help - echo -e \\n"Option -${BOLD}$OPTARG${NORM} not allowed." 
- help - exit 2 - ;; - esac -done -######################### -# Parameter state changes -######################### - -log "installing logstash $LOGSTASH_VERSION" -log "installing X-Pack plugins is set to: $INSTALL_XPACK" - -######################### -# Installation steps as functions -######################### - -download_install_deb() -{ - log "[download_install_deb] starting download of package" - local DOWNLOAD_URL="https://artifacts.elastic.co/downloads/logstash/logstash-$LOGSTASH_VERSION.deb" - curl -o "logstash-$LOGSTASH_VERSION.deb" "$DOWNLOAD_URL" - log "[download_install_deb] installing downloaded package" - dpkg -i "logstash-$LOGSTASH_VERSION.deb" -} - -## Security -##---------------------------------- - -add_keystore_or_env_var() -{ - if dpkg --compare-versions "$LOGSTASH_VERSION" ">=" "6.2.0"; then - log "[configuration_and_plugins] adding $1 to logstash keystore" - echo "$2" | bin/logstash-keystore add $1 - log "[configuration_and_plugins] added $1 logstash keystore" - else - log "[add_keystore_or_env_var] adding environment variable for $1" - set +o history - export $1="$2" - set -o history - log "[add_keystore_or_env_var] added environment variable for $1" - fi -} - -install_additional_plugins() -{ - SKIP_PLUGINS="x-pack" - log "[install_additional_plugins] installing additional plugins" - for PLUGIN in $(echo $INSTALL_ADDITIONAL_PLUGINS | tr ";" "\n") - do - if [[ $SKIP_PLUGINS =~ $PLUGIN ]]; then - log "[install_additional_plugins] skipping plugin $PLUGIN" - else - log "[install_additional_plugins] installing plugin $PLUGIN" - /usr/share/logstash/bin/logstash-plugin install $PLUGIN - log "[install_additional_plugins] installed plugin $PLUGIN" - fi - done - log "[install_additional_plugins] installed additional plugins" -} - -configuration_and_plugins() -{ - # backup the current config - local LOGSTASH_CONF=/etc/logstash/logstash.yml - mv "$LOGSTASH_CONF" "$LOGSTASH_CONF.bak" - - log "[configuration_and_plugins] configuring logstash.yml" - - mkdir 
-p /var/log/logstash - chown -R logstash: /var/log/logstash - - # logstash conf file - if [[ -n "$LOGSTASH_CONF_FILE" ]]; then - echo "$LOGSTASH_CONF_FILE" > /etc/logstash/conf.d/logstash.conf - fi - - # logstash keystore - if dpkg --compare-versions "$LOGSTASH_VERSION" ">=" "6.2.0"; then - set +o history - export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" - set -o history - log "[configuration_and_plugins] creating logstash keystore" - bin/logstash-keystore create - log "[configuration_and_plugins] created logstash keystore" - fi - - add_keystore_or_env_var 'LOGSTASH_SYSTEM_PASS' "$USER_LOGSTASH_PWD" - add_keystore_or_env_var 'ELASTICSEARCH_URL' "$ELASTICSEARCH_URL" - - # install x-pack - if [ ${INSTALL_XPACK} -ne 0 ]; then - echo "xpack.monitoring.elasticsearch.username: logstash_system" >> $LOGSTASH_CONF - echo 'xpack.monitoring.elasticsearch.password: ${LOGSTASH_SYSTEM_PASS}' >> $LOGSTASH_CONF - - log "[configuration_and_plugins] installing x-pack plugin" - /usr/share/logstash/bin/logstash-plugin install x-pack - log "[configuration_and_plugins] installed x-pack plugin" - fi - - # install additional plugins - if [[ -n "$INSTALL_ADDITIONAL_PLUGINS" ]]; then - install_additional_plugins - fi -} - -install_start_service() -{ - log "[install_start_service] starting logstash" - systemctl start logstash.service - log "[install_start_service] started logstash!" 
-} - -######################### -# Installation sequence -######################### - -log "[apt-get] updating apt-get" -(apt-get -y update || (sleep 15; apt-get -y update)) > /dev/null -log "[apt-get] updated apt-get" - -log "[install_sequence] Starting installation" -download_install_deb -configuration_and_plugins -install_start_service -log "[install_sequence] Finished installation" - -ELAPSED_TIME=$(($SECONDS - $START_TIME)) -PRETTY=$(printf '%dh:%dm:%ds\n' $(($ELAPSED_TIME/3600)) $(($ELAPSED_TIME%3600/60)) $(($ELAPSED_TIME%60))) -log "End execution of Logstash script extension in ${PRETTY}" diff --git a/src/settings/ubuntuSettings.json b/src/settings/ubuntuSettings.json index 8546f084..ded711b6 100644 --- a/src/settings/ubuntuSettings.json +++ b/src/settings/ubuntuSettings.json @@ -90,10 +90,11 @@ "commonShortOpts": "[concat(variables('dedicatedMasterNodesShortOpt'), variables('installPluginsShortOpt'), variables('installAzureCloudPluginShortOpt'), 'n ')]", "commonInstallParams": "[concat('\"', parameters('esSettings').clusterName, '\" -v \"', parameters('esSettings').version, '\" -m ', parameters('esSettings').heapSize,' -A \"', parameters('esSettings').securityAdminPwd, '\" -R \"', parameters('esSettings').securityReadPwd, '\" -K \"', parameters('esSettings').securityKibanaPwd, '\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -B \"', parameters('esSettings').securityBootstrapPwd, '\" -Z ', parameters('topologySettings').vmDataNodeCount,' -p \"', variables('namespacePrefix'), '\" -a \"', variables('azureCloudStorageName'), '\" -k \"', variables('azureCloudStorageKey'), '\" -E \"', variables('azureCloudStorageSuffix'), '\" -L \"', parameters('esSettings').installAdditionalPlugins, '\" -C \"', replace(parameters('esSettings').yamlConfiguration, '\"', '\\\"'), '\" -D \"', parameters('topologySettings').vNetLoadBalancerIp, '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', 
parameters('esSettings').httpCaCertBlob, '\" -J \"', parameters('esSettings').httpCaCertPassword, '\" -T \"', parameters('esSettings').transportCaCertBlob, '\" -W \"', parameters('esSettings').transportCaCertPassword, '\" -N \"', parameters('esSettings').transportCertPassword, '\" -O \"', parameters('esSettings').samlMetadataUri, '\" -P \"', variables('kibanaDomainName'), '\"')]", "ubuntuScripts": [ - "[concat(parameters('templateBaseUrl'), 'scripts/elasticsearch-ubuntu-install.sh')]", + "[concat(parameters('templateBaseUrl'), 'scripts/elasticsearch-install.sh')]", "[concat(parameters('templateBaseUrl'), 'scripts/kibana-install.sh')]", - "[concat(parameters('templateBaseUrl'), 'scripts/vm-disk-utils-0.1.sh')]" - "[concat(parameters('templateBaseUrl'), 'scripts/java-ubuntu-install.sh')]" + "[concat(parameters('templateBaseUrl'), 'scripts/logstash-install.sh')]", + "[concat(parameters('templateBaseUrl'), 'scripts/vm-disk-utils-0.1.sh')]", + "[concat(parameters('templateBaseUrl'), 'scripts/java-install.sh')]" ], "ubuntuSkus": { "5": "16.04-LTS", @@ -118,7 +119,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash elasticsearch-ubuntu-install.sh -x', variables('commonShortOpts'), variables('commonInstallParams'))]" + "commandToExecute": "[concat('bash elasticsearch-install.sh -x', variables('commonShortOpts'), variables('commonInstallParams'))]" } }, "client": { @@ -130,7 +131,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash elasticsearch-ubuntu-install.sh -y', variables('commonShortOpts'), variables('commonInstallParams'))]" + "commandToExecute": "[concat('bash elasticsearch-install.sh -y', variables('commonShortOpts'), variables('commonInstallParams'))]" } }, "data": { @@ -142,7 +143,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash elasticsearch-ubuntu-install.sh -', 
variables('dataNodeShortOpt'), variables('commonShortOpts'), variables('commonInstallParams'))]" + "commandToExecute": "[concat('bash elasticsearch-install.sh -', variables('dataNodeShortOpt'), variables('commonShortOpts'), variables('commonInstallParams'))]" } }, "kibana": { @@ -166,7 +167,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash logstash-ubuntu-install.sh -', variables('installPluginsShortOpt'), 'n \"', parameters('esSettings').clusterName, '\" -v \"', parameters('esSettings').logstashVersion, '\" -e \"', parameters('esSettings').version, '\" -u \"', concat('http://', parameters('topologySettings').vNetLoadBalancerIp, ':9200') ,'\" -S \"', parameters('esSettings').securityLogstashPwd, '\"')]" + "commandToExecute": "[concat('bash logstash-install.sh -', variables('installPluginsShortOpt'), 'v \"', parameters('esSettings').logstashVersion, '\" -u \"', concat('http://', parameters('topologySettings').vNetLoadBalancerIp, ':9200') ,'\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -L \"', parameters('topologySettings').logstashPlugins, '\" -c \"', parameters('topologySettings').logstashConf, '\" -K \"', parameters('topologySettings').logstashKeystorePwd, '\"')]" } } } From 7b7bfbb4a23fc92564247c88f6b3056ffcb18402 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 14 Sep 2018 16:44:20 +1000 Subject: [PATCH 05/31] Add logstash linked template WIP --- src/machines/logstash-resources.json | 159 +++++++++++++++++++++++++++ src/partials/node-resources.json | 40 +++++++ src/scripts/logstash-install.sh | 146 ++++++++++++++++++------ src/settings/ubuntuSettings.json | 5 +- 4 files changed, 317 insertions(+), 33 deletions(-) create mode 100644 src/machines/logstash-resources.json diff --git a/src/machines/logstash-resources.json b/src/machines/logstash-resources.json new file mode 100644 index 00000000..49ece7fb --- /dev/null +++ b/src/machines/logstash-resources.json @@ -0,0 +1,159 @@ +{ + 
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "templateBaseUrl": { + "type": "string", + "metadata": { + "description": "Base uri of resources" + } + }, + "location": { + "type": "string", + "metadata": { + "description": "Location where resources will be provisioned" + } + }, + "namespace": { + "type": "string", + "metadata": { + "description": "The unique namespace for the Kibana VM" + } + }, + "networkSettings": { + "type": "object", + "metadata": { + "description": "Network settings" + } + }, + "credentials": { + "type": "secureObject", + "metadata": { + "description": "Credentials information block" + } + }, + "osSettings": { + "type": "object", + "metadata": { + "description": "Platform and OS settings" + } + }, + "vmSize": { + "type": "string", + "defaultValue": "Standard_D1", + "metadata": { + "description": "Size of the Logstash VM" + } + }, + "elasticTags": { + "type": "object", + "defaultValue": { + "provider": "648D2193-0CE0-4EFB-8A82-AF9792184FD9" + }, + "metadata": { + "description": "Unique identifiers to allow the Azure Infrastructure to understand the origin of resources deployed to Azure. You do not need to supply a value for this." 
+ } + } + }, + "variables": { + "namespace": "[parameters('namespace')]", + "subnetId": "[concat(resourceId(parameters('networkSettings').resourceGroup, 'Microsoft.Network/virtualNetworks', parameters('networkSettings').name), '/subnets/', parameters('networkSettings').subnet.name)]", + "nicName": "[concat(variables('namespace'), '-nic')]", + "password_osProfile": { + "computername": "[parameters('namespace')]", + "adminUsername": "[parameters('credentials').adminUsername]", + "adminPassword": "[parameters('credentials').password]" + }, + "sshPublicKey_osProfile": { + "computername": "[parameters('namespace')]", + "adminUsername": "[parameters('credentials').adminUsername]", + "linuxConfiguration": { + "disablePasswordAuthentication": "true", + "ssh": { + "publicKeys": [ + { + "path": "[concat('/home/', parameters('credentials').adminUsername, '/.ssh/authorized_keys')]", + "keyData": "[parameters('credentials').sshPublicKey]" + } + ] + } + } + }, + "osProfile": "[variables(concat(parameters('credentials').authenticationType, '_osProfile'))]" + }, + "resources": [ + { + "apiVersion": "2017-10-01", + "type": "Microsoft.Network/networkInterfaces", + "name": "[variables('nicName')]", + "location": "[parameters('location')]", + "tags": { + "provider": "[toUpper(parameters('elasticTags').provider)]" + }, + "dependsOn": [ + ], + "properties": { + "ipConfigurations": [ + { + "name": "ipconfig1", + "properties": { + "privateIPAllocationMethod": "Dynamic", + "subnet": { + "id": "[variables('subnetId')]" + } + } + } + ] + } + }, + { + "apiVersion": "2017-12-01", + "type": "Microsoft.Compute/virtualMachines", + "name": "[parameters('namespace')]", + "location": "[parameters('location')]", + "tags": { + "provider": "[toUpper(parameters('elasticTags').provider)]" + }, + "dependsOn": [ + "[concat('Microsoft.Network/networkInterfaces/', variables('nicName'))]" + ], + "properties": { + "hardwareProfile": { + "vmSize": "[parameters('vmSize')]" + }, + "osProfile": 
"[variables('osProfile')]", + "storageProfile": { + "imageReference": "[parameters('osSettings').imageReference]", + "osDisk": { + "name": "osdisk", + "managedDisk": { + "storageAccountType": "Standard_LRS" + }, + "caching": "ReadWrite", + "createOption": "FromImage" + } + }, + "networkProfile": { + "networkInterfaces": [ + { + "id": "[resourceId('Microsoft.Network/networkInterfaces', variables('nicName'))]" + } + ] + } + }, + "resources": [ + { + "type": "Microsoft.Compute/virtualMachines/extensions", + "name": "[concat(variables('namespace'), '/script')]", + "apiVersion": "2017-12-01", + "location": "[parameters('location')]", + "dependsOn": [ + "[concat('Microsoft.Compute/virtualMachines/', parameters('namespace'))]" + ], + "properties": "[parameters('osSettings').extensionSettings.logstash]" + } + ] + } + ], + "outputs": {} +} diff --git a/src/partials/node-resources.json b/src/partials/node-resources.json index 56de3ef1..6df38716 100644 --- a/src/partials/node-resources.json +++ b/src/partials/node-resources.json @@ -400,6 +400,46 @@ } } } + }, + { + "condition": "[equals(parameters('topologySettings').logstash, 'Yes')]", + "name": "logstash", + "type": "Microsoft.Resources/deployments", + "apiVersion": "2017-05-10", + "dependsOn": [], + "properties": { + "mode": "Incremental", + "templateLink": { + "uri": "[concat(parameters('templateBaseUrl'), 'machines/logstash-resources.json')]", + "contentVersion": "1.0.0.0" + }, + "parameters": { + "templateBaseUrl": { + "value": "[parameters('templateBaseUrl')]" + }, + "credentials": { + "value": "[parameters('commonVmSettings').credentials]" + }, + "location": { + "value": "[parameters('commonVmSettings').location]" + }, + "namespace": { + "value": "[concat(parameters('commonVmSettings').namespacePrefix, 'logstash')]" + }, + "networkSettings": { + "value": "[parameters('networkSettings')]" + }, + "osSettings": { + "value": "[parameters('osSettings')]" + }, + "vmSize": { + "value": 
"[parameters('topologySettings').vmSizeLogstash]" + }, + "elasticTags": { + "value": "[parameters('elasticTags')]" + } + } + } } ], "outputs": { diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index a949d846..32916160 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -42,34 +42,35 @@ then exit 3 fi -if service --status-all | grep -Fq 'logstash'; then - log "Logstash already installed." - - # TODO: Ability to stop, change configuration and restart - - exit 0 -fi - ######################### # Parameter handling ######################### #Script Parameters LOGSTASH_VERSION="6.2.4" +LOGSTASH_HEAP=0 ELASTICSEARCH_URL="http://10.0.0.4:9200" INSTALL_XPACK=0 INSTALL_ADDITIONAL_PLUGINS="" USER_LOGSTASH_PWD="changeme" LOGSTASH_KEYSTORE_PWD="changeme" LOGSTASH_CONF_FILE="" +YAML_CONFIGURATION="" +HTTP_CERT="" +HTTP_CERT_PASSWORD="" +HTTP_CACERT="" +HTTP_CACERT_PASSWORD="" #Loop through options passed -while getopts :v:u:S:c:K:L:h optname; do +while getopts :v:m:u:S:H:G:V:J:L:c:K:Y:lh optname; do log "Option $optname set" case $optname in v) #logstash version number LOGSTASH_VERSION="${OPTARG}" ;; + m) #heap_size + LOGSTASH_HEAP=${OPTARG} + ;; u) #elasticsearch url ELASTICSEARCH_URL="${OPTARG}" ;; @@ -79,14 +80,30 @@ while getopts :v:u:S:c:K:L:h optname; do l) #install X-Pack INSTALL_XPACK=1 ;; + H) #Elasticsearch certificate + HTTP_CERT="${OPTARG}" + ;; + G) #Elasticsearch certificate password + HTTP_CERT_PASSWORD="${OPTARG}" + ;; + V) #Elasticsearch CA certificate + HTTP_CACERT="${OPTARG}" + ;; + J) #Elasticsearch CA certificate password + HTTP_CACERT_PASSWORD="${OPTARG}" + ;; L) #install additional plugins INSTALL_ADDITIONAL_PLUGINS="${OPTARG}" ;; - c) #logstash configuration + c) #logstash configuration file LOGSTASH_CONF_FILE="${OPTARG}" ;; K) #logstash keystore password LOGSTASH_KEYSTORE_PWD="${OPTARG}" + ;; + Y) #logstash additional yml configuration + YAML_CONFIGURATION="${OPTARG}" + ;; h) #show help 
help exit 2 @@ -99,13 +116,6 @@ while getopts :v:u:S:c:K:L:h optname; do esac done -######################### -# Parameter state changes -######################### - -log "installing logstash $LOGSTASH_VERSION" -log "installing X-Pack plugins is set to: $INSTALL_XPACK" - ######################### # Installation steps as functions ######################### @@ -149,16 +159,16 @@ add_keystore_or_env_var() export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" set -o history - # create it the keystore if it doesn't exist + # create keystore if it doesn't exist if [[ ! -f /etc/logstash/logstash.keystore ]]; then - log "[configure_logstash] creating logstash keystore" + log "[add_keystore_or_env_var] creating logstash keystore" /usr/share/logstash/bin/logstash-keystore create - log "[configure_logstash] created logstash keystore" + log "[add_keystore_or_env_var] created logstash keystore" fi - log "[configuration_and_plugins] adding $KEY to logstash keystore" + log "[add_keystore_or_env_var] adding $KEY to logstash keystore" echo "$VALUE" | /usr/share/logstash/bin/logstash-keystore add $KEY - log "[configuration_and_plugins] added $KEY logstash keystore" + log "[add_keystore_or_env_var] added $KEY logstash keystore" else log "[add_keystore_or_env_var] adding environment variable for $KEY" set +o history @@ -168,13 +178,15 @@ add_keystore_or_env_var() fi } -configure_logstash() +configure_logstash_yaml() { - # backup the current config local LOGSTASH_CONF=/etc/logstash/logstash.yml + local SSL_PATH=/etc/logstash/ssl + + # backup the current config mv $LOGSTASH_CONF $LOGSTASH_CONF.bak - log "[configure_logstash] configuring logstash.yml" + log "[configure_logstash_yaml] configuring logstash.yml" echo "node.name: \"${HOSTNAME}\"" >> $LOGSTASH_CONF @@ -182,11 +194,12 @@ configure_logstash() if [[ -n "$LOGSTASH_CONF_FILE" ]]; then local CONF_FILE=/etc/logstash/conf.d/logstash.conf mv $CONF_FILE $CONF_FILE.bak + log "[configure_logstash_yaml] writing logstash conf to 
$CONF_FILE" echo ${LOGSTASH_CONF_FILE} | base64 -d | tee $CONF_FILE fi # allow values to be referenced in *.conf files - add_keystore_or_env_var 'LOGSTASH_SYSTEM_PASS' "$USER_LOGSTASH_PWD" + add_keystore_or_env_var 'LOGSTASH_SYSTEM_PASSWORD' "$USER_LOGSTASH_PWD" add_keystore_or_env_var 'ELASTICSEARCH_URL' "$ELASTICSEARCH_URL" # put data on the OS disk in a writable location @@ -205,17 +218,63 @@ configure_logstash() # install x-pack if [ ${INSTALL_XPACK} -ne 0 ]; then if dpkg --compare-versions "$LOGSTASH_VERSION" "<" "6.3.0"; then - log "[configure_logstash] installing x-pack plugin" + log "[configure_logstash_yaml] installing x-pack plugin" /usr/share/logstash/bin/logstash-plugin install x-pack - log "[configure_logstash] installed x-pack plugin" + log "[configure_logstash_yaml] installed x-pack plugin" fi + # configure monitoring + echo 'xpack.monitoring.elasticsearch.url: "${ELASTICSEARCH_URL}"' >> $LOGSTASH_CONF echo "xpack.monitoring.elasticsearch.username: logstash_system" >> $LOGSTASH_CONF - # reference from env var or keystore - echo 'xpack.monitoring.elasticsearch.password: "${LOGSTASH_SYSTEM_PASS}"' >> $LOGSTASH_CONF + echo 'xpack.monitoring.elasticsearch.password: "${LOGSTASH_SYSTEM_PASSWORD}"' >> $LOGSTASH_CONF echo "xpack.monitoring.enabled: true" >> $LOGSTASH_CONF fi + # Make the HTTP CA cert for communication with Elasticsearch available to + # Logstash conf files through ${ELASTICSEARCH_CACERT} + if [[ -n "${HTTP_CERT}" || -n "${HTTP_CACERT}" && ${INSTALL_XPACK} -ne 0 ]]; then + [ -d $SSL_PATH ] || mkdir -p $SSL_PATH + + if [[ -n "${HTTP_CERT}" ]]; then + # convert PKCS#12 certificate to PEM format + log "[configure_logstash_yaml] Save PKCS#12 archive for Elasticsearch HTTP to file" + echo ${HTTP_CERT} | base64 -d | tee $SSL_PATH/elasticsearch-http.p12 + log "[configure_logstash_yaml] Extract CA cert from PKCS#12 archive for Elasticsearch HTTP" + echo "$HTTP_CERT_PASSWORD" | openssl pkcs12 -in $SSL_PATH/elasticsearch-http.p12 -out 
$SSL_PATH/elasticsearch-http-ca.crt -cacerts -nokeys -chain -passin stdin + + log "[configure_logstash_yaml] Configuring ELASTICSEARCH_CACERT for Elasticsearch TLS" + if [[ $(stat -c %s $SSL_PATH/elasticsearch-http-ca.crt 2>/dev/null) -eq 0 ]]; then + log "[configure_logstash_yaml] No CA cert extracted from HTTP cert. Cannot make ELASTICSEARCH_CACERT available to conf files" + else + log "[configure_logstash_yaml] CA cert extracted from HTTP PKCS#12 archive. Make ELASTICSEARCH_CACERT available to conf files" + add_keystore_or_env_var "ELASTICSEARCH_CACERT" "$SSL_PATH/elasticsearch-http-ca.crt" + echo 'xpack.monitoring.elasticsearch.ssl.ca: "${ELASTICSEARCH_CACERT}"' >> $LOGSTASH_CONF + fi + + else + + # convert PKCS#12 CA certificate to PEM format + local HTTP_CACERT_FILENAME=elasticsearch-http-ca.p12 + log "[configure_logstash_yaml] Save PKCS#12 archive for Elasticsearch HTTP CA to file" + echo ${HTTP_CACERT} | base64 -d | tee $SSL_PATH/$HTTP_CACERT_FILENAME + log "[configure_logstash_yaml] Convert PKCS#12 archive for Elasticsearch HTTP CA to PEM format" + echo "$HTTP_CACERT_PASSWORD" | openssl pkcs12 -in $SSL_PATH/$HTTP_CACERT_FILENAME -out $SSL_PATH/elasticsearch-http-ca.crt -clcerts -nokeys -chain -passin stdin + + log "[configure_logstash_yaml] Configuring ELASTICSEARCH_CACERT for Elasticsearch TLS" + if [[ $(stat -c %s $SSL_PATH/elasticsearch-http-ca.crt 2>/dev/null) -eq 0 ]]; then + log "[configure_logstash_yaml] No CA cert extracted from HTTP CA PKCS#12 archive. Cannot make ELASTICSEARCH_CACERT available to conf files" + else + log "[configure_logstash_yaml] CA cert extracted from HTTP CA PKCS#12 archive. 
Make ELASTICSEARCH_CACERT available to conf files" + add_keystore_or_env_var "ELASTICSEARCH_CACERT" "$SSL_PATH/elasticsearch-http-ca.crt" + echo 'xpack.monitoring.elasticsearch.ssl.ca: "${ELASTICSEARCH_CACERT}"' >> $LOGSTASH_CONF + fi + fi + chown -R logstash: $SSL_PATH + log "[configure_logstash_yaml] Configured ELASTICSEARCH_CACERT for Elasticsearch TLS" + fi + + # TODO: Configure Pipeline Management? + # Additional yaml configuration if [[ -n "$YAML_CONFIGURATION" ]]; then log "[configure_logstash] include additional yaml configuration" @@ -249,6 +308,15 @@ configure_logstash() fi } +configure_logstash() +{ + if [[ "$LOGSTASH_HEAP" -ne "0" ]]; then + log "[configure_logstash] configure logstash heap size - $LOGSTASH_HEAP" + sed -i -e "s/^\-Xmx.*/-Xmx${LOGSTASH_HEAP}m/" /etc/logstash/jvm.options + sed -i -e "s/^\-Xms.*/-Xms${LOGSTASH_HEAP}m/" /etc/logstash/jvm.options + fi +} + install_additional_plugins() { SKIP_PLUGINS="x-pack" @@ -292,6 +360,20 @@ install_yamllint() # Installation sequence ######################### +if systemctl -q is-enabled logstash.service; then + log "logstash already installed." 
+ + configure_logstash_yaml + + # restart logstash if config has changed + cmp --silent /etc/logstash/logstash.yml /etc/logstash/logstash.yml.bak \ + || systemctl reload-or-restart logstash.service + + exit 0 +fi + +log "installing logstash $LOGSTASH_VERSION" +log "installing X-Pack plugins is set to: $INSTALL_XPACK" log "[apt-get] updating apt-get" (apt-get -y update || (sleep 15; apt-get -y update)) > /dev/null log "[apt-get] updated apt-get" @@ -300,7 +382,9 @@ install_java install_logstash -configuration_and_plugins +configure_logstash_yaml + +configure_logstash # install additional plugins if [[ -n "$INSTALL_ADDITIONAL_PLUGINS" ]]; then diff --git a/src/settings/ubuntuSettings.json b/src/settings/ubuntuSettings.json index ded711b6..60eb36fb 100644 --- a/src/settings/ubuntuSettings.json +++ b/src/settings/ubuntuSettings.json @@ -64,6 +64,7 @@ "nodesTemplateUrl": "[concat(parameters('templateBaseUrl'), 'partials/node-resources.json')]", "namespacePrefix": "[parameters('topologySettings').vmHostNamePrefix]", "kibanaDomainName": "[if(not(empty(parameters('esSettings').samlMetadataUri)),if(not(empty(parameters('esSettings').samlServiceProviderUri)),parameters('esSettings').samlServiceProviderUri,parameters('kibanaIp')),'')]", + "loadBalancerIp": "[concat(if(equals(parameters('networkSettings').https, 'Yes'), 'https', 'http'), '://', parameters('topologySettings').vNetLoadBalancerIp, ':9200')]", "dataNodeShortOpts": { "No": "z", "Yes": "" @@ -155,7 +156,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash kibana-install.sh -', variables('installPluginsShortOpt'), 'n \"', parameters('esSettings').clusterName, '\" -v \"', parameters('esSettings').version, '\" -u \"', concat(if(equals(parameters('networkSettings').https, 'Yes'), 'https', 'http'), '://', parameters('topologySettings').vNetLoadBalancerIp, ':9200') ,'\" -S \"', parameters('esSettings').securityKibanaPwd, '\" -C \"', 
parameters('topologySettings').kibanaCertBlob, '\" -K \"', parameters('topologySettings').kibanaKeyBlob, '\" -P \"', parameters('topologySettings').kibanaKeyPassphrase, '\" -Y \"', replace(parameters('topologySettings').kibanaYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword, '\" -U \"', variables('kibanaDomainName'), '\"')]" + "commandToExecute": "[concat('bash kibana-install.sh -', variables('installPluginsShortOpt'), 'n \"', parameters('esSettings').clusterName, '\" -v \"', parameters('esSettings').version, '\" -u \"', variables('loadBalancerIp'),'\" -S \"', parameters('esSettings').securityKibanaPwd, '\" -C \"', parameters('topologySettings').kibanaCertBlob, '\" -K \"', parameters('topologySettings').kibanaKeyBlob, '\" -P \"', parameters('topologySettings').kibanaKeyPassphrase, '\" -Y \"', replace(parameters('topologySettings').kibanaYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword, '\" -U \"', variables('kibanaDomainName'), '\"')]" } }, "logstash": { @@ -167,7 +168,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash logstash-install.sh -', variables('installPluginsShortOpt'), 'v \"', parameters('esSettings').logstashVersion, '\" -u \"', concat('http://', parameters('topologySettings').vNetLoadBalancerIp, ':9200') ,'\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -L \"', parameters('topologySettings').logstashPlugins, '\" -c \"', parameters('topologySettings').logstashConf, '\" -K \"', parameters('topologySettings').logstashKeystorePwd, '\"')]" + "commandToExecute": "[concat('bash logstash-install.sh -', 
variables('installPluginsShortOpt'), 'v \"', parameters('esSettings').logstashVersion, '\" -m ', parameters('topologySettings').logstashHeapSize, ' -u \"', variables('loadBalancerIp'),'\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -L \"', parameters('topologySettings').logstashPlugins, '\" -c \"', parameters('topologySettings').logstashConf, '\" -K \"', parameters('topologySettings').logstashKeystorePwd, '\" -Y \"', replace(parameters('topologySettings').logstashYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword)]" } } } From d81ad982c20d27afe9787772b952af38440e5422 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 14 Sep 2018 16:45:20 +1000 Subject: [PATCH 06/31] Update links --- README.md | 10 +++++----- parameters/password.parameters.json | 2 +- parameters/ssh.parameters.json | 2 +- src/mainTemplate.json | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 5fa64268..0bb7d2a2 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ Transport Layer Security. ![Example UI Flow](images/ui.gif) -You can view the UI in developer mode by [clicking here](https://portal.azure.com/#blade/Microsoft_Azure_Compute/CreateMultiVmWizardBlade/internal_bladeCallId/anything/internal_bladeCallerParams/{"initialData":{},"providerConfig":{"createUiDefinition":"https%3A%2F%2Fraw.githubusercontent.com%2Felastic%2Fazure-marketplace%2Fmaster%2Fsrc%2FcreateUiDefinition.json"}}). 
If you feel something is cached improperly use [this client unoptimized link instead](https://portal.azure.com/?clientOptimizations=false#blade/Microsoft_Azure_Compute/CreateMultiVmWizardBlade/internal_bladeCallId/anything/internal_bladeCallerParams/{"initialData":{},"providerConfig":{"createUiDefinition":"https%3A%2F%2Fraw.githubusercontent.com%2Felastic%2Fazure-marketplace%2Fmaster%2Fsrc%2FcreateUiDefinition.json"}}) +You can view the UI in developer mode by [clicking here](https://portal.azure.com/#blade/Microsoft_Azure_Compute/CreateMultiVmWizardBlade/internal_bladeCallId/anything/internal_bladeCallerParams/{"initialData":{},"providerConfig":{"createUiDefinition":"https%3A%2F%2Fraw.githubusercontent.com%2Felastic%2Fazure-marketplace%2Ffeature%2Flogstash%2Fsrc%2FcreateUiDefinition.json"}}). If you feel something is cached improperly use [this client unoptimized link instead](https://portal.azure.com/?clientOptimizations=false#blade/Microsoft_Azure_Compute/CreateMultiVmWizardBlade/internal_bladeCallId/anything/internal_bladeCallerParams/{"initialData":{},"providerConfig":{"createUiDefinition":"https%3A%2F%2Fraw.githubusercontent.com%2Felastic%2Fazure-marketplace%2Ffeature%2Flogstash%2Fsrc%2FcreateUiDefinition.json"}}) ## Reporting bugs @@ -465,7 +465,7 @@ in conjunction with other parameters. ### Web based deploy - + Deploy to Azure @@ -499,7 +499,7 @@ supported by the last release. It's recommended to update to [Azure CLI 2.0](htt ```sh az group deployment create \ --resource-group \ - --template-uri https://raw.githubusercontent.com/elastic/azure-marketplace/master/src/mainTemplate.json \ + --template-uri https://raw.githubusercontent.com/elastic/azure-marketplace/feature/logstash/src/mainTemplate.json \ --parameters @parameters/password.parameters.json ``` @@ -523,7 +523,7 @@ where `` refers to the resource group you just created. 
```powershell $clusterParameters = @{ - "artifactsBaseUrl"="https://raw.githubusercontent.com/elastic/azure-marketplace/master/src" + "artifactsBaseUrl"="https://raw.githubusercontent.com/elastic/azure-marketplace/feature/logstash/src" "esVersion" = "6.3.0" "esClusterName" = "elasticsearch" "loadBalancerType" = "internal" @@ -546,7 +546,7 @@ where `` refers to the resource group you just created. 5. Use our template directly from GitHub ```powershell - New-AzureRmResourceGroupDeployment -Name "" -ResourceGroupName "" -TemplateUri "https://raw.githubusercontent.com/elastic/azure-marketplace/master/src/mainTemplate.json" -TemplateParameterObject $clusterParameters + New-AzureRmResourceGroupDeployment -Name "" -ResourceGroupName "" -TemplateUri "https://raw.githubusercontent.com/elastic/azure-marketplace/feature/logstash/src/mainTemplate.json" -TemplateParameterObject $clusterParameters ``` ## Targeting a specific template version diff --git a/parameters/password.parameters.json b/parameters/password.parameters.json index 271729f4..1afe0def 100644 --- a/parameters/password.parameters.json +++ b/parameters/password.parameters.json @@ -1,5 +1,5 @@ { - "artifactsBaseUrl":{"value":"https://raw.githubusercontent.com/elastic/azure-marketplace/master/src"}, + "artifactsBaseUrl":{"value":"https://raw.githubusercontent.com/elastic/azure-marketplace/feature/logstash/src"}, "esVersion":{"value":"6.2.4"}, "esClusterName":{"value":"my-azure-cluster"}, "loadBalancerType":{"value":"internal"}, diff --git a/parameters/ssh.parameters.json b/parameters/ssh.parameters.json index 093dd3db..6ed7016b 100644 --- a/parameters/ssh.parameters.json +++ b/parameters/ssh.parameters.json @@ -1,5 +1,5 @@ { - "artifactsBaseUrl":{"value":"https://raw.githubusercontent.com/elastic/azure-marketplace/master/src"}, + "artifactsBaseUrl":{"value":"https://raw.githubusercontent.com/elastic/azure-marketplace/feature/logstash/src"}, "esVersion":{"value":"6.2.4"}, "esClusterName":{"value":"my-azure-cluster"}, 
"loadBalancerType":{"value":"internal"}, diff --git a/src/mainTemplate.json b/src/mainTemplate.json index de54e088..1c1bde2d 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -4,7 +4,7 @@ "parameters": { "artifactsBaseUrl": { "type": "string", - "defaultValue": "https://raw.githubusercontent.com/elastic/azure-marketplace/master/src", + "defaultValue": "https://raw.githubusercontent.com/elastic/azure-marketplace/feature/logstash/src", "metadata": { "artifactsBaseUrl": "Base URL of the Elastic template gallery package" } From e1075929254cd5206937034732447cb3049cac50 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 14 Sep 2018 17:03:28 +1000 Subject: [PATCH 07/31] Change default Logstash size --- src/mainTemplate.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/mainTemplate.json b/src/mainTemplate.json index 1c1bde2d..f72bebeb 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -363,7 +363,7 @@ }, "vmSizeLogstash": { "type": "string", - "defaultValue": "Standard_A2", + "defaultValue": "Standard_D1", "allowedValues": [ "Standard_A2", "Standard_A3", From 91628e8f0203d306b95115610b7c324a04b6d85b Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 14 Sep 2018 17:10:56 +1000 Subject: [PATCH 08/31] Use ES version for Logstash version --- src/settings/ubuntuSettings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/settings/ubuntuSettings.json b/src/settings/ubuntuSettings.json index 60eb36fb..2f32c207 100644 --- a/src/settings/ubuntuSettings.json +++ b/src/settings/ubuntuSettings.json @@ -168,7 +168,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash logstash-install.sh -', variables('installPluginsShortOpt'), 'v \"', parameters('esSettings').logstashVersion, '\" -m ', parameters('topologySettings').logstashHeapSize, ' -u \"', variables('loadBalancerIp'),'\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -L \"', 
parameters('topologySettings').logstashPlugins, '\" -c \"', parameters('topologySettings').logstashConf, '\" -K \"', parameters('topologySettings').logstashKeystorePwd, '\" -Y \"', replace(parameters('topologySettings').logstashYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword)]" + "commandToExecute": "[concat('bash logstash-install.sh -', variables('installPluginsShortOpt'), 'v \"', parameters('esSettings').version, '\" -m ', parameters('topologySettings').logstashHeapSize, ' -u \"', variables('loadBalancerIp'),'\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -L \"', parameters('topologySettings').logstashPlugins, '\" -c \"', parameters('topologySettings').logstashConf, '\" -K \"', parameters('topologySettings').logstashKeystorePwd, '\" -Y \"', replace(parameters('topologySettings').logstashYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword)]" } } } From ca043e4cb9a233b3bb9a9ea9a41694a507a8b267 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 14 Sep 2018 17:36:15 +1000 Subject: [PATCH 09/31] Unique OS disk names --- src/machines/jumpbox-resources.json | 4 ++-- src/machines/kibana-resources.json | 2 +- src/machines/logstash-resources.json | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/machines/jumpbox-resources.json b/src/machines/jumpbox-resources.json index ed934144..98f346f0 100644 --- a/src/machines/jumpbox-resources.json +++ b/src/machines/jumpbox-resources.json @@ -159,7 +159,7 @@ { "apiVersion": "2017-12-01", "type": "Microsoft.Compute/virtualMachines", - "name": "[concat(variables('namespace'), '-0')]", + "name": "[variables('namespace')]", "location": 
"[parameters('location')]", "tags": { "provider": "[toUpper(parameters('elasticTags').provider)]" @@ -175,7 +175,7 @@ "storageProfile": { "imageReference": "[parameters('osSettings').imageReference]", "osDisk": { - "name": "osdisk", + "name": "[concat(variables('namespace'), '-osdisk')]", "managedDisk": { "storageAccountType": "Standard_LRS" }, diff --git a/src/machines/kibana-resources.json b/src/machines/kibana-resources.json index 6f282522..08995f56 100644 --- a/src/machines/kibana-resources.json +++ b/src/machines/kibana-resources.json @@ -181,7 +181,7 @@ "storageProfile": { "imageReference": "[parameters('osSettings').imageReference]", "osDisk": { - "name": "osdisk", + "name": "[concat(variables('namespace'), '-osdisk')]", "managedDisk": { "storageAccountType": "Standard_LRS" }, diff --git a/src/machines/logstash-resources.json b/src/machines/logstash-resources.json index 49ece7fb..708cb157 100644 --- a/src/machines/logstash-resources.json +++ b/src/machines/logstash-resources.json @@ -125,7 +125,7 @@ "storageProfile": { "imageReference": "[parameters('osSettings').imageReference]", "osDisk": { - "name": "osdisk", + "name": "[concat(variables('namespace'), '-osdisk')]", "managedDisk": { "storageAccountType": "Standard_LRS" }, From 429bd66631ab64350c38f48a07216abcaf01d2e4 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 14 Sep 2018 18:11:07 +1000 Subject: [PATCH 10/31] Closing quote --- src/settings/ubuntuSettings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/settings/ubuntuSettings.json b/src/settings/ubuntuSettings.json index 2f32c207..e42d3ee0 100644 --- a/src/settings/ubuntuSettings.json +++ b/src/settings/ubuntuSettings.json @@ -168,7 +168,7 @@ "fileUris": "[variables('ubuntuScripts')]" }, "protectedSettings": { - "commandToExecute": "[concat('bash logstash-install.sh -', variables('installPluginsShortOpt'), 'v \"', parameters('esSettings').version, '\" -m ', parameters('topologySettings').logstashHeapSize, ' -u \"', 
variables('loadBalancerIp'),'\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -L \"', parameters('topologySettings').logstashPlugins, '\" -c \"', parameters('topologySettings').logstashConf, '\" -K \"', parameters('topologySettings').logstashKeystorePwd, '\" -Y \"', replace(parameters('topologySettings').logstashYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword)]" + "commandToExecute": "[concat('bash logstash-install.sh -', variables('installPluginsShortOpt'), 'v \"', parameters('esSettings').version, '\" -m ', parameters('topologySettings').logstashHeapSize, ' -u \"', variables('loadBalancerIp'), '\" -S \"', parameters('esSettings').securityLogstashPwd, '\" -L \"', parameters('topologySettings').logstashPlugins, '\" -c \"', parameters('topologySettings').logstashConf, '\" -K \"', parameters('topologySettings').logstashKeystorePwd, '\" -Y \"', replace(parameters('topologySettings').logstashYaml, '\"', '\\\"'), '\" -H \"', parameters('esSettings').httpCertBlob,'\" -G \"', parameters('esSettings').httpCertPassword, '\" -V \"', parameters('esSettings').httpCaCertBlob,'\" -J \"', parameters('esSettings').httpCaCertPassword, '\"')]" } } } From 425c78a5314e3415a8c0e0916140ac4327b9737f Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Mon, 24 Sep 2018 11:08:59 +1000 Subject: [PATCH 11/31] Use correct script name --- src/scripts/elasticsearch-install.sh | 13 +++++++++---- src/scripts/logstash-install.sh | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/src/scripts/elasticsearch-install.sh b/src/scripts/elasticsearch-install.sh index 29e0157f..2bc87397 100644 --- a/src/scripts/elasticsearch-install.sh +++ b/src/scripts/elasticsearch-install.sh @@ -344,7 +344,7 @@ check_data_disk() # Install Oracle Java install_java() { - bash java-ubuntu-install.sh + bash 
java-install.sh } # Install Elasticsearch @@ -1130,10 +1130,15 @@ install_yamllint() install_ntp() { - log "[install_ntp] installing ntp daemon" - (apt-get -yq install ntp || (sleep 15; apt-get -yq install ntp)) + install_apt_package ntp + install_apt_package ntpdate + + if [ $(systemctl -q is-active ntp) ]; then + service ntp stop + fi + ntpdate pool.ntp.org - log "[install_ntp] installed ntp daemon and ntpdate" + service ntp start } install_monit() diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index 32916160..ae244db1 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -123,7 +123,7 @@ done # Install Oracle Java install_java() { - bash java-ubuntu-install.sh + bash java-install.sh } # Install Logstash From fbcb4a6d26fcd8038e7a3425c4b4ea66f5db5638 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Mon, 24 Sep 2018 12:48:43 +1000 Subject: [PATCH 12/31] Set path.settings when calling logstash scripts --- src/scripts/logstash-install.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index ae244db1..30a45a7b 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -152,7 +152,7 @@ add_keystore_or_env_var() local KEY=$1 local VALUE="$2" - if dpkg --compare-versions "$LOGSTASH_VERSION" ">=" "6.2.0"; then + if dpkg --compare-versions "$LOGSTASH_VERSION" "ge" "6.2.0"; then # TODO: Should be set in /etc/sysconfig/logstash # See https://www.elastic.co/guide/en/logstash/current/keystore.html#keystore-password set +o history @@ -162,12 +162,12 @@ add_keystore_or_env_var() # create keystore if it doesn't exist if [[ ! 
-f /etc/logstash/logstash.keystore ]]; then log "[add_keystore_or_env_var] creating logstash keystore" - /usr/share/logstash/bin/logstash-keystore create + /usr/share/logstash/bin/logstash-keystore create --path.settings /etc/logstash log "[add_keystore_or_env_var] created logstash keystore" fi log "[add_keystore_or_env_var] adding $KEY to logstash keystore" - echo "$VALUE" | /usr/share/logstash/bin/logstash-keystore add $KEY + echo "$VALUE" | /usr/share/logstash/bin/logstash-keystore add $KEY --path.settings /etc/logstash log "[add_keystore_or_env_var] added $KEY logstash keystore" else log "[add_keystore_or_env_var] adding environment variable for $KEY" @@ -219,7 +219,7 @@ configure_logstash_yaml() if [ ${INSTALL_XPACK} -ne 0 ]; then if dpkg --compare-versions "$LOGSTASH_VERSION" "<" "6.3.0"; then log "[configure_logstash_yaml] installing x-pack plugin" - /usr/share/logstash/bin/logstash-plugin install x-pack + /usr/share/logstash/bin/logstash-plugin install x-pack --path.settings /etc/logstash log "[configure_logstash_yaml] installed x-pack plugin" fi @@ -327,7 +327,7 @@ install_additional_plugins() log "[install_additional_plugins] skipping plugin $PLUGIN" else log "[install_additional_plugins] installing plugin $PLUGIN" - /usr/share/logstash/bin/logstash-plugin install $PLUGIN + /usr/share/logstash/bin/logstash-plugin install $PLUGIN --path.settings /etc/logstash log "[install_additional_plugins] installed plugin $PLUGIN" fi done From b4def4e213169f0a1a60e4814dcf98f05c95e3c7 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Mon, 24 Sep 2018 14:28:16 +1000 Subject: [PATCH 13/31] Check if logstash.yml exists before trying to move --- src/scripts/logstash-install.sh | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index 30a45a7b..4ae10e29 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -184,7 +184,10 @@ configure_logstash_yaml() 
local SSL_PATH=/etc/logstash/ssl # backup the current config - mv $LOGSTASH_CONF $LOGSTASH_CONF.bak + if [[ -f $LOGSTASH_CONF ]]; then + log "[configure_logstash_yaml] moving $LOGSTASH_CONF to $LOGSTASH_CONF.bak" + mv $LOGSTASH_CONF $LOGSTASH_CONF.bak + fi log "[configure_logstash_yaml] configuring logstash.yml" @@ -203,6 +206,7 @@ configure_logstash_yaml() add_keystore_or_env_var 'ELASTICSEARCH_URL' "$ELASTICSEARCH_URL" # put data on the OS disk in a writable location + # TODO: Consider allowing attached managed disk in future echo "path.data: /var/lib/logstash" >> $LOGSTASH_CONF # TODO: make configurable? @@ -217,9 +221,9 @@ configure_logstash_yaml() # install x-pack if [ ${INSTALL_XPACK} -ne 0 ]; then - if dpkg --compare-versions "$LOGSTASH_VERSION" "<" "6.3.0"; then + if dpkg --compare-versions "$LOGSTASH_VERSION" "lt" "6.3.0"; then log "[configure_logstash_yaml] installing x-pack plugin" - /usr/share/logstash/bin/logstash-plugin install x-pack --path.settings /etc/logstash + /usr/share/logstash/bin/logstash-plugin install x-pack log "[configure_logstash_yaml] installed x-pack plugin" fi @@ -327,7 +331,7 @@ install_additional_plugins() log "[install_additional_plugins] skipping plugin $PLUGIN" else log "[install_additional_plugins] installing plugin $PLUGIN" - /usr/share/logstash/bin/logstash-plugin install $PLUGIN --path.settings /etc/logstash + /usr/share/logstash/bin/logstash-plugin install $PLUGIN log "[install_additional_plugins] installed plugin $PLUGIN" fi done @@ -360,8 +364,8 @@ install_yamllint() # Installation sequence ######################### -if systemctl -q is-enabled logstash.service; then - log "logstash already installed." +if systemctl -q is-active logstash.service; then + log "logstash already installed and running. 
reconfigure and restart if logstash.yml has changed" configure_logstash_yaml From 3438622bc2b83ea865539eb0718f524b31c9c0b0 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Mon, 24 Sep 2018 15:49:35 +1000 Subject: [PATCH 14/31] Persist environment variables in /etc/sysconfig/logstash --- src/scripts/logstash-install.sh | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index 4ae10e29..270e8bde 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -151,12 +151,17 @@ add_keystore_or_env_var() { local KEY=$1 local VALUE="$2" + local SYS_CONFIG=/etc/sysconfig/logstash + + if [[ ! -f $SYS_CONFIG ]]; then + touch $SYS_CONFIG + chmod 600 $SYS_CONFIG + fi if dpkg --compare-versions "$LOGSTASH_VERSION" "ge" "6.2.0"; then - # TODO: Should be set in /etc/sysconfig/logstash - # See https://www.elastic.co/guide/en/logstash/current/keystore.html#keystore-password set +o history export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" + echo "LOGSTASH_KEYSTORE_PASS=\"$LOGSTASH_KEYSTORE_PWD\"" >> $SYS_CONFIG set -o history # create keystore if it doesn't exist @@ -173,6 +178,7 @@ add_keystore_or_env_var() log "[add_keystore_or_env_var] adding environment variable for $KEY" set +o history export $KEY="$VALUE" + echo "$KEY=\"$VALUE\"" >> $SYS_CONFIG set -o history log "[add_keystore_or_env_var] added environment variable for $KEY" fi @@ -196,7 +202,12 @@ configure_logstash_yaml() # logstash conf file if [[ -n "$LOGSTASH_CONF_FILE" ]]; then local CONF_FILE=/etc/logstash/conf.d/logstash.conf - mv $CONF_FILE $CONF_FILE.bak + + if [[ -f $CONF_FILE ]]; then + log "[configure_logstash_yaml] moving $CONF_FILE to $CONF_FILE.bak" + mv $CONF_FILE $CONF_FILE.bak + fi + log "[configure_logstash_yaml] writing logstash conf to $CONF_FILE" echo ${LOGSTASH_CONF_FILE} | base64 -d | tee $CONF_FILE fi From 059c73f2e8504adc011ffff0c7dd5b3303177e0a Mon Sep 17 00:00:00 2001 From: 
Russ Cam Date: Mon, 24 Sep 2018 16:21:14 +1000 Subject: [PATCH 15/31] create /etc/sysconfig dir --- src/scripts/logstash-install.sh | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index 270e8bde..74be3723 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -151,17 +151,18 @@ add_keystore_or_env_var() { local KEY=$1 local VALUE="$2" - local SYS_CONFIG=/etc/sysconfig/logstash + local SYS_CONFIG=/etc/sysconfig if [[ ! -f $SYS_CONFIG ]]; then - touch $SYS_CONFIG - chmod 600 $SYS_CONFIG + [ -d $SYS_CONFIG ] || mkdir -p $SYS_CONFIG + touch $SYS_CONFIG/logstash + chmod 600 $SYS_CONFIG/logstash fi if dpkg --compare-versions "$LOGSTASH_VERSION" "ge" "6.2.0"; then set +o history export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" - echo "LOGSTASH_KEYSTORE_PASS=\"$LOGSTASH_KEYSTORE_PWD\"" >> $SYS_CONFIG + echo "LOGSTASH_KEYSTORE_PASS=\"$LOGSTASH_KEYSTORE_PWD\"" >> $SYS_CONFIG/logstash set -o history # create keystore if it doesn't exist @@ -178,7 +179,7 @@ add_keystore_or_env_var() log "[add_keystore_or_env_var] adding environment variable for $KEY" set +o history export $KEY="$VALUE" - echo "$KEY=\"$VALUE\"" >> $SYS_CONFIG + echo "$KEY=\"$VALUE\"" >> $SYS_CONFIG/logstash set -o history log "[add_keystore_or_env_var] added environment variable for $KEY" fi From a9c2c7a26dc84af6dd9b698cfa66a50099143f31 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Tue, 25 Sep 2018 13:14:06 +1000 Subject: [PATCH 16/31] Add parameter descriptions to Logstash script --- src/scripts/elasticsearch-install.sh | 6 ++--- src/scripts/logstash-install.sh | 34 ++++++++++++++++++++-------- 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/src/scripts/elasticsearch-install.sh b/src/scripts/elasticsearch-install.sh index 2bc87397..5c09e087 100644 --- a/src/scripts/elasticsearch-install.sh +++ b/src/scripts/elasticsearch-install.sh @@ -1133,12 +1133,12 @@ 
install_ntp() install_apt_package ntp install_apt_package ntpdate - if [ $(systemctl -q is-active ntp) ]; then - service ntp stop + if systemctl -q is-active ntp.service; then + systemctl stop ntp.service fi ntpdate pool.ntp.org - service ntp start + systemctl start ntp.service } install_monit() diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index 74be3723..19d8d3e5 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -13,9 +13,24 @@ export DEBIAN_FRONTEND=noninteractive help() { - echo "This script installs logstash on a dedicated VM in the elasticsearch ARM template cluster" + echo "This script installs Logstash on a dedicated Ubuntu VM" echo "Parameters:" - # TODO: Add parameters here + echo "-v Logstash version e.g. 6.2.2" + echo "-m heap size in megabytes to allocate to JVM" + echo "-u Elasticsearch URL to configure monitoring and make available to configuration through ELASTICSEARCH_URL variable" + + echo "-S logstash_system user password" + echo "-l whether to install X-Pack plugins (or enable trial license in 6.3.0+)" + + echo "-H base64 encoded PKCS#12 archive (.p12/.pfx) containing the key and certificate used to secure the Elasticsearch HTTP layer" + echo "-G password for PKCS#12 archive (.p12/.pfx) containing the key and certificate used to secure the Elasticsearch HTTP layer" + echo "-V base64 encoded PKCS#12 archive (.p12/.pfx) containing the CA key and certificate used to secure the Elasticsearch HTTP layer" + echo "-J password for PKCS#12 archive (.p12/.pfx) containing the CA key and certificate used to secure the Elasticsearch HTTP layer" + + echo "-L install additional plugins" + echo "-c base 64 encoded Logstash conf file" + echo "-K Logstash keystore password for Logstash 6.2.0+" + echo "-Y additional yaml configuration" echo "-h view this help content" } @@ -160,13 +175,13 @@ add_keystore_or_env_var() fi if dpkg --compare-versions "$LOGSTASH_VERSION" "ge" "6.2.0"; then - set +o 
history - export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" - echo "LOGSTASH_KEYSTORE_PASS=\"$LOGSTASH_KEYSTORE_PWD\"" >> $SYS_CONFIG/logstash - set -o history - # create keystore if it doesn't exist if [[ ! -f /etc/logstash/logstash.keystore ]]; then + set +o history + export LOGSTASH_KEYSTORE_PASS="$LOGSTASH_KEYSTORE_PWD" + echo "LOGSTASH_KEYSTORE_PASS=\"$LOGSTASH_KEYSTORE_PWD\"" >> $SYS_CONFIG/logstash + set -o history + log "[add_keystore_or_env_var] creating logstash keystore" /usr/share/logstash/bin/logstash-keystore create --path.settings /etc/logstash log "[add_keystore_or_env_var] created logstash keystore" @@ -221,7 +236,7 @@ configure_logstash_yaml() # TODO: Consider allowing attached managed disk in future echo "path.data: /var/lib/logstash" >> $LOGSTASH_CONF - # TODO: make configurable? + # TODO: make persistent queues configurable? # echo "queue.type: persisted" >> $LOGSTASH_CONF # put log files on the OS disk in a writable location @@ -289,7 +304,8 @@ configure_logstash_yaml() log "[configure_logstash_yaml] Configured ELASTICSEARCH_CACERT for Elasticsearch TLS" fi - # TODO: Configure Pipeline Management? + # TODO: Configure Centralized Pipeline Management? 
+ # https://www.elastic.co/guide/en/logstash/current/configuring-centralized-pipelines.html # Additional yaml configuration if [[ -n "$YAML_CONFIGURATION" ]]; then From 23510a0649345947081b0a857a1fe061513af763 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Wed, 26 Sep 2018 16:24:59 +1000 Subject: [PATCH 17/31] logstash integration test --- build/arm-tests/1d-0m-0c-int-klp.json | 21 +++++++ build/arm-tests/3d-0m-0c-ags-ks.json | 2 +- build/arm-tests/3d-0m-0c-ags-tls.json | 8 +-- build/arm-tests/3d-0m-0c-agw-ks.json | 2 +- build/arm-tests/3d-0m-0c-ext-tls-kp.json | 8 +-- build/arm-tests/3d-0m-0c-ext-tls-pass-kp.json | 8 +-- build/conf/logstash-tls.conf | 20 ++++++ build/conf/logstash.conf | 18 ++++++ build/tasks/arm-validator.js | 61 +++++++++++++++---- parameters/password.parameters.json | 7 +++ parameters/ssh.parameters.json | 7 +++ src/mainTemplate.json | 2 +- 12 files changed, 137 insertions(+), 27 deletions(-) create mode 100644 build/arm-tests/1d-0m-0c-int-klp.json create mode 100644 build/conf/logstash-tls.conf create mode 100644 build/conf/logstash.conf diff --git a/build/arm-tests/1d-0m-0c-int-klp.json b/build/arm-tests/1d-0m-0c-int-klp.json new file mode 100644 index 00000000..225022ab --- /dev/null +++ b/build/arm-tests/1d-0m-0c-int-klp.json @@ -0,0 +1,21 @@ +{ + "description": "1 data node cluster with logstash", + "isValid" : true, + "deploy" : true, + "why" : "", + "location" : "westeurope", + "parameters" : { + "loadBalancerType":{"value":"external"}, + "kibana":{"value":"No"}, + "logstash":{"value":"Yes"}, + "logstashAdditionalPlugins":{"value":"logstash-input-heartbeat"}, + "logstashConf":{"value":"conf/logstash.conf"}, + "vmSizeDataNodes":{"value":"Standard_DS1_v2"}, + "vmDataNodeCount":{"value":1}, + "vmDataDiskCount":{"value":1}, + "vmDataDiskSize":{"value":"Small"}, + "storageAccountType":{"value":"Default"}, + "dataNodesAreMasterEligible":{"value":"Yes"}, + "authenticationType":{"value":"password"} + } +} diff --git 
a/build/arm-tests/3d-0m-0c-ags-ks.json b/build/arm-tests/3d-0m-0c-ags-ks.json index 0dffeff7..544e790e 100644 --- a/build/arm-tests/3d-0m-0c-ags-ks.json +++ b/build/arm-tests/3d-0m-0c-ags-ks.json @@ -30,7 +30,7 @@ "appGatewayTier": {"value":"Standard"}, "appGatewaySku": {"value":"Small"}, "appGatewayCount": {"value":1}, - "appGatewayCertBlob": {"value":"cert-with-password.pfx"}, + "appGatewayCertBlob": {"value":"certs/cert-with-password.pfx"}, "appGatewayCertPassword": {"value":"Password123"}, "appGatewayWafStatus": {"value":"Disabled"}, "appGatewayWafMode": {"value":"Detection"} diff --git a/build/arm-tests/3d-0m-0c-ags-tls.json b/build/arm-tests/3d-0m-0c-ags-tls.json index 0c456c02..0e217499 100644 --- a/build/arm-tests/3d-0m-0c-ags-tls.json +++ b/build/arm-tests/3d-0m-0c-ags-tls.json @@ -5,8 +5,8 @@ "location" : "westeurope", "parameters" : { "loadBalancerType":{"value":"gateway"}, - "esHttpCertBlob":{"value":"cert-no-password.pfx"}, - "esTransportCaCertBlob":{"value":"ca-cert-no-password.pfx"}, + "esHttpCertBlob":{"value":"certs/cert-no-password.pfx"}, + "esTransportCaCertBlob":{"value":"certs/ca-cert-no-password.pfx"}, "kibana":{"value":"No"}, "jumpbox":{"value":"Yes"}, "vmSizeDataNodes":{"value":"Standard_D1"}, @@ -31,9 +31,9 @@ "appGatewayTier": {"value":"Standard"}, "appGatewaySku": {"value":"Small"}, "appGatewayCount": {"value":1}, - "appGatewayCertBlob": {"value":"cert-with-password.pfx"}, + "appGatewayCertBlob": {"value":"certs/cert-with-password.pfx"}, "appGatewayCertPassword": {"value":"Password123"}, - "appGatewayEsHttpCertBlob": {"value":"cert-no-password.crt"}, + "appGatewayEsHttpCertBlob": {"value":"certs/cert-no-password.crt"}, "appGatewayWafStatus": {"value":"Disabled"}, "appGatewayWafMode": {"value":"Detection"} } diff --git a/build/arm-tests/3d-0m-0c-agw-ks.json b/build/arm-tests/3d-0m-0c-agw-ks.json index 0ae77a13..baeb60c2 100644 --- a/build/arm-tests/3d-0m-0c-agw-ks.json +++ b/build/arm-tests/3d-0m-0c-agw-ks.json @@ -30,7 +30,7 @@ 
"appGatewayTier": {"value":"WAF"}, "appGatewaySku": {"value":"Medium"}, "appGatewayCount": {"value":2}, - "appGatewayCertBlob": {"value":"cert-with-password.pfx"}, + "appGatewayCertBlob": {"value":"certs/cert-with-password.pfx"}, "appGatewayCertPassword": {"value":"Password123"}, "appGatewayWafStatus": {"value":"Enabled"}, "appGatewayWafMode": {"value":"Detection"} diff --git a/build/arm-tests/3d-0m-0c-ext-tls-kp.json b/build/arm-tests/3d-0m-0c-ext-tls-kp.json index 131a701b..d23e3206 100644 --- a/build/arm-tests/3d-0m-0c-ext-tls-kp.json +++ b/build/arm-tests/3d-0m-0c-ext-tls-kp.json @@ -5,11 +5,11 @@ "location" : "westeurope", "parameters" : { "loadBalancerType":{"value":"external"}, - "esHttpCertBlob":{"value":"cert-no-password.pfx"}, - "esTransportCaCertBlob":{"value":"ca-cert-no-password.pfx"}, + "esHttpCertBlob":{"value":"certs/cert-no-password.pfx"}, + "esTransportCaCertBlob":{"value":"certs/ca-cert-no-password.pfx"}, "kibana":{"value":"Yes"}, - "kibanaCertBlob": {"value":"cert-no-password.crt"}, - "kibanaKeyBlob": {"value":"cert-no-password.key"}, + "kibanaCertBlob": {"value":"certs/cert-no-password.crt"}, + "kibanaKeyBlob": {"value":"certs/cert-no-password.key"}, "jumpbox":{"value":"No"}, "vmSizeKibana":{"value":"Standard_D1"}, "vmSizeDataNodes":{"value":"Standard_D1"}, diff --git a/build/arm-tests/3d-0m-0c-ext-tls-pass-kp.json b/build/arm-tests/3d-0m-0c-ext-tls-pass-kp.json index d0e83a41..1f49a2dd 100644 --- a/build/arm-tests/3d-0m-0c-ext-tls-pass-kp.json +++ b/build/arm-tests/3d-0m-0c-ext-tls-pass-kp.json @@ -5,14 +5,14 @@ "location" : "westeurope", "parameters" : { "loadBalancerType":{"value":"external"}, - "esHttpCertBlob":{"value":"cert-with-password.pfx"}, + "esHttpCertBlob":{"value":"certs/cert-with-password.pfx"}, "esHttpCertPassword":{"value":"Password123"}, - "esTransportCaCertBlob":{"value":"ca-cert-with-password.pfx"}, + "esTransportCaCertBlob":{"value":"certs/ca-cert-with-password.pfx"}, "esTransportCaCertPassword":{"value":"Password123"}, 
"esTransportCertPassword":{"value":"Password1234"}, "kibana":{"value":"Yes"}, - "kibanaCertBlob":{"value":"cert-with-password.crt"}, - "kibanaKeyBlob":{"value":"cert-with-password.key"}, + "kibanaCertBlob":{"value":"certs/cert-with-password.crt"}, + "kibanaKeyBlob":{"value":"certs/cert-with-password.key"}, "kibanaKeyPassphrase": { "value": "Password123" }, "jumpbox":{"value":"No"}, "vmSizeKibana":{"value":"Standard_D1"}, diff --git a/build/conf/logstash-tls.conf b/build/conf/logstash-tls.conf new file mode 100644 index 00000000..daeb3414 --- /dev/null +++ b/build/conf/logstash-tls.conf @@ -0,0 +1,20 @@ +input { + heartbeat { + id => "heartbeat" + interval => 1 + } +} + +filter { +} + +output { + elasticsearch { + hosts => ["${ELASTICSEARCH_URL}"] + user => "elastic" + password => "Password123" + index => "heartbeat" + ssl => true + cacert => "${ELASTICSEARCH_CACERT}" + } +} diff --git a/build/conf/logstash.conf b/build/conf/logstash.conf new file mode 100644 index 00000000..1b145810 --- /dev/null +++ b/build/conf/logstash.conf @@ -0,0 +1,18 @@ +input { + heartbeat { + id => "heartbeat" + interval => 1 + } +} + +filter { +} + +output { + elasticsearch { + hosts => ["${ELASTICSEARCH_URL}"] + user => "elastic" + password => "Password123" + index => "heartbeat" + } +} diff --git a/build/tasks/arm-validator.js b/build/tasks/arm-validator.js index d41b899e..dd8f63b6 100644 --- a/build/tasks/arm-validator.js +++ b/build/tasks/arm-validator.js @@ -39,17 +39,17 @@ var bootstrapTest = (t, defaultVersion) => { var test = require("../arm-tests/" + t); - // replace cert parameters with values with base64 encoded certs - [ - "esHttpCertBlob", + // replace parameters with values with base64 encoded values + [ "esHttpCertBlob", "esHttpCaCertBlob", "esTransportCaCertBlob", "kibanaCertBlob", "kibanaKeyBlob", "appGatewayCertBlob", - "appGatewayEsHttpCertBlob"].forEach(k => { + "appGatewayEsHttpCertBlob", + "logstashConf"].forEach(k => { if (test.parameters[k] && 
test.parameters[k].value) { - var cert = fs.readFileSync("certs/" + test.parameters[k].value); + var cert = fs.readFileSync(test.parameters[k].value); test.parameters[k].value = new Buffer(cert).toString("base64"); } }); @@ -322,8 +322,14 @@ var sanityCheckDeployment = (test, stdout, cb) => { if (stdout) { var outputs = JSON.parse(stdout).properties.outputs; - if (outputs.loadbalancer.value !== "N/A") - checks.push(()=> sanityCheckExternalLoadBalancer(test, "external loadbalancer", outputs.loadbalancer.value, allChecked)); + if (outputs.loadbalancer.value !== "N/A") { + checks.push(()=> sanityCheckExternalLoadBalancer(test, "external loadbalancer", outputs.loadbalancer.value, allChecked)); + + // logstash can be checked with external loadbalancer + // TODO: support checking through Application Gateway and Kibana + if (t.params.logstash.value === "Yes") + checks.push(()=> sanityCheckLogstash(test, outputs.loadbalancer.value, allChecked)); + } if (outputs.kibana.value !== "N/A") checks.push(()=> sanityCheckKibana(test, outputs.kibana.value, allChecked)); } @@ -361,10 +367,7 @@ var sanityCheckApplicationGateway = (test, cb) => { }); } -var sanityCheckExternalLoadBalancer = (test, loadbalancerType, url, cb) => { - var t = armTests[test]; - var rg = t.resourceGroup; - log(`checking ${loadbalancerType} ${url} in resource group: ${rg}`); +var createLoadBalancerRequestOptions = (t, loadbalancerType) => { var opts = { json: true, auth: { username: "elastic", password: config.deployments.securityPassword }, @@ -390,6 +393,14 @@ var sanityCheckExternalLoadBalancer = (test, loadbalancerType, url, cb) => { } } + return opts; +} + +var sanityCheckExternalLoadBalancer = (test, loadbalancerType, url, cb) => { + var t = armTests[test]; + var rg = t.resourceGroup; + log(`checking ${loadbalancerType} ${url} in resource group: ${rg}`); + var opts = createLoadBalancerRequestOptions(t, loadbalancerType); request(url, opts, (error, response, body) => { if (!error && response.statusCode 
== 200) { log(test, `loadBalancerResponse: ${JSON.stringify(body, null, 2)}`); @@ -421,7 +432,7 @@ var sanityCheckExternalLoadBalancer = (test, loadbalancerType, url, cb) => { //bailout(error || new error(m)); cb(); } - }) + }); } var sanityCheckKibana = (test, url, cb) => { @@ -457,6 +468,32 @@ var sanityCheckKibana = (test, url, cb) => { }); } +var sanityCheckLogstash = (test, url, cb) => { + var t = armTests[test]; + var rg = t.resourceGroup; + log(`checking logstash is sending events in resource group: ${rg}`); + var opts = createLoadBalancerRequestOptions(t, "external"); + + request(`${url}/heartbeat/_count`, opts, (error, response, body) => { + if (!error && response.statusCode == 200) { + var count = (body) ? body.count : -1; + if (count >= 0) { + log(`logstash sent ${count} events in resource group: ${rg}`); + cb(); + } + else { + log(`logstash not sent any events in resource group: ${rg}`); + cb(); + } + } + else { + log(test, `error counting logstash events: error: ${error}`); + cb(); + } + }); + +} + var deployTemplate = (test, cb) => { var t = armTests[test]; if (!t.isValid || !t.deploy) return; diff --git a/parameters/password.parameters.json b/parameters/password.parameters.json index 1afe0def..d0c9b194 100644 --- a/parameters/password.parameters.json +++ b/parameters/password.parameters.json @@ -25,6 +25,13 @@ "kibanaKeyBlob":{"value":""}, "kibanaKeyPassphrase":{"value":""}, "kibanaAdditionalYaml": { "value":""}, + "logstash": { "value":"No"}, + "vmSizeLogstash": { "value":"Standard_D1"}, + "logstashHeapSize": { "value": 0 }, + "logstashConf": { "value": "" }, + "logstashKeystorePassword": { "value": "" }, + "logstashAdditionalPlugins": { "value": "" }, + "logstashAdditionalYaml": { "value": "" }, "jumpbox":{"value":"No"}, "vmHostNamePrefix":{"value":""}, "vmSizeDataNodes":{"value":"Standard_D1"}, diff --git a/parameters/ssh.parameters.json b/parameters/ssh.parameters.json index 6ed7016b..0412de86 100644 --- a/parameters/ssh.parameters.json +++ 
b/parameters/ssh.parameters.json @@ -25,6 +25,13 @@ "kibanaKeyBlob":{"value":""}, "kibanaKeyPassphrase":{"value":""}, "kibanaAdditionalYaml": { "value":""}, + "logstash": { "value":"No"}, + "vmSizeLogstash": { "value":"Standard_D1"}, + "logstashHeapSize": { "value": 0 }, + "logstashConf": { "value": "" }, + "logstashKeystorePassword": { "value": "" }, + "logstashAdditionalPlugins": { "value": "" }, + "logstashAdditionalYaml": { "value": "" }, "jumpbox":{"value":"No"}, "vmHostNamePrefix":{"value":""}, "vmSizeDataNodes":{"value":"Standard_D1"}, diff --git a/src/mainTemplate.json b/src/mainTemplate.json index f72bebeb..cc7d475f 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -433,7 +433,7 @@ "type": "securestring", "defaultValue": "", "metadata": { - "description": "base 64 Logstash configuration." + "description": "base 64 encoded form of a Logstash conf file to deploy." } }, "logstashKeystorePassword": { From 8ea13f045d1ebdb12ed0bf954faf6cd66474f518 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Wed, 26 Sep 2018 16:47:55 +1000 Subject: [PATCH 18/31] Generate logstash keystore password if not provided --- build/tasks/arm-validator.js | 2 +- src/createUiDefinition.json | 7 +++++++ src/mainTemplate.json | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/build/tasks/arm-validator.js b/build/tasks/arm-validator.js index dd8f63b6..a4e8d598 100644 --- a/build/tasks/arm-validator.js +++ b/build/tasks/arm-validator.js @@ -487,7 +487,7 @@ var sanityCheckLogstash = (test, url, cb) => { } } else { - log(test, `error counting logstash events: error: ${error}`); + log(test, `statusCode: ${response.statusCode}, error: ${error}\ncheckLogstashEventCountResponse: ${JSON.stringify(body ? 
body : {}, null, 2)}`); cb(); } }); diff --git a/src/createUiDefinition.json b/src/createUiDefinition.json index 8471f83c..bc73604e 100644 --- a/src/createUiDefinition.json +++ b/src/createUiDefinition.json @@ -1212,6 +1212,13 @@ "kibanaCertBlob": "", "kibanaKeyPassphrase": "", "kibanaAdditionalYaml": "", + "logstash": "No", + "vmSizeLogstash": "Standard_D1", + "logstashHeapSize": 0, + "logstashConf": "", + "logstashKeystorePassword": "", + "logstashAdditionalPlugins": "", + "logstashAdditionalYaml": "", "jumpbox": "[steps('externalAccessStep').jumpbox]", "vmSizeDataNodes": "[steps('nodesStep').dataNodes.vmSizeDataNodes]", "vmDataDiskCount": "[int(steps('nodesStep').dataNodesDisks.vmDataDiskCount)]", diff --git a/src/mainTemplate.json b/src/mainTemplate.json index cc7d475f..dbfd7122 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -1635,7 +1635,7 @@ "logstashConf": "[parameters('logstashConf')]", "logstashPlugins": "[parameters('logstashAdditionalPlugins')]", "logstashYaml": "[parameters('logstashAdditionalYaml')]", - "logstashKeystorePwd": "[parameters('logstashKeystorePassword')]", + "logstashKeystorePwd": "[if(not(empty(parameters('logstashKeystorePassword'))),parameters('logstashKeystorePassword'),uniqueString(resourceGroup().id, deployment().name, parameters('securityLogstashPassword')))]", "jumpbox": "[parameters('jumpbox')]", "dataNodeStorageSettings": { "accountType": "[variables('resolvedStorageAccountType')]", From 62adb098b2df7ecd6af55b3b390e65db6d1f0ec2 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Wed, 26 Sep 2018 19:44:37 +1000 Subject: [PATCH 19/31] Poll for logstash event index --- build/arm-tests/1d-0m-0c-int-klp.json | 4 ++- build/tasks/arm-validator.js | 36 ++++++++++++++++----------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/build/arm-tests/1d-0m-0c-int-klp.json b/build/arm-tests/1d-0m-0c-int-klp.json index 225022ab..c36841b8 100644 --- a/build/arm-tests/1d-0m-0c-int-klp.json +++ 
b/build/arm-tests/1d-0m-0c-int-klp.json @@ -6,8 +6,10 @@ "location" : "westeurope", "parameters" : { "loadBalancerType":{"value":"external"}, - "kibana":{"value":"No"}, + "kibana":{"value":"Yes"}, + "vmSizeKibana":{"value":"Standard_DS1_v2"}, "logstash":{"value":"Yes"}, + "vmSizeLogstash":{"value":"Standard_DS1_v2"}, "logstashAdditionalPlugins":{"value":"logstash-input-heartbeat"}, "logstashConf":{"value":"conf/logstash.conf"}, "vmSizeDataNodes":{"value":"Standard_DS1_v2"}, diff --git a/build/tasks/arm-validator.js b/build/tasks/arm-validator.js index a4e8d598..f9b372e9 100644 --- a/build/tasks/arm-validator.js +++ b/build/tasks/arm-validator.js @@ -473,25 +473,33 @@ var sanityCheckLogstash = (test, url, cb) => { var rg = t.resourceGroup; log(`checking logstash is sending events in resource group: ${rg}`); var opts = createLoadBalancerRequestOptions(t, "external"); - - request(`${url}/heartbeat/_count`, opts, (error, response, body) => { - if (!error && response.statusCode == 200) { - var count = (body) ? body.count : -1; - if (count >= 0) { - log(`logstash sent ${count} events in resource group: ${rg}`); - cb(); + var attempts = 0; + var countRequest = () => { + request(`${url}/heartbeat/_count`, opts, (error, response, body) => { + if (!error && response.statusCode == 200) { + var count = (body) ? body.count : -1; + if (count >= 0) { + log(`logstash sent ${count} events in resource group: ${rg}`); + cb(); + } + else { + log(`logstash not sent any events in resource group: ${rg}`); + cb(); + } + } + else if (response.statusCode == 404 && attempts < 10) { + log(`logstash event index not found. retry attempt: ${++attempts} for resource group: ${rg}`); + setTimeout(countRequest, 5000); } else { - log(`logstash not sent any events in resource group: ${rg}`); + log(`problem checking for logstash events in resource group: ${rg}. 
response status code: ${response.statusCode}`); + log(test, `statusCode: ${response.statusCode}, error: ${error}\ncheckLogstashEventCountResponse: ${JSON.stringify(body ? body : {}, null, 2)}`); cb(); } - } - else { - log(test, `statusCode: ${response.statusCode}, error: ${error}\ncheckLogstashEventCountResponse: ${JSON.stringify(body ? body : {}, null, 2)}`); - cb(); - } - }); + }); + }; + countRequest(); } var deployTemplate = (test, cb) => { From ee8ea3688a6988e1df291fe336c82b7d917b9cdf Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Thu, 27 Sep 2018 16:00:41 +1000 Subject: [PATCH 20/31] Configure monitoring only when possible --- src/scripts/logstash-install.sh | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index 19d8d3e5..7b5616ca 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -168,7 +168,7 @@ add_keystore_or_env_var() local VALUE="$2" local SYS_CONFIG=/etc/sysconfig - if [[ ! -f $SYS_CONFIG ]]; then + if [[ ! 
-f $SYS_CONFIG/logstash ]]; then [ -d $SYS_CONFIG ] || mkdir -p $SYS_CONFIG touch $SYS_CONFIG/logstash chmod 600 $SYS_CONFIG/logstash @@ -254,16 +254,22 @@ configure_logstash_yaml() log "[configure_logstash_yaml] installed x-pack plugin" fi - # configure monitoring - echo 'xpack.monitoring.elasticsearch.url: "${ELASTICSEARCH_URL}"' >> $LOGSTASH_CONF + # assumes Security is enabled, so configure monitoring credentials echo "xpack.monitoring.elasticsearch.username: logstash_system" >> $LOGSTASH_CONF echo 'xpack.monitoring.elasticsearch.password: "${LOGSTASH_SYSTEM_PASSWORD}"' >> $LOGSTASH_CONF - echo "xpack.monitoring.enabled: true" >> $LOGSTASH_CONF fi + # configure monitoring + echo 'xpack.monitoring.elasticsearch.url: "${ELASTICSEARCH_URL}"' >> $LOGSTASH_CONF + + local MONITORING='true' + # Make the HTTP CA cert for communication with Elasticsearch available to # Logstash conf files through ${ELASTICSEARCH_CACERT} if [[ -n "${HTTP_CERT}" || -n "${HTTP_CACERT}" && ${INSTALL_XPACK} -ne 0 ]]; then + + MONITORING='false' + [ -d $SSL_PATH ] || mkdir -p $SSL_PATH if [[ -n "${HTTP_CERT}" ]]; then @@ -279,11 +285,18 @@ configure_logstash_yaml() else log "[configure_logstash_yaml] CA cert extracted from HTTP PKCS#12 archive. Make ELASTICSEARCH_CACERT available to conf files" add_keystore_or_env_var "ELASTICSEARCH_CACERT" "$SSL_PATH/elasticsearch-http-ca.crt" - echo 'xpack.monitoring.elasticsearch.ssl.ca: "${ELASTICSEARCH_CACERT}"' >> $LOGSTASH_CONF + + # logstash performs hostname verification for monitoring + # which will not work for a HTTP cert provided by the user, where logstash communicates through internal loadbalancer. + # 6.4.0 exposes verification_mode, so set this to none and document. 
+ if dpkg --compare-versions "$LOGSTASH_VERSION" "ge" "6.4.0"; then + echo 'xpack.monitoring.elasticsearch.ssl.ca: "${ELASTICSEARCH_CACERT}"' >> $LOGSTASH_CONF + echo 'xpack.monitoring.elasticsearch.ssl.verification_mode: none' >> $LOGSTASH_CONF + MONITORING='true' + fi fi else - # convert PKCS#12 CA certificate to PEM format local HTTP_CACERT_FILENAME=elasticsearch-http-ca.p12 log "[configure_logstash_yaml] Save PKCS#12 archive for Elasticsearch HTTP CA to file" @@ -297,13 +310,21 @@ configure_logstash_yaml() else log "[configure_logstash_yaml] CA cert extracted from HTTP CA PKCS#12 archive. Make ELASTICSEARCH_CACERT available to conf files" add_keystore_or_env_var "ELASTICSEARCH_CACERT" "$SSL_PATH/elasticsearch-http-ca.crt" + + # HTTP certs created from a HTTP CA provided by the user will include the + # IP address of the internal loadbalancer, so hostname verification will pass. echo 'xpack.monitoring.elasticsearch.ssl.ca: "${ELASTICSEARCH_CACERT}"' >> $LOGSTASH_CONF + MONITORING='true' fi fi + chown -R logstash: $SSL_PATH log "[configure_logstash_yaml] Configured ELASTICSEARCH_CACERT for Elasticsearch TLS" + log "[configure_logstash_yaml] X-Pack monitoring for Logstash set to $MONITORING" fi + echo "xpack.monitoring.enabled: $MONITORING" >> $LOGSTASH_CONF + # TODO: Configure Centralized Pipeline Management? 
# https://www.elastic.co/guide/en/logstash/current/configuring-centralized-pipelines.html From e7b320a7a33dabab9e5ce30c1b73b1402ab1efe5 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Thu, 27 Sep 2018 16:27:07 +1000 Subject: [PATCH 21/31] patch logstash VM values --- build/tasks/patch-values.js | 1 + src/mainTemplate.json | 56 ++++++++++++++++++++++++++++++++++++- 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/build/tasks/patch-values.js b/build/tasks/patch-values.js index d6e33aff..160ebe9f 100644 --- a/build/tasks/patch-values.js +++ b/build/tasks/patch-values.js @@ -58,6 +58,7 @@ gulp.task("patch", function(cb) { main.parameters.vmSizeMasterNodes.allowedValues = vmSizes; main.parameters.vmSizeClientNodes.allowedValues = vmSizes; main.parameters.vmSizeKibana.allowedValues = kibanaVmSizes; + main.parameters.vmSizeLogstash.allowedValues = vmSizes; jsonfile.writeFile(mainTemplate, main, function (err) { jsonfile.readFile(uiTemplate, function(err, ui) { diff --git a/src/mainTemplate.json b/src/mainTemplate.json index dbfd7122..5fd42499 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -365,12 +365,18 @@ "type": "string", "defaultValue": "Standard_D1", "allowedValues": [ + "Standard_A0", + "Standard_A1", "Standard_A2", "Standard_A3", "Standard_A4", "Standard_A5", "Standard_A6", "Standard_A7", + "Standard_A8", + "Standard_A9", + "Standard_A10", + "Standard_A11", "Standard_D1", "Standard_D2", "Standard_D3", @@ -407,6 +413,26 @@ "Standard_DS13_v2", "Standard_DS14_v2", "Standard_DS15_v2", + "Standard_D2s_v3", + "Standard_D4s_v3", + "Standard_D8s_v3", + "Standard_D16s_v3", + "Standard_D32s_v3", + "Standard_D64s_v3", + "Standard_E2_v3", + "Standard_E4_v3", + "Standard_E8_v3", + "Standard_E16_v3", + "Standard_E32_v3", + "Standard_E64_v3", + "Standard_E64i_v3", + "Standard_E2s_v3", + "Standard_E4s_v3", + "Standard_E8s_v3", + "Standard_E16s_v3", + "Standard_E32s_v3", + "Standard_E64s_v3", + "Standard_E64is_v3", "Standard_F1", "Standard_F2", 
"Standard_F4", @@ -416,7 +442,35 @@ "Standard_F2s", "Standard_F4s", "Standard_F8s", - "Standard_F16s" + "Standard_F16s", + "Standard_G1", + "Standard_G2", + "Standard_G3", + "Standard_G4", + "Standard_G5", + "Standard_GS1", + "Standard_GS2", + "Standard_GS3", + "Standard_GS4", + "Standard_GS5", + "Standard_L4s", + "Standard_L8s", + "Standard_L16s", + "Standard_L32s", + "Standard_M8ms", + "Standard_M16ms", + "Standard_M32ts", + "Standard_M32ls", + "Standard_M32ms", + "Standard_M64s", + "Standard_M64ls", + "Standard_M64ms", + "Standard_M128s", + "Standard_M128ms", + "Standard_M64", + "Standard_M64m", + "Standard_M128", + "Standard_M128m" ], "metadata": { "description": "Size of the Logstash nodes" From 323c9f73c389dca4185e41774d31d156ef853617 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Thu, 27 Sep 2018 16:27:34 +1000 Subject: [PATCH 22/31] Replace security password in test logstash conf files --- ...-0c-int-klp.json => 1d-0m-0c-ext-klp.json} | 0 build/arm-tests/1d-0m-0c-ext-tls-klp.json | 30 +++++++ build/conf/logstash-tls.conf | 2 +- build/conf/logstash.conf | 2 +- build/tasks/arm-validator.js | 89 +++++++++++++++---- 5 files changed, 103 insertions(+), 20 deletions(-) rename build/arm-tests/{1d-0m-0c-int-klp.json => 1d-0m-0c-ext-klp.json} (100%) create mode 100644 build/arm-tests/1d-0m-0c-ext-tls-klp.json diff --git a/build/arm-tests/1d-0m-0c-int-klp.json b/build/arm-tests/1d-0m-0c-ext-klp.json similarity index 100% rename from build/arm-tests/1d-0m-0c-int-klp.json rename to build/arm-tests/1d-0m-0c-ext-klp.json diff --git a/build/arm-tests/1d-0m-0c-ext-tls-klp.json b/build/arm-tests/1d-0m-0c-ext-tls-klp.json new file mode 100644 index 00000000..eee937fa --- /dev/null +++ b/build/arm-tests/1d-0m-0c-ext-tls-klp.json @@ -0,0 +1,30 @@ +{ + "description": "1 data node cluster with logstash", + "notes": "the use of xpack.monitoring.collection.enabled requires version to be 6.3.0+", + "isValid" : true, + "deploy" : true, + "why" : "", + "location" : "westeurope", + 
"parameters" : { + "loadBalancerType":{"value":"external"}, + "esAdditionalYaml":{"value":"xpack.monitoring.collection.enabled: true"}, + "esHttpCaCertBlob":{"value":"certs/ca-cert-with-password.pfx"}, + "esHttpCaCertPassword":{"value":"Password123"}, + "esTransportCaCertBlob":{"value":"certs/ca-cert-with-password.pfx"}, + "esTransportCaCertPassword":{"value":"Password123"}, + "esTransportCertPassword":{"value":"Password1234"}, + "kibana":{"value":"Yes"}, + "vmSizeKibana":{"value":"Standard_DS1_v2"}, + "logstash":{"value":"Yes"}, + "vmSizeLogstash":{"value":"Standard_DS1_v2"}, + "logstashAdditionalPlugins":{"value":"logstash-input-heartbeat"}, + "logstashConf":{"value":"conf/logstash-tls.conf"}, + "vmSizeDataNodes":{"value":"Standard_DS1_v2"}, + "vmDataNodeCount":{"value":1}, + "vmDataDiskCount":{"value":1}, + "vmDataDiskSize":{"value":"Small"}, + "storageAccountType":{"value":"Default"}, + "dataNodesAreMasterEligible":{"value":"Yes"}, + "authenticationType":{"value":"password"} + } +} diff --git a/build/conf/logstash-tls.conf b/build/conf/logstash-tls.conf index daeb3414..ad571eb1 100644 --- a/build/conf/logstash-tls.conf +++ b/build/conf/logstash-tls.conf @@ -12,7 +12,7 @@ output { elasticsearch { hosts => ["${ELASTICSEARCH_URL}"] user => "elastic" - password => "Password123" + password => "securityAdminPassword" index => "heartbeat" ssl => true cacert => "${ELASTICSEARCH_CACERT}" diff --git a/build/conf/logstash.conf b/build/conf/logstash.conf index 1b145810..230ad129 100644 --- a/build/conf/logstash.conf +++ b/build/conf/logstash.conf @@ -12,7 +12,7 @@ output { elasticsearch { hosts => ["${ELASTICSEARCH_URL}"] user => "elastic" - password => "Password123" + password => "securityAdminPassword" index => "heartbeat" } } diff --git a/build/tasks/arm-validator.js b/build/tasks/arm-validator.js index f9b372e9..7e081655 100644 --- a/build/tasks/arm-validator.js +++ b/build/tasks/arm-validator.js @@ -39,7 +39,7 @@ var bootstrapTest = (t, defaultVersion) => { var test = 
require("../arm-tests/" + t); - // replace parameters with values with base64 encoded values + // replace parameters with base64 encoded file values [ "esHttpCertBlob", "esHttpCaCertBlob", "esTransportCaCertBlob", @@ -49,8 +49,11 @@ var bootstrapTest = (t, defaultVersion) => "appGatewayEsHttpCertBlob", "logstashConf"].forEach(k => { if (test.parameters[k] && test.parameters[k].value) { - var cert = fs.readFileSync(test.parameters[k].value); - test.parameters[k].value = new Buffer(cert).toString("base64"); + var buffer = fs.readFileSync(test.parameters[k].value); + if (k === "logstashConf") { + buffer = new Buffer(buffer.toString().replace("securityAdminPassword", config.deployments.securityPassword)); + } + test.parameters[k].value = new Buffer(buffer).toString("base64"); } }); @@ -371,19 +374,30 @@ var createLoadBalancerRequestOptions = (t, loadbalancerType) => { var opts = { json: true, auth: { username: "elastic", password: config.deployments.securityPassword }, + // don't perform hostname validation as all tests use self-signed certs agentOptions: { checkServerIdentity: _.noop } }; - var certParams = { - blob: (loadbalancerType === "application gateway") ? "appGatewayCertBlob": "esHttpCertBlob", - passphrase: (loadbalancerType === "application gateway") ? 
"appGatewayCertPassword": "esHttpCertPassword", - }; - - if (t.params[certParams.blob] && t.params[certParams.blob].value) { - if (t.params[certParams.passphrase] && t.params[certParams.passphrase].value) { + if (loadbalancerType === "application gateway") { + if (t.params.appGatewayCertBlob && t.params.appGatewayCertBlob.value) { + if (t.params.appGatewayCertPassword && t.params.appGatewayCertPassword.value) { + opts = merge.recursive(true, opts, { + pfx: fs.readFileSync("certs/cert-with-password.pfx"), + passphrase: t.params.appGatewayCertPassword.value, + }); + } + else { + opts = merge.recursive(true, opts, { + pfx: fs.readFileSync("certs/cert-no-password.pfx") + }); + } + } + } + else if (t.params.esHttpCertBlob && t.params.esHttpCertBlob.value) { + if (t.params.esHttpCertPassword && t.params.esHttpCertPassword.value) { opts = merge.recursive(true, opts, { pfx: fs.readFileSync("certs/cert-with-password.pfx"), - passphrase: t.params[certParams.passphrase].value, + passphrase: t.params.esHttpCertPassword.value, }); } else { @@ -392,6 +406,13 @@ var createLoadBalancerRequestOptions = (t, loadbalancerType) => { }); } } + else if (t.params.esHttpCaCertBlob && t.params.esHttpCaCertBlob.value) { + opts = merge.recursive(true, opts, { + // ca cert agentOption does not work: https://github.com/request/request#using-optionsagentoptions + // so disable cert validation altogether when certs are generated from a CA. + rejectUnauthorized: false + }); + } return opts; } @@ -406,11 +427,14 @@ var sanityCheckExternalLoadBalancer = (test, loadbalancerType, url, cb) => { log(test, `loadBalancerResponse: ${JSON.stringify(body, null, 2)}`); request(`${url}/_cluster/health`, opts, (error, response, body) => { var status = (body) ? 
body.status : "unknown"; - if (!error && response.statusCode == 200 && status === "green") { + if (!error && response.statusCode === 200 && + // if logstash is deployed and successfully sending events, the logstash created + // index will be created with the default number of shards and replicas + (status === "green" || (status === "yellow" && t.params.logstash.value === "Yes"))) { log(`cluster is up and running in resource group: ${rg}`); log(test, `clusterHealthResponse: ${JSON.stringify(body, null, 2)}`); var expectedTotalNodes = 3 + t.params.vmDataNodeCount.value + t.params.vmClientNodeCount.value; - if (t.params.dataNodesAreMasterEligible.value == "Yes") expectedTotalNodes -= 3; + if (t.params.dataNodesAreMasterEligible.value === "Yes") expectedTotalNodes -= 3; log(`expecting ${expectedTotalNodes} total nodes in resource group: ${rg} and found: ${body.number_of_nodes}`); //if (body.number_of_nodes != expectedTotalNodes) return bailOut(new Error(m)); @@ -464,7 +488,36 @@ var sanityCheckKibana = (test, url, cb) => { log(test, `kibanaResponse: ${JSON.stringify((body && body.status) ? body.status : {}, null, 2)}`); //no validation just yet, kibana is most likely red straight after deployment while it retries the cluster //There is no guarantee kibana is not provisioned before the cluster is up - cb(); + + if (state == "green") { + // check monitoring endpoint + opts.method = "POST"; + opts.headers = opts.headers || {}; + opts.headers["kbn-xsrf"] = "reporting"; + + request(`${url}/api/monitoring/v1/clusters`, opts, function (error, response, body) { + log(test, `monitoringResponse: ${JSON.stringify(body ? 
body : {}, null, 2)}`); + + if (t.params.kibana.value === "Yes") { + var kibana = body.kibana; + if (kibana) { + log ("kibana monitoring enabled"); + } + } + + if (t.params.logstash.value === "Yes") { + var logstash = body.logstash; + if (logstash) { + log("logstash monitoring enabled"); + } + } + + cb(); + }); + } + else { + cb(); + } }); } @@ -476,7 +529,7 @@ var sanityCheckLogstash = (test, url, cb) => { var attempts = 0; var countRequest = () => { request(`${url}/heartbeat/_count`, opts, (error, response, body) => { - if (!error && response.statusCode == 200) { + if (!error && response && response.statusCode == 200) { var count = (body) ? body.count : -1; if (count >= 0) { log(`logstash sent ${count} events in resource group: ${rg}`); @@ -487,13 +540,13 @@ var sanityCheckLogstash = (test, url, cb) => { cb(); } } - else if (response.statusCode == 404 && attempts < 10) { + else if (response && response.statusCode == 404 && attempts < 10) { log(`logstash event index not found. retry attempt: ${++attempts} for resource group: ${rg}`); setTimeout(countRequest, 5000); } else { - log(`problem checking for logstash events in resource group: ${rg}. response status code: ${response.statusCode}`); - log(test, `statusCode: ${response.statusCode}, error: ${error}\ncheckLogstashEventCountResponse: ${JSON.stringify(body ? body : {}, null, 2)}`); + log(`problem checking for logstash events in resource group: ${rg}. ${response ? "response status code: " + response.statusCode: ""}`); + log(test, `statusCode: ${response ? response.statusCode : "unknown"}, error: ${error}\ncheckLogstashEventCountResponse: ${JSON.stringify(body ? 
body : {}, null, 2)}`); cb(); } }); From bf2eeb8d29bcdb75aadbd73ec7c796d912f78c3e Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Thu, 27 Sep 2018 16:27:41 +1000 Subject: [PATCH 23/31] Update README --- README.md | 42 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 0bb7d2a2..a3464d3b 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ For more details around developing the template, take a look at the [Development The [Azure Marketplace Elasticsearch offering](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/elastic.elasticsearch) offers a simplified UI and installation experience over the full power of the ARM template. -It will always bootstrap a cluster complete with a trial license of Elastic's commercial [X-Pack features](https://www.elastic.co/products/x-pack). +It will always bootstrap a cluster complete with a trial license of the [Elastic Stack's commercial features](https://www.elastic.co/products/stack). Deploying through the Marketplace is great and easy way to get your feet wet for the first time with Elasticsearch (on Azure) but in the long run, you'll want to deploy the templates directly from GitHub using the Azure CLI or PowerShell SDKs. Check out the examples. @@ -140,7 +140,7 @@ in conjunction with other parameters. xpackPluginsstring Either Yes or No to install a trial license of the commercial X-Pack - features such as Monitoring, Security, Alerting, Graph, Machine Learning (5.5.0+) and SQL. If also installing Kibana, it will have Reporting and Profiler installed. + features such as Monitoring, Security, Alerting, Graph, Machine Learning (5.5.0+) and SQL. If also installing Kibana, it will have Reporting and Profiler installed.

A value of No for Elasticsearch and Kibana prior to 6.3.0, will include only the Open Source features. @@ -275,7 +275,7 @@ in conjunction with other parameters. support Premium Storage and Standard Storage for those that do not. Standard will use Standard Storage. Default - Client (Coordinating only) node related settings + Coordinating node related settings vmClientNodeCountint The number of client nodes to provision. Must be a positive integer. By default, the data nodes are added to the backend pool of the loadbalancer but @@ -341,8 +341,7 @@ in conjunction with other parameters. kibanastring Either Yes or No to provision a machine with Kibana installed and a public IP address to access it. If you have opted to also install the X-Pack plugins using xpackPlugins, - has Kibana installed on it. If you have opted to also install the X-Pack plugins using xpackPlugins, - a trial license of the commercial X-Pack Kibana plugins will be installed. + a trial license of the commercial Kibana features will be applied and activated. Yes vmSizeKibanastring @@ -362,6 +361,39 @@ in conjunction with other parameters. kibanaAdditionalYamlstring Additional configuration for Kibana yaml configuration file. Each line must be separated by a \n newline character e.g. "server.name: \"My server\"\nkibana.defaultAppId: home".

This is an expert level feature - It is recommended that you run your additional yaml through a linter before starting a deployment."" + Logstash related settings + + logstashstring + Either Yes or No to provision a machine with Logstash installed. If you have opted to also install the X-Pack plugins using xpackPlugins, + a trial license for the commercial Logstash features will be applied and activated. + No + + vmSizeLogstashstring + Azure VM size of the Logstash instance. See this list for supported sizes. + Check that the size you select is available in the region you choose. + Standard_D1 + + logstashHeapSizeinteger + The size, in megabytes, of memory to allocate for the JVM heap for Logstash. If unspecified, Logstash will be configured with the default heap size for the distribution and version. + Take a look at the Logstash documentation on profiling heap size for more information.

This is an expert level feature - setting a heap size too low, or larger than available memory on the Logstash VM SKU will fail the deployment. + 0 + + logstashConfsecurestring + A Base-64 encoded form of a Logstash config file to deploy. + "" + + logstashKeystorePasswordsecurestring + The password to protect the Logstash keystore. If no value is supplied, a value will be generated using the ARM template uniqueString() function. Used only in 6.2.0+ + "" + + logstashAdditionalPluginsstring + Additional Logstash plugins to install. Each plugin must be separated by a semicolon. e.g. logstash-input-heartbeat;logstash-input-twitter + "" + + logstashAdditionalYamlstring + Additional configuration for Logstash yaml configuration file. Each line must be separated by a newline character \n e.g. "pipeline.batch.size: 125\npipeline.batch.delay: 50".

This is an expert level feature - It is recommended that you run your additional yaml through a linter before starting a deployment. + "" + Jumpbox related settings jumpboxstring From c2f390c6f4772dd9e203b7276b386dcf9ee7a2d3 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Thu, 27 Sep 2018 16:34:28 +1000 Subject: [PATCH 24/31] Update README --- README.md | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index a3464d3b..10c96915 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ -# Elasticsearch Azure Marketplace offering +# Elastic Stack Azure Marketplace offering + +Easily deploy the Elastic Stack of Elasticsearch, Kibana and Logstash to Azure. This repository consists of: @@ -24,12 +26,12 @@ For more details around developing the template, take a look at the [Development ## Azure Marketplace -The [Azure Marketplace Elasticsearch offering](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/elastic.elasticsearch) offers a simplified UI and installation experience over the full power of the ARM template. +The [Azure Marketplace Elastic Stack offering](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/elastic.elasticsearch) offers a simplified UI and installation experience over the full power of the ARM template. -It will always bootstrap a cluster complete with a trial license of the [Elastic Stack's commercial features](https://www.elastic.co/products/stack). +It will always bootstrap an Elasticsearch cluster complete with a trial license of the [Elastic Stack's commercial features](https://www.elastic.co/products/stack). -Deploying through the Marketplace is great and easy way to get your feet wet for the first time with Elasticsearch (on Azure) but in the long run, you'll want to deploy the templates directly from GitHub using the Azure CLI or PowerShell SDKs. -Check out the examples. 
+Deploying through the Marketplace is great and easy way to get your feet wet for the first time with Elasticsearch on Azure, but in the long run, you'll want to deploy the templates directly from GitHub using the Azure CLI or PowerShell SDKs. +Check out the CLI examples. --- @@ -81,12 +83,12 @@ posts for further information ### X-Pack features -Starting with Elasticsearch and Kibana 6.3.0, The template deploys with X-Pack features bundled as part of the deployment, and +Starting with Elasticsearch, Kibana and Logstash 6.3.0, The template deploys with X-Pack features bundled as part of the deployment, and includes the free features under the [Basic license](https://www.elastic.co/subscriptions) level. The [`xpackPlugins`](#x-pack) parameter determines whether a self-generated trial license is applied, offering a trial period of 30 days of the Platinum license features. A value of `Yes` applies a trial license, a value of `No` applies the Basic license. -For Elasticsearch and Kibana prior to 6.3.0, The [`xpackPlugins`](#x-pack) parameter determines whether X-Pack plugins are installed +For Elasticsearch, Kibana and Logstash prior to 6.3.0, The [`xpackPlugins`](#x-pack) parameter determines whether X-Pack plugins are installed and a self-generated trial license is applied. In difference to 6.3.0 however, a value of `No` for `xpackPlugins` means that X-Pack plugins are not installed, and therefore does not provide the free features under the Basic license level, offering the Open Source features only. For these versions, you can install X-Pack plugins and [**register for a free Basic license** to apply to the deployment](https://register.elastic.co/), in @@ -94,8 +96,9 @@ order to use the free features available under the Basic license level. ## Parameters -The ARM template accepts a _lot_ of parameters, although many of them are optional and only used -in conjunction with other parameters. +The ARM template accepts a _lot_ of parameters, but don't fear! 
Most of them are **optional** and only used +in conjunction with other parameters. Where a parameter value is not explicitly provided, it will take the default +value defined in the template. From 865f91022d32f066830c0a348d83236dbb119d2d Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 28 Sep 2018 10:48:01 +1000 Subject: [PATCH 25/31] Check Kibana monitoring endpoint --- build/tasks/arm-validator.js | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/build/tasks/arm-validator.js b/build/tasks/arm-validator.js index 7e081655..bfb86628 100644 --- a/build/tasks/arm-validator.js +++ b/build/tasks/arm-validator.js @@ -149,7 +149,7 @@ var bailOutNoCleanUp = (error) => { throw error; } -var bailOut = (error, rg) => { +var bailOut = (error, rg) => { if (!error) return; if (!rg) log(error) else log(`resourcegroup: ${rg} - ${error}`) @@ -490,25 +490,37 @@ var sanityCheckKibana = (test, url, cb) => { //There is no guarantee kibana is not provisioned before the cluster is up if (state == "green") { - // check monitoring endpoint + log(`checking kibana monitoring endpoint for rg: ${rg}`); + opts.method = "POST"; opts.headers = opts.headers || {}; opts.headers["kbn-xsrf"] = "reporting"; + var now = new Date(); + now.setHours(now.getHours() - 1); + var plusAnHour = new Date(); + plusAnHour.setHours(plusAnHour.getHours() + 1); + opts.body = JSON.stringify({ + timeRange: { + min: dateFormat(now, "isoUtcDateTime"), + max: dateFormat(plusAnHour, "isoUtcDateTime") + } + }); request(`${url}/api/monitoring/v1/clusters`, opts, function (error, response, body) { log(test, `monitoringResponse: ${JSON.stringify(body ? 
body : {}, null, 2)}`); - if (t.params.kibana.value === "Yes") { - var kibana = body.kibana; + if (body && body.length) { + var kibana = body[0].kibana; if (kibana) { log ("kibana monitoring enabled"); } - } - if (t.params.logstash.value === "Yes") { - var logstash = body.logstash; - if (logstash) { - log("logstash monitoring enabled"); + if (t.params.logstash.value === "Yes") { + log("logstash enabled in the template. Checking monitoring"); + var logstash = body[0].logstash; + if (logstash) { + log("logstash monitoring enabled"); + } } } From b52abaeed515479f0d319bbc2ddd2e7eeeef8a4b Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 28 Sep 2018 10:50:34 +1000 Subject: [PATCH 26/31] Configure Logstash monitoring for 6.3.0+ or X-Pack installed --- src/scripts/logstash-install.sh | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index 7b5616ca..fa5e3957 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -36,7 +36,6 @@ help() } # Custom logging with time so we can easily relate running times, also log to separate file so order is guaranteed. -# The Script extension output the stdout/err buffer in intervals with duplicates. log() { echo \[$(date +%d%m%Y-%H:%M:%S)\] "$1" @@ -62,7 +61,7 @@ fi ######################### #Script Parameters -LOGSTASH_VERSION="6.2.4" +LOGSTASH_VERSION="6.4.0" LOGSTASH_HEAP=0 ELASTICSEARCH_URL="http://10.0.0.4:9200" INSTALL_XPACK=0 @@ -204,6 +203,8 @@ configure_logstash_yaml() { local LOGSTASH_CONF=/etc/logstash/logstash.yml local SSL_PATH=/etc/logstash/ssl + local LOG_PATH=/var/log/logstash + local XPACK_BUNDLED=$(dpkg --compare-versions "$LOGSTASH_VERSION" "ge" "6.3.0"; echo $?) 
# backup the current config if [[ -f $LOGSTASH_CONF ]]; then @@ -240,28 +241,29 @@ configure_logstash_yaml() # echo "queue.type: persisted" >> $LOGSTASH_CONF # put log files on the OS disk in a writable location - local LOG_PATH=/var/log/logstash mkdir -p $LOG_PATH chown -R logstash: $LOG_PATH echo "path.logs: $LOG_PATH" >> $LOGSTASH_CONF echo "log.level: error" >> $LOGSTASH_CONF # install x-pack - if [ ${INSTALL_XPACK} -ne 0 ]; then + if [[ $INSTALL_XPACK -ne 0 ]]; then if dpkg --compare-versions "$LOGSTASH_VERSION" "lt" "6.3.0"; then log "[configure_logstash_yaml] installing x-pack plugin" /usr/share/logstash/bin/logstash-plugin install x-pack log "[configure_logstash_yaml] installed x-pack plugin" fi + echo 'xpack.monitoring.elasticsearch.url: "${ELASTICSEARCH_URL}"' >> $LOGSTASH_CONF + # assumes Security is enabled, so configure monitoring credentials echo "xpack.monitoring.elasticsearch.username: logstash_system" >> $LOGSTASH_CONF echo 'xpack.monitoring.elasticsearch.password: "${LOGSTASH_SYSTEM_PASSWORD}"' >> $LOGSTASH_CONF + elif [[ $XPACK_BUNDLED -eq 0 ]]; then + # configure monitoring for basic + echo 'xpack.monitoring.elasticsearch.url: "${ELASTICSEARCH_URL}"' >> $LOGSTASH_CONF fi - # configure monitoring - echo 'xpack.monitoring.elasticsearch.url: "${ELASTICSEARCH_URL}"' >> $LOGSTASH_CONF - local MONITORING='true' # Make the HTTP CA cert for communication with Elasticsearch available to @@ -323,7 +325,9 @@ configure_logstash_yaml() log "[configure_logstash_yaml] X-Pack monitoring for Logstash set to $MONITORING" fi - echo "xpack.monitoring.enabled: $MONITORING" >> $LOGSTASH_CONF + if [[ $XPACK_BUNDLED -eq 0 || $INSTALL_XPACK -ne 0 ]]; then + echo "xpack.monitoring.enabled: $MONITORING" >> $LOGSTASH_CONF + fi # TODO: Configure Centralized Pipeline Management? 
# https://www.elastic.co/guide/en/logstash/current/configuring-centralized-pipelines.html @@ -334,7 +338,7 @@ configure_logstash_yaml() local SKIP_LINES="node.name path.data path.logs " SKIP_LINES+="xpack.monitoring.elasticsearch.username xpack.monitoring.elasticsearch.password " - SKIP_LINES+="xpack.monitoring.enabled " + SKIP_LINES+="xpack.monitoring.enabled xpack.monitoring.elasticsearch.ssl.ca xpack.monitoring.elasticsearch.ssl.verification_mode " local SKIP_REGEX="^\s*("$(echo $SKIP_LINES | tr " " "|" | sed 's/\./\\\./g')")" IFS=$'\n' for LINE in $(echo -e "$YAML_CONFIGURATION"); do From b86ee767459648cf3c017f554dd963b6ef735a70 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 28 Sep 2018 10:50:51 +1000 Subject: [PATCH 27/31] Consistent default versions and log message casing --- src/scripts/elasticsearch-install.sh | 2 +- src/scripts/kibana-install.sh | 54 ++++++++++++++-------------- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/src/scripts/elasticsearch-install.sh b/src/scripts/elasticsearch-install.sh index 5c09e087..49551cbd 100644 --- a/src/scripts/elasticsearch-install.sh +++ b/src/scripts/elasticsearch-install.sh @@ -97,7 +97,7 @@ fi CLUSTER_NAME="elasticsearch" NAMESPACE_PREFIX="" -ES_VERSION="6.2.4" +ES_VERSION="6.4.0" ES_HEAP=0 INSTALL_XPACK=0 INSTALL_ADDITIONAL_PLUGINS="" diff --git a/src/scripts/kibana-install.sh b/src/scripts/kibana-install.sh index c1d7acbb..0026d9fa 100644 --- a/src/scripts/kibana-install.sh +++ b/src/scripts/kibana-install.sh @@ -68,7 +68,7 @@ fi #Script Parameters CLUSTER_NAME="elasticsearch" -KIBANA_VERSION="6.2.4" +KIBANA_VERSION="6.4.0" #Default internal load balancer ip ELASTICSEARCH_URL="http://10.0.0.4:9200" INSTALL_XPACK=0 @@ -208,32 +208,32 @@ configuration_and_plugins() install_pwgen local ENCRYPTION_KEY=$(pwgen 64 1) echo "xpack.security.encryptionKey: \"$ENCRYPTION_KEY\"" >> $KIBANA_CONF - log "[configuration_and_plugins] X-Pack Security encryption key generated" + log 
"[configuration_and_plugins] X-Pack security encryption key generated" ENCRYPTION_KEY=$(pwgen 64 1) echo "xpack.reporting.encryptionKey: \"$ENCRYPTION_KEY\"" >> $KIBANA_CONF - log "[configuration_and_plugins] X-Pack Reporting encryption key generated" + log "[configuration_and_plugins] X-Pack reporting encryption key generated" - log "[configuration_and_plugins] Installing X-Pack plugin" + log "[configuration_and_plugins] installing X-Pack plugin" /usr/share/kibana/bin/kibana-plugin install x-pack - log "[configuration_and_plugins] Installed X-Pack plugin" + log "[configuration_and_plugins] installed X-Pack plugin" fi # configure HTTPS if cert and private key supplied if [[ -n "${SSL_CERT}" && -n "${SSL_KEY}" ]]; then [ -d $SSL_PATH ] || mkdir -p $SSL_PATH - log "[configuration_and_plugins] Save kibana cert to file" + log "[configuration_and_plugins] save kibana cert to file" echo ${SSL_CERT} | base64 -d | tee $SSL_PATH/kibana.crt - log "[configuration_and_plugins] Save kibana key to file" + log "[configuration_and_plugins] save kibana key to file" echo ${SSL_KEY} | base64 -d | tee $SSL_PATH/kibana.key - log "[configuration_and_plugins] Configuring SSL/TLS to Kibana" + log "[configuration_and_plugins] configuring SSL/TLS to Kibana" echo "server.ssl.enabled: true" >> $KIBANA_CONF echo "server.ssl.key: $SSL_PATH/kibana.key" >> $KIBANA_CONF echo "server.ssl.certificate: $SSL_PATH/kibana.crt" >> $KIBANA_CONF if [[ -n "${SSL_PASSPHRASE}" ]]; then echo "server.ssl.keyPassphrase: \"$SSL_PASSPHRASE\"" >> $KIBANA_CONF fi - log "[configuration_and_plugins] Configured SSL/TLS to Kibana" + log "[configuration_and_plugins] configured SSL/TLS to Kibana" fi # configure HTTPS communication with Elasticsearch if cert supplied and x-pack installed. 
@@ -243,14 +243,14 @@ configuration_and_plugins() if [[ -n "${HTTP_CERT}" ]]; then # convert PKCS#12 certificate to PEM format - log "[configuration_and_plugins] Save PKCS#12 archive for Elasticsearch HTTP to file" + log "[configuration_and_plugins] save PKCS#12 archive for Elasticsearch HTTP to file" echo ${HTTP_CERT} | base64 -d | tee $SSL_PATH/elasticsearch-http.p12 - log "[configuration_and_plugins] Extract CA cert from PKCS#12 archive for Elasticsearch HTTP" + log "[configuration_and_plugins] extract CA cert from PKCS#12 archive for Elasticsearch HTTP" echo "$HTTP_CERT_PASSWORD" | openssl pkcs12 -in $SSL_PATH/elasticsearch-http.p12 -out $SSL_PATH/elasticsearch-http-ca.crt -cacerts -nokeys -chain -passin stdin - log "[configuration_and_plugins] Configuring TLS for Elasticsearch" + log "[configuration_and_plugins] configuring TLS for Elasticsearch" if [[ $(stat -c %s $SSL_PATH/elasticsearch-http-ca.crt 2>/dev/null) -eq 0 ]]; then - log "[configuration_and_plugins] No CA cert extracted from HTTP cert. Setting verification mode to none" + log "[configuration_and_plugins] no CA cert extracted from HTTP cert. Setting verification mode to none" echo "elasticsearch.ssl.verificationMode: none" >> $KIBANA_CONF else log "[configuration_and_plugins] CA cert extracted from HTTP PKCS#12 archive. 
Setting verification mode to certificate" @@ -262,29 +262,29 @@ configuration_and_plugins() # convert PKCS#12 CA certificate to PEM format local HTTP_CACERT_FILENAME=elasticsearch-http-ca.p12 - log "[configuration_and_plugins] Save PKCS#12 archive for Elasticsearch HTTP CA to file" + log "[configuration_and_plugins] save PKCS#12 archive for Elasticsearch HTTP CA to file" echo ${HTTP_CACERT} | base64 -d | tee $SSL_PATH/$HTTP_CACERT_FILENAME - log "[configuration_and_plugins] Convert PKCS#12 archive for Elasticsearch HTTP CA to PEM format" + log "[configuration_and_plugins] convert PKCS#12 archive for Elasticsearch HTTP CA to PEM format" echo "$HTTP_CACERT_PASSWORD" | openssl pkcs12 -in $SSL_PATH/$HTTP_CACERT_FILENAME -out $SSL_PATH/elasticsearch-http-ca.crt -clcerts -nokeys -chain -passin stdin - log "[configuration_and_plugins] Configuring TLS for Elasticsearch" + log "[configuration_and_plugins] configuring TLS for Elasticsearch" if [[ $(stat -c %s $SSL_PATH/elasticsearch-http-ca.crt 2>/dev/null) -eq 0 ]]; then - log "[configuration_and_plugins] No CA cert extracted from HTTP CA. Setting verification mode to none" + log "[configuration_and_plugins] no CA cert extracted from HTTP CA. Setting verification mode to none" echo "elasticsearch.ssl.verificationMode: none" >> $KIBANA_CONF else log "[configuration_and_plugins] CA cert extracted from HTTP CA PKCS#12 archive. 
Setting verification mode to full" echo "elasticsearch.ssl.verificationMode: full" >> $KIBANA_CONF - log "[configuration_and_plugins] Set CA cert in certificate authorities" + log "[configuration_and_plugins] set CA cert in certificate authorities" echo "elasticsearch.ssl.certificateAuthorities: [ $SSL_PATH/elasticsearch-http-ca.crt ]" >> $KIBANA_CONF fi fi chown -R kibana: $SSL_PATH - log "[configuration_and_plugins] Configured TLS for Elasticsearch" + log "[configuration_and_plugins] configured TLS for Elasticsearch" fi # Configure SAML Single-Sign-On if [[ -n "$SAML_SP_URI" && ${INSTALL_XPACK} -ne 0 ]]; then - log "[configuration_and_plugins] Configuring Kibana for SAML Single-Sign-On" + log "[configuration_and_plugins] configuring Kibana for SAML Single-Sign-On" # Allow both saml and basic realms echo "xpack.security.authProviders: [ saml,basic ]" >> $KIBANA_CONF echo "server.xsrf.whitelist: [ /api/security/v1/saml ]" >> $KIBANA_CONF @@ -306,7 +306,7 @@ configuration_and_plugins() echo "xpack.security.public.protocol: ${PROTOCOL%://}" >> $KIBANA_CONF echo "xpack.security.public.hostname: \"${HOSTNAME%/}\"" >> $KIBANA_CONF echo "xpack.security.public.port: ${PORT%/}" >> $KIBANA_CONF - log "[configuration_and_plugins] Configured Kibana for SAML Single-Sign-On" + log "[configuration_and_plugins] configured Kibana for SAML Single-Sign-On" fi # Additional yaml configuration @@ -325,9 +325,9 @@ configuration_and_plugins() for LINE in $(echo -e "$YAML_CONFIGURATION"); do if [[ -n "$LINE" ]]; then if [[ $LINE =~ $SKIP_REGEX ]]; then - log "[configuration_and_plugins] Skipping line '$LINE'" + log "[configuration_and_plugins] skipping line '$LINE'" else - log "[configuration_and_plugins] Adding line '$LINE' to $KIBANA_CONF" + log "[configuration_and_plugins] adding line '$LINE' to $KIBANA_CONF" echo "$LINE" >> $KIBANA_CONF fi fi @@ -355,9 +355,9 @@ install_yamllint() install_start_service() { - log "[install_start_service] Configuring service for kibana to run at start" 
+ log "[install_start_service] configuring service for kibana to run at start" update-rc.d kibana defaults 95 10 - log "[install_start_service] Starting kibana!" + log "[install_start_service] starting Kibana!" service kibana start } @@ -369,11 +369,11 @@ log "[apt-get] updating apt-get" (apt-get -y update || (sleep 15; apt-get -y update)) > /dev/null log "[apt-get] updated apt-get" -log "[install_sequence] Starting installation" +log "[install_sequence] starting installation" download_kibana configuration_and_plugins install_start_service -log "[install_sequence] Finished installation" +log "[install_sequence] finished installation" ELAPSED_TIME=$(($SECONDS - $START_TIME)) PRETTY=$(printf '%dh:%dm:%ds\n' $(($ELAPSED_TIME/3600)) $(($ELAPSED_TIME%3600/60)) $(($ELAPSED_TIME%60))) From da93307b3519ea7c2e1c767bd829b4714eea0f78 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 28 Sep 2018 14:38:58 +1000 Subject: [PATCH 28/31] Set default conf file dir --- src/scripts/logstash-install.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/scripts/logstash-install.sh b/src/scripts/logstash-install.sh index fa5e3957..63d01704 100644 --- a/src/scripts/logstash-install.sh +++ b/src/scripts/logstash-install.sh @@ -237,6 +237,13 @@ configure_logstash_yaml() # TODO: Consider allowing attached managed disk in future echo "path.data: /var/lib/logstash" >> $LOGSTASH_CONF + # explicitly set the default conf file dir + if dpkg --compare-versions "$LOGSTASH_VERSION" "ge" "6.2.0"; then + echo "path.config: /etc/logstash/conf.d/*.conf" >> $LOGSTASH_CONF + else + echo "path.config: /etc/logstash/conf.d" >> $LOGSTASH_CONF + fi + # TODO: make persistent queues configurable? 
# echo "queue.type: persisted" >> $LOGSTASH_CONF From 6e573d8de71b2de6d3703adc96d2479fa3adeb3a Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 28 Sep 2018 15:57:37 +1000 Subject: [PATCH 29/31] Add Logstash to UI definition --- build/tasks/patch-values.js | 4 +- src/createUiDefinition.json | 175 +++++++++++++++++++++++++++++++++++- src/mainTemplate.json | 2 +- 3 files changed, 175 insertions(+), 6 deletions(-) diff --git a/build/tasks/patch-values.js b/build/tasks/patch-values.js index 160ebe9f..cd5c3e5e 100644 --- a/build/tasks/patch-values.js +++ b/build/tasks/patch-values.js @@ -86,6 +86,7 @@ gulp.task("patch", function(cb) { var dataSizeControl = _.find(dataNodesSection.elements, function (el) { return el.name == "vmSizeDataNodes"; }); var clientSizeControl = _.find(clientNodesSection.elements, function (el) { return el.name == "vmSizeClientNodes"; }); var kibanaSizeControl = _.find(externalAccessStep.elements, function (el) { return el.name == "vmSizeKibana"; }); + var logstashSizeControl = _.find(externalAccessStep.elements, function (el) { return el.name == "vmSizeLogstash"; }); var patchVmSizes = function(control, allowedSizes, patchRecommended, recommendedSize) { delete control.constraints.allowedValues; @@ -95,7 +96,7 @@ gulp.task("patch", function(cb) { if (recommendedSize) { var fromIndex = sizes.indexOf(recommendedSize); if (fromIndex == -1) { - throw new Error("recommendSize '" + recommendedSize + "' not found in recommendedSizes [" + recommendedSizes.join("','") + "]"); + throw new Error(`recommendSize '${recommendedSize}' not found in recommendedSizes [${recommendedSizes.join("','")}]`); } sizes.splice(fromIndex); sizes.unshift(recommendedSize); @@ -108,6 +109,7 @@ gulp.task("patch", function(cb) { patchVmSizes(dataSizeControl, vmSizes, true, "Standard_DS1_v2"); patchVmSizes(clientSizeControl, vmSizes); patchVmSizes(kibanaSizeControl, kibanaVmSizes); + patchVmSizes(logstashSizeControl, vmSizes); var dataNodeCountControl = 
_.find(dataNodesSection.elements, function (el) { return el.name == "vmDataNodeCount"; }); dataNodeCountControl.constraints.allowedValues = dataNodeValues; diff --git a/src/createUiDefinition.json b/src/createUiDefinition.json index bc73604e..6cb8fda3 100644 --- a/src/createUiDefinition.json +++ b/src/createUiDefinition.json @@ -1136,6 +1136,173 @@ "osPlatform": "Linux", "count": "1" }, + { + "name": "logstash", + "type": "Microsoft.Common.OptionsGroup", + "label": "Install Logstash?", + "defaultValue": "No", + "toolTip": "Yes, to provision a single Logstash instance.", + "constraints": { + "allowedValues": [ + { + "label": "Yes", + "value": "Yes" + }, + { + "label": "No", + "value": "No" + } + ] + } + }, + { + "name": "vmSizeLogstash", + "type": "Microsoft.Compute.SizeSelector", + "label": "Logstash VM size", + "toolTip": "Choose VM SKU, Standard D1, D2, D3", + "visible": "[equals(steps('externalAccessStep').logstash, 'Yes')]", + "recommendedSizes": [ + "Standard_D1", + "Standard_D2", + "Standard_D3" + ], + "constraints": { + "allowedSizes": [ + "Standard_A0", + "Standard_A1", + "Standard_A2", + "Standard_A3", + "Standard_A4", + "Standard_A5", + "Standard_A6", + "Standard_A7", + "Standard_A8", + "Standard_A9", + "Standard_A10", + "Standard_A11", + "Standard_D1", + "Standard_D2", + "Standard_D3", + "Standard_D4", + "Standard_D11", + "Standard_D12", + "Standard_D13", + "Standard_D14", + "Standard_D1_v2", + "Standard_D2_v2", + "Standard_D3_v2", + "Standard_D4_v2", + "Standard_D5_v2", + "Standard_D11_v2", + "Standard_D12_v2", + "Standard_D13_v2", + "Standard_D14_v2", + "Standard_D15_v2", + "Standard_DS1", + "Standard_DS2", + "Standard_DS3", + "Standard_DS4", + "Standard_DS11", + "Standard_DS12", + "Standard_DS13", + "Standard_DS14", + "Standard_DS1_v2", + "Standard_DS2_v2", + "Standard_DS3_v2", + "Standard_DS4_v2", + "Standard_DS5_v2", + "Standard_DS11_v2", + "Standard_DS12_v2", + "Standard_DS13_v2", + "Standard_DS14_v2", + "Standard_DS15_v2", + "Standard_D2s_v3", + 
"Standard_D4s_v3", + "Standard_D8s_v3", + "Standard_D16s_v3", + "Standard_D32s_v3", + "Standard_D64s_v3", + "Standard_E2_v3", + "Standard_E4_v3", + "Standard_E8_v3", + "Standard_E16_v3", + "Standard_E32_v3", + "Standard_E64_v3", + "Standard_E64i_v3", + "Standard_E2s_v3", + "Standard_E4s_v3", + "Standard_E8s_v3", + "Standard_E16s_v3", + "Standard_E32s_v3", + "Standard_E64s_v3", + "Standard_E64is_v3", + "Standard_F1", + "Standard_F2", + "Standard_F4", + "Standard_F8", + "Standard_F16", + "Standard_F1s", + "Standard_F2s", + "Standard_F4s", + "Standard_F8s", + "Standard_F16s", + "Standard_G1", + "Standard_G2", + "Standard_G3", + "Standard_G4", + "Standard_G5", + "Standard_GS1", + "Standard_GS2", + "Standard_GS3", + "Standard_GS4", + "Standard_GS5", + "Standard_L4s", + "Standard_L8s", + "Standard_L16s", + "Standard_L32s", + "Standard_M8ms", + "Standard_M16ms", + "Standard_M32ts", + "Standard_M32ls", + "Standard_M32ms", + "Standard_M64s", + "Standard_M64ls", + "Standard_M64ms", + "Standard_M128s", + "Standard_M128ms", + "Standard_M64", + "Standard_M64m", + "Standard_M128", + "Standard_M128m" + ] + }, + "osPlatform": "Linux", + "count": "1" + }, + { + "name": "logstashConf", + "type": "Microsoft.Common.FileUpload", + "label": "Logstash config file", + "toolTip": "A Logstash config file to use when Logstash starts. \"${ELASTICSEARCH_URL}\" can be used to reference the Elasticsearch URL", + "constraints": { + "accept": ".conf" + }, + "options": { + "multiple": false, + "uploadMode": "file", + "openMode": "binary", + "encoding": "UTF-8" + }, + "visible": "[equals(steps('externalAccessStep').logstash, 'Yes')]" + }, + { + "name": "logstashAdditionalPlugins", + "type": "Microsoft.Common.TextBox", + "label": "Additional Logstash plugins", + "defaultValue": "", + "toolTip": "Additional Logstash plugins to install. Each plugin name must be separated by a semicolon e.g. 
logstash-input-azure_event_hubs;logstash-input-http_poller", + "visible": "[equals(steps('externalAccessStep').logstash, 'Yes')]" + }, { "name": "jumpbox", "type": "Microsoft.Common.OptionsGroup", @@ -1212,12 +1379,12 @@ "kibanaCertBlob": "", "kibanaKeyPassphrase": "", "kibanaAdditionalYaml": "", - "logstash": "No", - "vmSizeLogstash": "Standard_D1", + "logstash": "[steps('externalAccessStep').logstash]", + "vmSizeLogstash": "[steps('externalAccessStep').vmSizeLogstash]", "logstashHeapSize": 0, - "logstashConf": "", + "logstashConf": "[steps('externalAccessStep').logstashConf]", "logstashKeystorePassword": "", - "logstashAdditionalPlugins": "", + "logstashAdditionalPlugins": "[steps('externalAccessStep').logstashAdditionalPlugins]", "logstashAdditionalYaml": "", "jumpbox": "[steps('externalAccessStep').jumpbox]", "vmSizeDataNodes": "[steps('nodesStep').dataNodes.vmSizeDataNodes]", diff --git a/src/mainTemplate.json b/src/mainTemplate.json index 5fd42499..05cb45a6 100644 --- a/src/mainTemplate.json +++ b/src/mainTemplate.json @@ -501,7 +501,7 @@ "type": "string", "defaultValue": "", "metadata": { - "description": "Additional Logstash plugins to install. Each plugin must be separated by a semicolon. e.g. azure_event_hubs;http_poller" + "description": "Additional Logstash plugins to install. Each plugin must be separated by a semicolon. e.g. 
logstash-input-azure_event_hubs;logstash-input-http_poller" } }, "logstashAdditionalYaml": { From b32c829724bbfd9f5835d4140ef5993f4f7cbcd6 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 28 Sep 2018 16:37:38 +1000 Subject: [PATCH 30/31] Rename External Access section to Kibana & Logstash --- build/tasks/patch-values.js | 8 +- src/createUiDefinition.json | 755 ++++++++++++++++++------------------ 2 files changed, 394 insertions(+), 369 deletions(-) diff --git a/build/tasks/patch-values.js b/build/tasks/patch-values.js index cd5c3e5e..16a13874 100644 --- a/build/tasks/patch-values.js +++ b/build/tasks/patch-values.js @@ -85,8 +85,12 @@ gulp.task("patch", function(cb) { var masterSizeControl = _.find(masterNodesSection.elements, function (el) { return el.name == "vmSizeMasterNodes"; }); var dataSizeControl = _.find(dataNodesSection.elements, function (el) { return el.name == "vmSizeDataNodes"; }); var clientSizeControl = _.find(clientNodesSection.elements, function (el) { return el.name == "vmSizeClientNodes"; }); - var kibanaSizeControl = _.find(externalAccessStep.elements, function (el) { return el.name == "vmSizeKibana"; }); - var logstashSizeControl = _.find(externalAccessStep.elements, function (el) { return el.name == "vmSizeLogstash"; }); + + var kibanaSection = _.find(externalAccessStep.elements, function (el) { return el.name == "kibanaSection"; }); + var kibanaSizeControl = _.find(kibanaSection.elements, function (el) { return el.name == "vmSizeKibana"; }); + + var logstashSection = _.find(externalAccessStep.elements, function (el) { return el.name == "logstashSection"; }); + var logstashSizeControl = _.find(logstashSection.elements, function (el) { return el.name == "vmSizeLogstash"; }); var patchVmSizes = function(control, allowedSizes, patchRecommended, recommendedSize) { delete control.constraints.allowedValues; diff --git a/src/createUiDefinition.json b/src/createUiDefinition.json index 6cb8fda3..d9fa317c 100644 --- a/src/createUiDefinition.json +++ 
b/src/createUiDefinition.json @@ -879,14 +879,392 @@ } ] }, + { + "name": "externalAccessStep", + "label": "Kibana & Logstash", + "subLabel": { + "preValidation": "Required", + "postValidation": "Done" + }, + "bladeTitle": "Kibana & Logstash", + "elements": [ + { + "name": "kibanaSection", + "type": "Microsoft.Common.Section", + "label": "Kibana", + "elements": [ + { + "name": "kibana", + "type": "Microsoft.Common.OptionsGroup", + "label": "Install Kibana?", + "defaultValue": "Yes", + "toolTip": "Yes, to provision a single Kibana instance.", + "constraints": { + "allowedValues": [ + { + "label": "Yes", + "value": "Yes" + }, + { + "label": "No", + "value": "No" + } + ] + } + }, + { + "name": "vmSizeKibana", + "type": "Microsoft.Compute.SizeSelector", + "label": "Kibana VM size", + "toolTip": "Choose VM SKU, Standard D1, D2, D3", + "visible": "[equals(steps('externalAccessStep').kibanaSection.kibana, 'Yes')]", + "recommendedSizes": [ + "Standard_D1", + "Standard_D2", + "Standard_D3" + ], + "constraints": { + "allowedSizes": [ + "Standard_A2", + "Standard_A3", + "Standard_A4", + "Standard_A5", + "Standard_A6", + "Standard_A7", + "Standard_A8", + "Standard_A9", + "Standard_A10", + "Standard_A11", + "Standard_D1", + "Standard_D2", + "Standard_D3", + "Standard_D4", + "Standard_D11", + "Standard_D12", + "Standard_D13", + "Standard_D14", + "Standard_D1_v2", + "Standard_D2_v2", + "Standard_D3_v2", + "Standard_D4_v2", + "Standard_D5_v2", + "Standard_D11_v2", + "Standard_D12_v2", + "Standard_D13_v2", + "Standard_D14_v2", + "Standard_D15_v2", + "Standard_DS1", + "Standard_DS2", + "Standard_DS3", + "Standard_DS4", + "Standard_DS11", + "Standard_DS12", + "Standard_DS13", + "Standard_DS14", + "Standard_DS1_v2", + "Standard_DS2_v2", + "Standard_DS3_v2", + "Standard_DS4_v2", + "Standard_DS5_v2", + "Standard_DS11_v2", + "Standard_DS12_v2", + "Standard_DS13_v2", + "Standard_DS14_v2", + "Standard_DS15_v2", + "Standard_D2s_v3", + "Standard_D4s_v3", + "Standard_D8s_v3", + 
"Standard_D16s_v3", + "Standard_D32s_v3", + "Standard_D64s_v3", + "Standard_E2_v3", + "Standard_E4_v3", + "Standard_E8_v3", + "Standard_E16_v3", + "Standard_E32_v3", + "Standard_E64_v3", + "Standard_E64i_v3", + "Standard_E2s_v3", + "Standard_E4s_v3", + "Standard_E8s_v3", + "Standard_E16s_v3", + "Standard_E32s_v3", + "Standard_E64s_v3", + "Standard_E64is_v3", + "Standard_F1", + "Standard_F2", + "Standard_F4", + "Standard_F8", + "Standard_F16", + "Standard_F1s", + "Standard_F2s", + "Standard_F4s", + "Standard_F8s", + "Standard_F16s", + "Standard_G1", + "Standard_G2", + "Standard_G3", + "Standard_G4", + "Standard_G5", + "Standard_GS1", + "Standard_GS2", + "Standard_GS3", + "Standard_GS4", + "Standard_GS5", + "Standard_L4s", + "Standard_L8s", + "Standard_L16s", + "Standard_L32s", + "Standard_M8ms", + "Standard_M16ms", + "Standard_M32ts", + "Standard_M32ls", + "Standard_M32ms", + "Standard_M64s", + "Standard_M64ls", + "Standard_M64ms", + "Standard_M128s", + "Standard_M128ms", + "Standard_M64", + "Standard_M64m", + "Standard_M128", + "Standard_M128m" + ] + }, + "osPlatform": "Linux", + "count": "1" + } + ] + }, + { + "name": "logstashSection", + "type": "Microsoft.Common.Section", + "label": "Logstash", + "elements": [ + { + "name": "logstash", + "type": "Microsoft.Common.OptionsGroup", + "label": "Install Logstash?", + "defaultValue": "No", + "toolTip": "Yes, to provision a single Logstash instance.", + "constraints": { + "allowedValues": [ + { + "label": "Yes", + "value": "Yes" + }, + { + "label": "No", + "value": "No" + } + ] + } + }, + { + "name": "vmSizeLogstash", + "type": "Microsoft.Compute.SizeSelector", + "label": "Logstash VM size", + "toolTip": "Choose VM SKU, Standard D1, D2, D3", + "visible": "[equals(steps('externalAccessStep').logstashSection.logstash, 'Yes')]", + "recommendedSizes": [ + "Standard_D1", + "Standard_D2", + "Standard_D3" + ], + "constraints": { + "allowedSizes": [ + "Standard_A0", + "Standard_A1", + "Standard_A2", + "Standard_A3", + 
"Standard_A4", + "Standard_A5", + "Standard_A6", + "Standard_A7", + "Standard_A8", + "Standard_A9", + "Standard_A10", + "Standard_A11", + "Standard_D1", + "Standard_D2", + "Standard_D3", + "Standard_D4", + "Standard_D11", + "Standard_D12", + "Standard_D13", + "Standard_D14", + "Standard_D1_v2", + "Standard_D2_v2", + "Standard_D3_v2", + "Standard_D4_v2", + "Standard_D5_v2", + "Standard_D11_v2", + "Standard_D12_v2", + "Standard_D13_v2", + "Standard_D14_v2", + "Standard_D15_v2", + "Standard_DS1", + "Standard_DS2", + "Standard_DS3", + "Standard_DS4", + "Standard_DS11", + "Standard_DS12", + "Standard_DS13", + "Standard_DS14", + "Standard_DS1_v2", + "Standard_DS2_v2", + "Standard_DS3_v2", + "Standard_DS4_v2", + "Standard_DS5_v2", + "Standard_DS11_v2", + "Standard_DS12_v2", + "Standard_DS13_v2", + "Standard_DS14_v2", + "Standard_DS15_v2", + "Standard_D2s_v3", + "Standard_D4s_v3", + "Standard_D8s_v3", + "Standard_D16s_v3", + "Standard_D32s_v3", + "Standard_D64s_v3", + "Standard_E2_v3", + "Standard_E4_v3", + "Standard_E8_v3", + "Standard_E16_v3", + "Standard_E32_v3", + "Standard_E64_v3", + "Standard_E64i_v3", + "Standard_E2s_v3", + "Standard_E4s_v3", + "Standard_E8s_v3", + "Standard_E16s_v3", + "Standard_E32s_v3", + "Standard_E64s_v3", + "Standard_E64is_v3", + "Standard_F1", + "Standard_F2", + "Standard_F4", + "Standard_F8", + "Standard_F16", + "Standard_F1s", + "Standard_F2s", + "Standard_F4s", + "Standard_F8s", + "Standard_F16s", + "Standard_G1", + "Standard_G2", + "Standard_G3", + "Standard_G4", + "Standard_G5", + "Standard_GS1", + "Standard_GS2", + "Standard_GS3", + "Standard_GS4", + "Standard_GS5", + "Standard_L4s", + "Standard_L8s", + "Standard_L16s", + "Standard_L32s", + "Standard_M8ms", + "Standard_M16ms", + "Standard_M32ts", + "Standard_M32ls", + "Standard_M32ms", + "Standard_M64s", + "Standard_M64ls", + "Standard_M64ms", + "Standard_M128s", + "Standard_M128ms", + "Standard_M64", + "Standard_M64m", + "Standard_M128", + "Standard_M128m" + ] + }, + "osPlatform": 
"Linux", + "count": "1" + }, + { + "name": "logstashConf", + "type": "Microsoft.Common.FileUpload", + "label": "Logstash config file", + "toolTip": "A Logstash config file to use when Logstash starts. \"${ELASTICSEARCH_URL}\" can be used to reference the Elasticsearch URL", + "constraints": { + "accept": ".conf" + }, + "options": { + "multiple": false, + "uploadMode": "file", + "openMode": "binary", + "encoding": "UTF-8" + }, + "visible": "[equals(steps('externalAccessStep').logstashSection.logstash, 'Yes')]" + }, + { + "name": "logstashAdditionalPlugins", + "type": "Microsoft.Common.TextBox", + "label": "Additional Logstash plugins", + "defaultValue": "", + "toolTip": "Additional Logstash plugins to install. Each plugin name must be separated by a semicolon e.g. logstash-input-azure_event_hubs;logstash-input-http_poller", + "visible": "[equals(steps('externalAccessStep').logstashSection.logstash, 'Yes')]" + } + ] + }, + { + "name": "externalAccessSection", + "type": "Microsoft.Common.Section", + "label": "External Access", + "elements": [ + { + "name": "jumpbox", + "type": "Microsoft.Common.OptionsGroup", + "label": "Use a jump box?", + "defaultValue": "No", + "toolTip": "A jump box allows you to connect to your cluster from a public access point like SSH. 
This is usually not necessary if Kibana is installed, since Kibana itself acts as a jump box.", + "constraints": { + "allowedValues": [ + { + "label": "Yes", + "value": "Yes" + }, + { + "label": "No", + "value": "No" + } + ] + } + }, + { + "name": "loadBalancerType", + "type": "Microsoft.Common.OptionsGroup", + "label": "Load balancer type", + "defaultValue": "Internal", + "toolTip": "Choose whether the load balancer should be public facing (external) or internal.", + "constraints": { + "allowedValues": [ + { + "label": "Internal", + "value": "internal" + }, + { + "label": "External", + "value": "external" + } + ] + } + } + ] + } + ] + }, { "name": "securityStep", - "label": "User Configuration", + "label": "Security", "subLabel": { "preValidation": "Required", "postValidation": "Done" }, - "bladeTitle": "User Configuration", + "bladeTitle": "Security", "elements": [ { "name": "es_admin", @@ -985,363 +1363,6 @@ ] } ] - }, - { - "name": "externalAccessStep", - "label": "External Access", - "subLabel": { - "preValidation": "Required", - "postValidation": "Done" - }, - "bladeTitle": "External Access Control", - "elements": [ - { - "name": "kibana", - "type": "Microsoft.Common.OptionsGroup", - "label": "Install Kibana?", - "defaultValue": "Yes", - "toolTip": "Yes, to provision a single Kibana instance.", - "constraints": { - "allowedValues": [ - { - "label": "Yes", - "value": "Yes" - }, - { - "label": "No", - "value": "No" - } - ] - } - }, - { - "name": "vmSizeKibana", - "type": "Microsoft.Compute.SizeSelector", - "label": "Kibana VM size", - "toolTip": "Choose VM SKU, Standard D1, D2, D3", - "visible": "[equals(steps('externalAccessStep').kibana, 'Yes')]", - "recommendedSizes": [ - "Standard_D1", - "Standard_D2", - "Standard_D3" - ], - "constraints": { - "allowedSizes": [ - "Standard_A2", - "Standard_A3", - "Standard_A4", - "Standard_A5", - "Standard_A6", - "Standard_A7", - "Standard_A8", - "Standard_A9", - "Standard_A10", - "Standard_A11", - "Standard_D1", - 
"Standard_D2", - "Standard_D3", - "Standard_D4", - "Standard_D11", - "Standard_D12", - "Standard_D13", - "Standard_D14", - "Standard_D1_v2", - "Standard_D2_v2", - "Standard_D3_v2", - "Standard_D4_v2", - "Standard_D5_v2", - "Standard_D11_v2", - "Standard_D12_v2", - "Standard_D13_v2", - "Standard_D14_v2", - "Standard_D15_v2", - "Standard_DS1", - "Standard_DS2", - "Standard_DS3", - "Standard_DS4", - "Standard_DS11", - "Standard_DS12", - "Standard_DS13", - "Standard_DS14", - "Standard_DS1_v2", - "Standard_DS2_v2", - "Standard_DS3_v2", - "Standard_DS4_v2", - "Standard_DS5_v2", - "Standard_DS11_v2", - "Standard_DS12_v2", - "Standard_DS13_v2", - "Standard_DS14_v2", - "Standard_DS15_v2", - "Standard_D2s_v3", - "Standard_D4s_v3", - "Standard_D8s_v3", - "Standard_D16s_v3", - "Standard_D32s_v3", - "Standard_D64s_v3", - "Standard_E2_v3", - "Standard_E4_v3", - "Standard_E8_v3", - "Standard_E16_v3", - "Standard_E32_v3", - "Standard_E64_v3", - "Standard_E64i_v3", - "Standard_E2s_v3", - "Standard_E4s_v3", - "Standard_E8s_v3", - "Standard_E16s_v3", - "Standard_E32s_v3", - "Standard_E64s_v3", - "Standard_E64is_v3", - "Standard_F1", - "Standard_F2", - "Standard_F4", - "Standard_F8", - "Standard_F16", - "Standard_F1s", - "Standard_F2s", - "Standard_F4s", - "Standard_F8s", - "Standard_F16s", - "Standard_G1", - "Standard_G2", - "Standard_G3", - "Standard_G4", - "Standard_G5", - "Standard_GS1", - "Standard_GS2", - "Standard_GS3", - "Standard_GS4", - "Standard_GS5", - "Standard_L4s", - "Standard_L8s", - "Standard_L16s", - "Standard_L32s", - "Standard_M8ms", - "Standard_M16ms", - "Standard_M32ts", - "Standard_M32ls", - "Standard_M32ms", - "Standard_M64s", - "Standard_M64ls", - "Standard_M64ms", - "Standard_M128s", - "Standard_M128ms", - "Standard_M64", - "Standard_M64m", - "Standard_M128", - "Standard_M128m" - ] - }, - "osPlatform": "Linux", - "count": "1" - }, - { - "name": "logstash", - "type": "Microsoft.Common.OptionsGroup", - "label": "Install Logstash?", - "defaultValue": "No", - 
"toolTip": "Yes, to provision a single Logstash instance.", - "constraints": { - "allowedValues": [ - { - "label": "Yes", - "value": "Yes" - }, - { - "label": "No", - "value": "No" - } - ] - } - }, - { - "name": "vmSizeLogstash", - "type": "Microsoft.Compute.SizeSelector", - "label": "Logstash VM size", - "toolTip": "Choose VM SKU, Standard D1, D2, D3", - "visible": "[equals(steps('externalAccessStep').logstash, 'Yes')]", - "recommendedSizes": [ - "Standard_D1", - "Standard_D2", - "Standard_D3" - ], - "constraints": { - "allowedSizes": [ - "Standard_A0", - "Standard_A1", - "Standard_A2", - "Standard_A3", - "Standard_A4", - "Standard_A5", - "Standard_A6", - "Standard_A7", - "Standard_A8", - "Standard_A9", - "Standard_A10", - "Standard_A11", - "Standard_D1", - "Standard_D2", - "Standard_D3", - "Standard_D4", - "Standard_D11", - "Standard_D12", - "Standard_D13", - "Standard_D14", - "Standard_D1_v2", - "Standard_D2_v2", - "Standard_D3_v2", - "Standard_D4_v2", - "Standard_D5_v2", - "Standard_D11_v2", - "Standard_D12_v2", - "Standard_D13_v2", - "Standard_D14_v2", - "Standard_D15_v2", - "Standard_DS1", - "Standard_DS2", - "Standard_DS3", - "Standard_DS4", - "Standard_DS11", - "Standard_DS12", - "Standard_DS13", - "Standard_DS14", - "Standard_DS1_v2", - "Standard_DS2_v2", - "Standard_DS3_v2", - "Standard_DS4_v2", - "Standard_DS5_v2", - "Standard_DS11_v2", - "Standard_DS12_v2", - "Standard_DS13_v2", - "Standard_DS14_v2", - "Standard_DS15_v2", - "Standard_D2s_v3", - "Standard_D4s_v3", - "Standard_D8s_v3", - "Standard_D16s_v3", - "Standard_D32s_v3", - "Standard_D64s_v3", - "Standard_E2_v3", - "Standard_E4_v3", - "Standard_E8_v3", - "Standard_E16_v3", - "Standard_E32_v3", - "Standard_E64_v3", - "Standard_E64i_v3", - "Standard_E2s_v3", - "Standard_E4s_v3", - "Standard_E8s_v3", - "Standard_E16s_v3", - "Standard_E32s_v3", - "Standard_E64s_v3", - "Standard_E64is_v3", - "Standard_F1", - "Standard_F2", - "Standard_F4", - "Standard_F8", - "Standard_F16", - "Standard_F1s", - 
"Standard_F2s", - "Standard_F4s", - "Standard_F8s", - "Standard_F16s", - "Standard_G1", - "Standard_G2", - "Standard_G3", - "Standard_G4", - "Standard_G5", - "Standard_GS1", - "Standard_GS2", - "Standard_GS3", - "Standard_GS4", - "Standard_GS5", - "Standard_L4s", - "Standard_L8s", - "Standard_L16s", - "Standard_L32s", - "Standard_M8ms", - "Standard_M16ms", - "Standard_M32ts", - "Standard_M32ls", - "Standard_M32ms", - "Standard_M64s", - "Standard_M64ls", - "Standard_M64ms", - "Standard_M128s", - "Standard_M128ms", - "Standard_M64", - "Standard_M64m", - "Standard_M128", - "Standard_M128m" - ] - }, - "osPlatform": "Linux", - "count": "1" - }, - { - "name": "logstashConf", - "type": "Microsoft.Common.FileUpload", - "label": "Logstash config file", - "toolTip": "A Logstash config file to use when Logstash starts. \"${ELASTICSEARCH_URL}\" can be used to reference the Elasticsearch URL", - "constraints": { - "accept": ".conf" - }, - "options": { - "multiple": false, - "uploadMode": "file", - "openMode": "binary", - "encoding": "UTF-8" - }, - "visible": "[equals(steps('externalAccessStep').logstash, 'Yes')]" - }, - { - "name": "logstashAdditionalPlugins", - "type": "Microsoft.Common.TextBox", - "label": "Additional Logstash plugins", - "defaultValue": "", - "toolTip": "Additional Logstash plugins to install. Each plugin name must be separated by a semicolon e.g. logstash-input-azure_event_hubs;logstash-input-http_poller", - "visible": "[equals(steps('externalAccessStep').logstash, 'Yes')]" - }, - { - "name": "jumpbox", - "type": "Microsoft.Common.OptionsGroup", - "label": "Use a jump box?", - "defaultValue": "No", - "toolTip": "A jump box allows you to connect to your cluster from a public access point like SSH. 
This is usually not necessary if Kibana is installed, since Kibana itself acts as a jump box.", - "constraints": { - "allowedValues": [ - { - "label": "Yes", - "value": "Yes" - }, - { - "label": "No", - "value": "No" - } - ] - } - }, - { - "name": "loadBalancerType", - "type": "Microsoft.Common.OptionsGroup", - "label": "Load balancer type", - "defaultValue": "Internal", - "toolTip": "Choose whether the load balancer should be public facing (external) or internal.", - "constraints": { - "allowedValues": [ - { - "label": "Internal", - "value": "internal" - }, - { - "label": "External", - "value": "external" - } - ] - } - } - ] } ], "outputs": { @@ -1363,7 +1384,7 @@ "samlMetadataUri": "", "samlServiceProviderUri": "", "esHeapSize": 0, - "loadBalancerType": "[steps('externalAccessStep').loadBalancerType]", + "loadBalancerType": "[steps('externalAccessStep').externalAccessSection.loadBalancerType]", "vNetNewOrExisting": "[steps('clusterSettingsStep').virtualNetworkConfiguration.virtualNetwork.newOrExisting]", "vNetName": "[steps('clusterSettingsStep').virtualNetworkConfiguration.virtualNetwork.name]", "vNetClusterSubnetName": "[steps('clusterSettingsStep').virtualNetworkConfiguration.virtualNetwork.subnets.subnet1.name]", @@ -1373,20 +1394,20 @@ "vNetNewClusterSubnetAddressPrefix": "[steps('clusterSettingsStep').virtualNetworkConfiguration.virtualNetwork.subnets.subnet1.addressPrefix]", "vNetAppGatewaySubnetName": "es-gateway-subnet", "vNetNewAppGatewaySubnetAddressPrefix": "10.0.0.128/28", - "kibana": "[steps('externalAccessStep').kibana]", - "vmSizeKibana": "[steps('externalAccessStep').vmSizeKibana]", + "kibana": "[steps('externalAccessStep').kibanaSection.kibana]", + "vmSizeKibana": "[steps('externalAccessStep').kibanaSection.vmSizeKibana]", "kibanaKeyBlob": "", "kibanaCertBlob": "", "kibanaKeyPassphrase": "", "kibanaAdditionalYaml": "", - "logstash": "[steps('externalAccessStep').logstash]", - "vmSizeLogstash": "[steps('externalAccessStep').vmSizeLogstash]", + 
"logstash": "[steps('externalAccessStep').logstashSection.logstash]", + "vmSizeLogstash": "[steps('externalAccessStep').logstashSection.vmSizeLogstash]", "logstashHeapSize": 0, - "logstashConf": "[steps('externalAccessStep').logstashConf]", + "logstashConf": "[steps('externalAccessStep').logstashSection.logstashConf]", "logstashKeystorePassword": "", - "logstashAdditionalPlugins": "[steps('externalAccessStep').logstashAdditionalPlugins]", + "logstashAdditionalPlugins": "[steps('externalAccessStep').logstashSection.logstashAdditionalPlugins]", "logstashAdditionalYaml": "", - "jumpbox": "[steps('externalAccessStep').jumpbox]", + "jumpbox": "[steps('externalAccessStep').externalAccessSection.jumpbox]", "vmSizeDataNodes": "[steps('nodesStep').dataNodes.vmSizeDataNodes]", "vmDataDiskCount": "[int(steps('nodesStep').dataNodesDisks.vmDataDiskCount)]", "vmDataDiskSize": "[steps('nodesStep').dataNodesDisks.vmDataDiskSize]", From ae1f3ddb8d8fcbf67857266991d65e878bc86b2e Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Fri, 28 Sep 2018 17:29:28 +1000 Subject: [PATCH 31/31] Documentation for Logstash --- docs/azure-arm-template.asciidoc | 91 ++++++++++++++++++++++++++++---- docs/faqs.asciidoc | 11 ++-- 2 files changed, 86 insertions(+), 16 deletions(-) diff --git a/docs/azure-arm-template.asciidoc b/docs/azure-arm-template.asciidoc index 4d1700e1..7975433c 100644 --- a/docs/azure-arm-template.asciidoc +++ b/docs/azure-arm-template.asciidoc @@ -835,7 +835,7 @@ The following parameters can be used to deploy Kibana, and control additional co Whether to deploy Kibana in addition to Elasticsearch. A value of `Yes` will also deploy Kibana, whilst `No` will not. Defaults to `Yes`. `vmSizeKibana`:: -The {vms}[Azure VM SKU] to use for data nodes. Different VM SKUs have different CPU, RAM, +The {vms}[Azure VM SKU] to use for Kibana. Different VM SKUs have different CPU, RAM, temporary storage space and network bandwidth. The Kibana VM always uses standard storage for the OS disk. 
The default value is `Standard_A2`. [NOTE] @@ -864,6 +864,86 @@ Kibana is deployed with Kibana communicates with Elasticsearch through the <> +=== Logstash + +A single instance of Logstash can be deployed in addition to Elasticsearch, providing a pipeline for ingesting data into Elasticsearch. +The version of Logstash deployed is always the same as the version of Elasticsearch, ensuring compatibility between products. + +The following parameters can be used to deploy Logstash, and control additional configuration + +`logstash`:: +Whether to deploy Logstash in addition to Elasticsearch. A value of `Yes` will also deploy Logstash, whilst `No` will not. Defaults to `No`. + +`vmSizeLogstash`:: +The {vms}[Azure VM SKU] to use for Logstash. Different VM SKUs have different CPU, RAM, +temporary storage space and network bandwidth. The Logstash VM always uses standard storage for the OS disk. The default value is `Standard_D1`. + +[NOTE] +-- +The template deploys only a single instance of Logstash. You should ensure that a VM SKU +of sufficient size is chosen to be able to handle the expected amount of traffic. A larger VM +SKU will generally be faster and have better bandwidth than a smaller VM SKU. +-- + +`logstashHeapSize`:: +The amount of memory, in megabytes, to allocate to Logstash for the JVM heap. +Default will allocate whatever the default is within jvm.options for the version +of Logstash deployed. + +`logstashConf`:: +A Base-64 encoded string form of Logstash configuration file with which to start Logstash. +A number of parameters are configured that can be referenced from the configuration file ++ +[horizontal] +`${ELASTICSEARCH_URL}`::: the Elasticsearch endpoint +`${LOGSTASH_SYSTEM_PASSWORD}`::: password of the built-in `logstash_system` user +`${ELASTICSEARCH_CACERT}`::: the path to the CA cert used to secure the Elasticsearch HTTP layer. 
+Only set when Transport Layer Security is configured for the <> + +[IMPORTANT] +.TLS with Logstash monitoring +-- +When Transport Layer Security is configured for the Elasticsearch HTTP layer, +Logstash is configured to perform verification against the certificate presented, +using the CA certificate used to secure the Elasticsearch HTTP layer. + +Logstash communicates with Elasticsearch through the IP address of the internal load balancer, which means +that a certificate provided with `esHttpCertBlob` is unlikely to pass hostname +verification. In Logstash 6.4.0+, `xpack.monitoring.elasticsearch.ssl.verification_mode` is set to `none`. +For prior versions of Logstash, monitoring is not enabled when a certificate has been provided with `esHttpCertBlob`. + +When a CA certificate is provided with `esHttpCaCertBlob`, the generated certificates used to +secure the Elasticsearch HTTP layer include the internal load balancer IP address, meaning +monitoring can be enabled for all versions where Transport Layer Security is configured +for the Elasticsearch HTTP layer. +-- + +`logstashKeystorePassword`:: +Security password for Logstash keystore, used to store values in Logstash 6.2.0 onwards. ++ +If no value is supplied, a password will be generated using the +ARM template `uniqueString()` function. + +`logstashAdditionalPlugins`:: +Additional Logstash plugins to install. Each plugin must be separated by a semicolon. For example ++ +[source,text] +---- +logstash-input-azure_event_hubs;logstash-input-http_poller +---- + +`logstashAdditionalYaml`:: +Additional configuration that will be applied to the logstash.yml configuration file before start up. Each line must be separated by a `\n` newline character, for example ++ +[source,text] +---- +"pipeline.batch.size: 125\npipeline.batch.delay: 50" +---- ++ +It is recommended that you run your additional yaml through a {yamllint}[linter] before starting a deployment, as incorrectly formatted yaml will fail the deployment. 
+ +Logstash only accessible within the Virtual Network and communicates with Elasticsearch through the <> + [[security]] === Security @@ -916,13 +996,6 @@ than six characters in length. Security password for the `logstash_system` built-in user account. This is the account that Logstash can use to communicate with Elasticsearch. Must be greater than six characters in length. -+ -[NOTE] --- -The template does **not** currently deploy Logstash. The `logstash_system` built-in user account -must still be configured however, and can be used to communicate with Elasticsearch -if Logstash is also deployed later. --- `securityReadPassword`:: Security password for an `es_read` user account configured with a user (read-only) role with @@ -1330,7 +1403,7 @@ One way to generate a PKCS#12 archive containing a CA certificate and key is usi [[security-kibana]] ==== Kibana -You can secure communication between the browser and Kibana with TLS wiht the following parameters +You can secure communication between the browser and Kibana with TLS with the following parameters `kibanaCertBlob`:: A base 64 encoded string of the PEM certificate used to secure communication between the browser and Kibana. diff --git a/docs/faqs.asciidoc b/docs/faqs.asciidoc index ce3158ab..ca2d7406 100644 --- a/docs/faqs.asciidoc +++ b/docs/faqs.asciidoc @@ -73,10 +73,7 @@ ssh @ See the <> for more details. Does the Marketplace solution deploy Logstash?:: -The Marketplace solution **does not** currently deploy Logstash, only Elasticsearch -and Kibana. There is an {github}/issues/157[open issue] to track and discuss the inclusion of Logstash -within the template. -+ -You can deploy Logstash manually by creating a new VM, attaching it to the same -virtual network to which the cluster is attached, and deploy Logstash onto the VM by -using SSH through either the Kibana VM or Jumpbox VM. +The Marketplace solution can also deploy Logstash, although it does not do so by default. 
+To also deploy an instance of Logstash, select `Yes` for `Install Logstash?` in the
+`Kibana & Logstash` section. You can also install additional plugins and provide a
+Logstash config file to use.
ParameterTypeDescriptionDefault Value