Merge branch 'deviantony:main' into master
waitesgithub authored Dec 5, 2023
2 parents 29ee390 + aa2902e commit 985b3ca
Showing 30 changed files with 332 additions and 242 deletions.
2 changes: 1 addition & 1 deletion .env
@@ -1,4 +1,4 @@
ELASTIC_VERSION=8.5.3
ELASTIC_VERSION=8.11.1

## Passwords for stack users
#
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/config.yml
@@ -7,5 +7,5 @@ contact_links:
url: https://forums.docker.com
about: Please ask questions related to the usage of Docker products in those forums.
- name: docker-elk Gitter chat room
url: https://gitter.im/deviantony/docker-elk
url: https://app.gitter.im/#/room/#deviantony_docker-elk:gitter.im
about: General questions regarding this project can also be asked in the chat.
94 changes: 47 additions & 47 deletions .github/workflows/ci.yml
@@ -14,8 +14,11 @@ jobs:
# https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners
runs-on: ubuntu-22.04

env:
COMPOSE_PROJECT_NAME: docker-elk

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

#####################################################
# #
@@ -61,22 +64,18 @@ jobs:
##########################################################

- name: Run the stack
run: docker compose up -d
run: |
docker compose up setup
docker compose up -d
# Elasticsearch's high disk watermark gets regularly exceeded on GitHub Actions runners.
# https://www.elastic.co/guide/en/elasticsearch/reference/8.10/fix-watermark-errors.html
- name: Disable Elasticsearch disk allocation decider
run: .github/workflows/scripts/disable-disk-alloc-decider.sh

- name: Execute core test suite
run: .github/workflows/scripts/run-tests-core.sh

- name: 'debug: Display state and logs (core)'
# https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idif
# https://docs.github.com/en/free-pro-team@latest/actions/reference/context-and-expression-syntax-for-github-actions#job-status-check-functions
if: always()
run: |
docker compose ps
docker compose logs setup
docker compose logs elasticsearch
docker compose logs logstash
docker compose logs kibana
##############################
# #
# Test supported extensions. #
@@ -108,11 +107,6 @@ jobs:
sed -i '/input { udp { port => 50000 codec => json } }/d' logstash/pipeline/logstash.conf
docker compose restart logstash
- name: 'debug: Display state and logs (Logspout)'
if: always()
run: |
docker compose -f docker-compose.yml -f extensions/logspout/logspout-compose.yml ps
docker compose -f docker-compose.yml -f extensions/logspout/logspout-compose.yml logs logspout
# next steps don't need Logstash
docker compose stop logstash
@@ -125,13 +119,6 @@
docker compose -f docker-compose.yml -f extensions/fleet/fleet-compose.yml -f extensions/fleet/agent-apmserver-compose.yml up --remove-orphans -d fleet-server apm-server
.github/workflows/scripts/run-tests-fleet.sh
- name: 'debug: Display state and logs (Fleet)'
if: always()
run: |
docker compose -f docker-compose.yml -f extensions/fleet/fleet-compose.yml -f extensions/fleet/agent-apmserver-compose.yml ps
docker compose -f docker-compose.yml -f extensions/fleet/fleet-compose.yml -f extensions/fleet/agent-apmserver-compose.yml logs fleet-server
docker compose -f docker-compose.yml -f extensions/fleet/fleet-compose.yml -f extensions/fleet/agent-apmserver-compose.yml logs apm-server
#
# Metricbeat
#
@@ -141,12 +128,6 @@
docker compose -f docker-compose.yml -f extensions/metricbeat/metricbeat-compose.yml up --remove-orphans -d metricbeat
.github/workflows/scripts/run-tests-metricbeat.sh
- name: 'debug: Display state and logs (Metricbeat)'
if: always()
run: |
docker compose -f docker-compose.yml -f extensions/metricbeat/metricbeat-compose.yml ps
docker compose -f docker-compose.yml -f extensions/metricbeat/metricbeat-compose.yml logs metricbeat
#
# Filebeat
#
@@ -156,12 +137,6 @@
docker compose -f docker-compose.yml -f extensions/filebeat/filebeat-compose.yml up --remove-orphans -d filebeat
.github/workflows/scripts/run-tests-filebeat.sh
- name: 'debug: Display state and logs (Filebeat)'
if: always()
run: |
docker compose -f docker-compose.yml -f extensions/filebeat/filebeat-compose.yml ps
docker compose -f docker-compose.yml -f extensions/filebeat/filebeat-compose.yml logs filebeat
#
# Heartbeat
#
@@ -171,12 +146,6 @@
docker compose -f docker-compose.yml -f extensions/heartbeat/heartbeat-compose.yml up --remove-orphans -d heartbeat
.github/workflows/scripts/run-tests-heartbeat.sh
- name: 'debug: Display state and logs (Heartbeat)'
if: always()
run: |
docker compose -f docker-compose.yml -f extensions/heartbeat/heartbeat-compose.yml ps
docker compose -f docker-compose.yml -f extensions/heartbeat/heartbeat-compose.yml logs heartbeat
#
# Enterprise Search
#
@@ -204,11 +173,42 @@ jobs:
sed -i '/xpack.security.authc.api_key.enabled: true/d' elasticsearch/config/elasticsearch.yml
docker compose restart elasticsearch
- name: 'debug: Display state and logs (Enterprise Search)'
if: always()
- name: Collect troubleshooting data
id: debug-data
if: failure()
run: |
docker compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml ps
docker compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml logs enterprise-search
declare debug_data_dir="$(mktemp -d)"
docker compose \
-f docker-compose.yml \
-f extensions/logspout/logspout-compose.yml \
-f extensions/fleet/fleet-compose.yml \
-f extensions/fleet/agent-apmserver-compose.yml \
-f extensions/metricbeat/metricbeat-compose.yml \
-f extensions/filebeat/filebeat-compose.yml \
-f extensions/heartbeat/heartbeat-compose.yml \
-f extensions/enterprise-search/enterprise-search-compose.yml \
ps >"$debug_data_dir"/docker_ps.log
docker compose \
-f docker-compose.yml \
-f extensions/logspout/logspout-compose.yml \
-f extensions/fleet/fleet-compose.yml \
-f extensions/fleet/agent-apmserver-compose.yml \
-f extensions/metricbeat/metricbeat-compose.yml \
-f extensions/filebeat/filebeat-compose.yml \
-f extensions/heartbeat/heartbeat-compose.yml \
-f extensions/enterprise-search/enterprise-search-compose.yml \
logs >"$debug_data_dir"/docker_logs.log
echo "path=${debug_data_dir}" >>"$GITHUB_OUTPUT"
- name: Upload collected troubleshooting data
if: always() && steps.debug-data.outputs.path
uses: actions/upload-artifact@v3
with:
name: debug-data
path: ${{ steps.debug-data.outputs.path }}/*.*

##############
# #
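The new 'Collect troubleshooting data' step above runs only on failure and publishes the directory it creates as a step output; the upload step is then guarded with always() && steps.debug-data.outputs.path, so it still runs after a failed job but is skipped when nothing was collected. A minimal sketch of the same $GITHUB_OUTPUT pattern, with illustrative names only:

# Shell body of a step declared with `id: debug-data` (names are illustrative).
debug_data_dir="$(mktemp -d)"
docker compose ps >"$debug_data_dir"/docker_ps.log

# Every key=value line appended to $GITHUB_OUTPUT becomes a step output that
# later steps can read via the steps context, e.g. steps.debug-data.outputs.path.
echo "path=${debug_data_dir}" >>"$GITHUB_OUTPUT"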
2 changes: 1 addition & 1 deletion .github/workflows/docs.yml
@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Check links
uses: gaurav-nelson/github-action-markdown-link-check@v1
30 changes: 30 additions & 0 deletions .github/workflows/scripts/disable-disk-alloc-decider.sh
@@ -0,0 +1,30 @@
#!/usr/bin/env bash

set -eu
set -o pipefail


source "${BASH_SOURCE[0]%/*}"/lib/testing.sh


cid_es="$(container_id elasticsearch)"
ip_es="$(service_ip elasticsearch)"

grouplog 'Wait for readiness of Elasticsearch'
poll_ready "$cid_es" "http://${ip_es}:9200/" -u 'elastic:testpasswd'
endgroup

log 'Disabling disk allocation decider'

declare -a put_args=( '-X' 'PUT' '--fail-with-body' '-s' '-u' 'elastic:testpasswd'
'-H' 'Content-Type: application/json'
"http://${ip_es}:9200/_cluster/settings?pretty"
'-d' '{"persistent":{"cluster.routing.allocation.disk.threshold_enabled":false}}'
)
declare response
declare -i exit_code=0

response=$(curl "${put_args[@]}") || exit_code=$?
echo "$response"

exit $exit_code
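
Not part of this commit, but for completeness: the override can later be removed by resetting the same persistent setting to null, which restores Elasticsearch's default behaviour (the disk allocation decider is enabled by default). A minimal sketch, assuming Elasticsearch is reachable on localhost:9200 with the elastic:testpasswd credentials used above:

#!/usr/bin/env bash
set -eu
set -o pipefail

# Setting the value to null deletes the persistent override instead of pinning
# it, so the cluster falls back to its default disk-watermark handling.
curl -X PUT --fail-with-body -s -u 'elastic:testpasswd' \
-H 'Content-Type: application/json' \
'http://localhost:9200/_cluster/settings?pretty' \
-d '{"persistent":{"cluster.routing.allocation.disk.threshold_enabled":null}}'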
11 changes: 11 additions & 0 deletions .github/workflows/scripts/lib/testing.sh
@@ -10,6 +10,17 @@ function err {
echo -e "\n[x] $1\n" >&2
}

# Start an expandable group in the GitHub Action log.
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#grouping-log-lines
function grouplog {
echo "::group::$1"
}

# End the current expandable group in the GitHub Action log.
function endgroup {
echo '::endgroup::'
}

# Return the ID of the container running the given service.
function container_id {
local svc=$1
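For reference, a minimal usage sketch of the new grouplog/endgroup helpers (the real call sites are in the test scripts further down): everything echoed between the two calls is folded into a single collapsible section of the GitHub Actions log.

#!/usr/bin/env bash
set -eu

source .github/workflows/scripts/lib/testing.sh

grouplog 'Wait for readiness of Elasticsearch'
echo 'polling...' # stand-in for poll_ready or any other chatty command
endgroup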
66 changes: 56 additions & 10 deletions .github/workflows/scripts/run-tests-core.sh
@@ -4,7 +4,7 @@ set -eu
set -o pipefail


source "$(dirname ${BASH_SOURCE[0]})/lib/testing.sh"
source "${BASH_SOURCE[0]%/*}"/lib/testing.sh


cid_es="$(container_id elasticsearch)"
@@ -15,14 +15,17 @@ ip_es="$(service_ip elasticsearch)"
ip_ls="$(service_ip logstash)"
ip_kb="$(service_ip kibana)"

log 'Waiting for readiness of Elasticsearch'
grouplog 'Wait for readiness of Elasticsearch'
poll_ready "$cid_es" "http://${ip_es}:9200/" -u 'elastic:testpasswd'
endgroup

log 'Waiting for readiness of Logstash'
grouplog 'Wait for readiness of Logstash'
poll_ready "$cid_ls" "http://${ip_ls}:9600/_node/pipelines/main?pretty"
endgroup

log 'Waiting for readiness of Kibana'
grouplog 'Wait for readiness of Kibana'
poll_ready "$cid_kb" "http://${ip_kb}:5601/api/status" -u 'kibana_system:testpasswd'
endgroup

log 'Sending message to Logstash TCP input'

@@ -43,15 +46,58 @@ if ((was_retried)); then
echo >&2
fi

sleep 5
curl -X POST "http://${ip_es}:9200/logs-generic-default/_refresh" -u elastic:testpasswd \
-s -w '\n'
# It might take a few seconds before the indices and alias are created, so we
# need to be resilient here.
was_retried=0
declare -a refresh_args=( '-X' 'POST' '-s' '-w' '%{http_code}' '-u' 'elastic:testpasswd'
"http://${ip_es}:9200/logs-generic-default/_refresh"
)

# retry for max 10s (10*1s)
for _ in $(seq 1 10); do
output="$(curl "${refresh_args[@]}")"
if [ "${output: -3}" -eq 200 ]; then
break
fi

was_retried=1
echo -n 'x' >&2
sleep 1
done
if ((was_retried)); then
# flush stderr, important in non-interactive environments (CI)
echo >&2
fi

log 'Searching message in Elasticsearch'
response="$(curl "http://${ip_es}:9200/logs-generic-default/_search?q=message:dockerelk&pretty" -s -u elastic:testpasswd)"
echo "$response"

# We don't know how much time it will take Logstash to create our document, so
# we need to be resilient here too.
was_retried=0
declare -a search_args=( '-s' '-u' 'elastic:testpasswd'
"http://${ip_es}:9200/logs-generic-default/_search?q=message:dockerelk&pretty"
)
declare -i count
count="$(jq -rn --argjson data "${response}" '$data.hits.total.value')"
declare response

# retry for max 10s (10*1s)
for _ in $(seq 1 10); do
response="$(curl "${search_args[@]}")"
count="$(jq -rn --argjson data "${response}" '$data.hits.total.value')"
if (( count )); then
break
fi

was_retried=1
echo -n 'x' >&2
sleep 1
done
if ((was_retried)); then
# flush stderr, important in non-interactive environments (CI)
echo >&2
fi

echo "$response"
if (( count != 1 )); then
echo "Expected 1 document, got ${count}"
exit 1
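As an aside, the jq invocation used above relies on --argjson to parse the raw search response into the $data variable and -n to run without stdin, so the filter simply walks the parsed document. A tiny standalone illustration with a hand-written response:

# Illustrative only: a hand-written Elasticsearch search response.
response='{"hits":{"total":{"value":1,"relation":"eq"},"hits":[]}}'
count="$(jq -rn --argjson data "${response}" '$data.hits.total.value')"
echo "$count" # prints 1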
8 changes: 5 additions & 3 deletions .github/workflows/scripts/run-tests-enterprise-search.sh
@@ -4,7 +4,7 @@ set -eu
set -o pipefail


source "$(dirname ${BASH_SOURCE[0]})/lib/testing.sh"
source "${BASH_SOURCE[0]%/*}"/lib/testing.sh


cid_es="$(container_id elasticsearch)"
@@ -13,11 +13,13 @@ cid_en="$(container_id enterprise-search)"
ip_es="$(service_ip elasticsearch)"
ip_en="$(service_ip enterprise-search)"

log 'Waiting for readiness of Elasticsearch'
grouplog 'Wait for readiness of Elasticsearch'
poll_ready "$cid_es" "http://${ip_es}:9200/" -u 'elastic:testpasswd'
endgroup

log 'Waiting for readiness of Enterprise Search'
grouplog 'Wait for readiness of Enterprise Search'
poll_ready "$cid_en" "http://${ip_en}:3002/api/ent/v1/internal/health" -u 'elastic:testpasswd'
endgroup

log 'Ensuring that App Search API keys were created in Elasticsearch'
response="$(curl "http://${ip_es}:9200/.ent-search-actastic-app_search_api_tokens_v3/_search?q=*:*&pretty" -s -u elastic:testpasswd)"
8 changes: 5 additions & 3 deletions .github/workflows/scripts/run-tests-filebeat.sh
@@ -4,7 +4,7 @@ set -eu
set -o pipefail


source "$(dirname ${BASH_SOURCE[0]})/lib/testing.sh"
source "${BASH_SOURCE[0]%/*}"/lib/testing.sh


cid_es="$(container_id elasticsearch)"
@@ -13,11 +13,13 @@ cid_mb="$(container_id filebeat)"
ip_es="$(service_ip elasticsearch)"
ip_mb="$(service_ip filebeat)"

log 'Waiting for readiness of Elasticsearch'
grouplog 'Wait for readiness of Elasticsearch'
poll_ready "$cid_es" "http://${ip_es}:9200/" -u 'elastic:testpasswd'
endgroup

log 'Waiting for readiness of Filebeat'
grouplog 'Wait for readiness of Filebeat'
poll_ready "$cid_mb" "http://${ip_mb}:5066/?pretty"
endgroup

# We expect to find log entries for the 'elasticsearch' Compose service using
# the following query: