Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add system test for libbeat internal x-pack monitoring #10645

Merged
merged 11 commits into from
Feb 11, 2019
22 changes: 16 additions & 6 deletions libbeat/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ services:
- KAFKA_PORT=9092
- KIBANA_HOST=kibana
- KIBANA_PORT=5601
- ES_MONITORING_HOST=elasticsearch_monitoring
- ES_MONITORING_PORT=9200
env_file:
- ${PWD}/build/test.env
volumes:
Expand All @@ -32,12 +34,13 @@ services:
proxy_dep:
image: busybox
depends_on:
elasticsearch: { condition: service_healthy }
logstash: { condition: service_healthy }
kafka: { condition: service_healthy }
redis: { condition: service_healthy }
sredis: { condition: service_healthy }
kibana: { condition: service_healthy }
elasticsearch: { condition: service_healthy }
elasticsearch_monitoring: { condition: service_healthy }
logstash: { condition: service_healthy }
kafka: { condition: service_healthy }
redis: { condition: service_healthy }
sredis: { condition: service_healthy }
kibana: { condition: service_healthy }
healthcheck:
interval: 1s
retries: 1200
Expand All @@ -47,6 +50,13 @@ services:
file: ${ES_BEATS}/testing/environments/${TESTING_ENVIRONMENT}.yml
service: elasticsearch

elasticsearch_monitoring:
extends:
file: ${ES_BEATS}/testing/environments/${TESTING_ENVIRONMENT}.yml
service: elasticsearch
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9200"]

# This host name is static because of the certificate.
logstash:
extends:
Expand Down
8 changes: 8 additions & 0 deletions libbeat/tests/system/config/mockbeat.yml.j2
Original file line number Diff line number Diff line change
Expand Up @@ -102,4 +102,12 @@ logging.metrics.period: {{ metrics_period }}
keystore.path: {{keystore_path}}
{% endif %}

{% if xpack and xpack.monitoring -%}
#================================ X-Pack Monitoring =====================================
xpack.monitoring.elasticsearch.hosts: {{xpack.monitoring.elasticsearch.hosts}}
xpack.monitoring.elasticsearch.metrics.period: 2s # to speed up tests
xpack.monitoring.elasticsearch.state.period: 3s # to speed up tests
{% endif -%}

# vim: set ft=jinja:

98 changes: 98 additions & 0 deletions libbeat/tests/system/test_monitoring.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
from base import BaseTest
import os
from elasticsearch import Elasticsearch
import re
from nose.plugins.attrib import attr
import unittest

# Integration tests run only when INTEGRATION_TESTS is set in the environment;
# any non-empty string enables them (unset defaults to False).
INTEGRATION_TESTS = os.environ.get('INTEGRATION_TESTS', False)


class Test(BaseTest):
    """System tests for libbeat internal x-pack monitoring.

    Runs mockbeat with ``xpack.monitoring`` pointed at the Elasticsearch
    output cluster, which in turn exports the monitoring documents to a
    dedicated monitoring cluster, then verifies the expected ``beats_stats``
    and ``beats_state`` documents were indexed there.
    """

    def setUp(self):
        # Fix: the original called super(BaseTest, self).setUp(), which looks
        # up the MRO *after* BaseTest and therefore skips BaseTest.setUp()
        # entirely. We need BaseTest's own setUp to run.
        super(Test, self).setUp()

        # Client for the output cluster (where mockbeat ships events).
        self.es = Elasticsearch([self.get_elasticsearch_url()])
        # Client for the separate monitoring cluster.
        self.es_monitoring = Elasticsearch([self.get_elasticsearch_monitoring_url()])

    @unittest.skipUnless(INTEGRATION_TESTS, "integration test")
    @attr('integration')
    def test_via_output_cluster(self):
        """
        Test shipping monitoring data via the elasticsearch output cluster.
        Make sure expected documents are indexed in monitoring cluster.
        """
        self.render_config_template(
            "mockbeat",
            xpack={
                "monitoring": {
                    "elasticsearch": {
                        "hosts": [self.get_elasticsearch_url()]
                    }
                }
            }
        )

        self.clean()

        self.start_beat(config="mockbeat.yml")
        self.wait_until(lambda: self.log_contains("mockbeat start running."))
        # Raw strings avoid invalid escape sequences ("\[" is not a valid
        # string escape); re.escape makes the interpolated URL match literally.
        self.wait_until(lambda: self.log_contains(
            re.compile(r"\[monitoring\].*Publish event")))
        self.wait_until(lambda: self.log_contains(re.compile(
            r"Connection to .*elasticsearch\(" +
            re.escape(self.get_elasticsearch_url()) +
            r"\).* established")))
        self.wait_until(lambda: self.monitoring_doc_exists('beats_stats'))
        self.wait_until(lambda: self.monitoring_doc_exists('beats_state'))

        for monitoring_doc_type in ['beats_stats', 'beats_state']:
            field_names = ['cluster_uuid', 'timestamp', 'interval_ms', 'type',
                           'source_node', monitoring_doc_type]
            self.assert_monitoring_doc_contains_fields(monitoring_doc_type, field_names)

    def _search_monitoring_docs(self, monitoring_type):
        """Return up to one hit of the given type from the monitoring cluster."""
        results = self.es_monitoring.search(
            index='.monitoring-beats-*',
            q='type:' + monitoring_type,
            size=1
        )
        return results['hits']['hits']

    def monitoring_doc_exists(self, monitoring_type):
        """Return True once a document of monitoring_type has been indexed."""
        return len(self._search_monitoring_docs(monitoring_type)) == 1

    def assert_monitoring_doc_contains_fields(self, monitoring_type, field_names):
        """Assert the latest monitoring_type document has every field in field_names."""
        hits = self._search_monitoring_docs(monitoring_type)
        source = hits[0]['_source']

        for field_name in field_names:
            assert field_name in source, \
                "expected field '%s' in '%s' monitoring doc" % (field_name, monitoring_type)

    def clean(self):
        """Wire the output cluster to the monitoring cluster and wipe old data."""
        # Setup remote exporter so the output cluster forwards monitoring
        # documents to the dedicated monitoring cluster.
        self.es.cluster.put_settings(body={
            "transient": {
                "xpack.monitoring.exporters.my_remote": {
                    "type": "http",
                    "host": [self.get_elasticsearch_monitoring_url()]
                }
            }
        })

        # Enable collection
        self.es.cluster.put_settings(body={
            "transient": {
                "xpack.monitoring.collection.enabled": True
            }
        })

        # Delete any old beats monitoring data
        self.es_monitoring.indices.delete(index=".monitoring-beats-*", ignore=[404])

    def get_elasticsearch_monitoring_url(self):
        """URL of the monitoring cluster; overridable via ES_MONITORING_HOST/PORT."""
        return "http://{host}:{port}".format(
            host=os.getenv("ES_MONITORING_HOST", "localhost"),
            port=os.getenv("ES_MONITORING_PORT", "9210")
        )