-
Notifications
You must be signed in to change notification settings - Fork 1
/
docker-compose.yml
189 lines (170 loc) · 6.31 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
---
version: '3.3'
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:${ELASTIC_STACK_VERSION}
    hostname: elasticsearch
    container_name: elasticsearch
    environment:
      - 'http.host=0.0.0.0'
      - 'transport.host=127.0.0.1'
      - 'xpack.monitoring.collection.enabled=true'
    ports:
      - '127.0.0.1:9200:9200'
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9200/_cat/health"]
      interval: 30s
      timeout: 10s
      retries: 5
    networks:
      - stack
  kibana:
    image: docker.elastic.co/kibana/kibana:${ELASTIC_STACK_VERSION}
    hostname: kibana
    container_name: kibana
    ports:
      - '127.0.0.1:5601:5601'
    networks:
      - stack
    depends_on:
      - elasticsearch
  logstash:
    image: docker.elastic.co/logstash/logstash:${ELASTIC_STACK_VERSION}
    hostname: logstash
    container_name: logstash
    environment:
      - 'xpack.monitoring.enabled=true'
    ports:
      - '127.0.0.1:4560:4560'
      - '127.0.0.1:5044:5044'
    volumes:
      # Provide a pipeline configuration for Logstash with a bind-mounted file
      - ./docker-compose/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    networks:
      - stack
    depends_on:
      - elasticsearch
  # Filebeat sending data to Logstash
  filebeat_for_logstash:
    image: docker.elastic.co/beats/filebeat:${ELASTIC_STACK_VERSION}
    hostname: filebeat_for_logstash
    container_name: filebeat_for_logstash
    volumes:
      # Bind-mount the logs/ directory, so Filebeat can read its files
      - './logs-docker/:/mnt/logs/:ro'
      # Bind-mount a custom configuration file
      - './docker-compose/filebeat-logstash.yml:/usr/share/filebeat/filebeat.yml:ro'
      # Bind-mount the config directory so it can be dynamically loaded
      - './docker-compose/filebeat-logstash/config/:/usr/share/filebeat/config/:ro'
      # Bind-mount the registry file to avoid data duplication between restarts
      - './docker-compose/filebeat-logstash/registry/:/usr/share/filebeat/data/'
    command: filebeat -e
    restart: on-failure
    networks:
      - stack
    depends_on:
      - elasticsearch
      - logstash
  # Filebeat sending data to Elasticsearch
  filebeat_for_elasticsearch:
    image: docker.elastic.co/beats/filebeat:${ELASTIC_STACK_VERSION}
    hostname: filebeat_for_elasticsearch
    container_name: filebeat_for_elasticsearch
    volumes:
      # Bind-mount the logs/ directory, so Filebeat can read its files
      - './logs-docker/:/mnt/logs/:ro'
      # Bind-mount a custom configuration file
      - './docker-compose/filebeat-elasticsearch.yml:/usr/share/filebeat/filebeat.yml:ro'
      # Bind-mount the config directory so it can be dynamically loaded
      - './docker-compose/filebeat-elasticsearch/config/:/usr/share/filebeat/config/:ro'
      # Bind-mount the registry file to avoid data duplication between restarts
      - './docker-compose/filebeat-elasticsearch/registry/:/usr/share/filebeat/data/'
    command: 'filebeat -e'
    restart: on-failure
    networks:
      - stack
    depends_on:
      - elasticsearch
      - kibana
  # Filebeat collecting from a Docker container and sending to Elasticsearch
  filebeat_docker_for_elasticsearch:
    image: docker.elastic.co/beats/filebeat:${ELASTIC_STACK_VERSION}
    hostname: filebeat_docker_for_elasticsearch
    container_name: filebeat_docker_for_elasticsearch
    user: root  # To read the docker socket
    volumes:
      # Bind-mount the Docker log directory from the python container, so Filebeat can read its files
      - '/var/lib/docker/containers:/var/lib/docker/containers:ro'
      # Bind-mount a custom configuration file
      - './docker-compose/filebeat-docker.yml:/usr/share/filebeat/filebeat.yml:ro'
      # Bind-mount the registry file to avoid data duplication between restarts
      - './docker-compose/filebeat-docker/registry/:/usr/share/filebeat/data/'
      # Bind-mount the Docker daemon to enable add_docker_metadata from within the container
      - '/var/run/docker.sock:/var/run/docker.sock:ro'
    command: 'filebeat -e'
    restart: on-failure
    networks:
      - stack
    depends_on:
      - elasticsearch
      - kibana
  # Metricbeat to optionally collect metrics from the entire setup; not required for logging itself
  metricbeat:
    hostname: metricbeat
    container_name: metricbeat
    user: root  # To read the docker socket
    image: docker.elastic.co/beats/metricbeat:${ELASTIC_STACK_VERSION}
    volumes:
      # Bind-mount a custom configuration file
      - './docker-compose/metricbeat.yml:/usr/share/metricbeat/metricbeat.yml:ro'
      # Bind-mount the config directory so it can be dynamically loaded
      - './docker-compose/metricbeat/config/:/usr/share/metricbeat/config/:ro'
      # Monitor the Docker host rather than the Metricbeat container; these are used by the system module
      - '/proc:/hostfs/proc:ro'
      - '/sys/fs/cgroup:/hostfs/sys/fs/cgroup:ro'
      - '/:/hostfs:ro'
      # Bind-mount the Docker daemon to enable add_docker_metadata from within the container
      - '/var/run/docker.sock:/var/run/docker.sock:ro'
    command: 'metricbeat -e'
    restart: on-failure
    networks:
      - stack
    depends_on:
      - elasticsearch
      - kibana
  # Short lived container to configure the stack once Kibana and Elasticsearch are available
  configure_stack:
    image: docker.elastic.co/beats/filebeat:${ELASTIC_STACK_VERSION}
    hostname: configure_stack
    container_name: configure_stack
    volumes:
      - './docker-compose/setup.sh:/usr/local/bin/setup.sh:ro'
      - './docker-compose/setup_ingest-pipeline_parse-python.json:/usr/local/bin/setup_ingest-pipeline_parse-python.json:ro'
    command: '/usr/local/bin/setup.sh'
    networks:
      - stack
    depends_on:
      - elasticsearch
      - kibana
  # The python app to be monitored
  python_app:
    hostname: python_app
    container_name: python_app
    build:
      dockerfile: $PWD/dockerfile_python
      context: $PWD
    labels:
      - "app=fizzbuzz"
      - "co.elastic.logs/multiline.pattern=^\\["
      - "co.elastic.logs/multiline.negate=true"
      - "co.elastic.logs/multiline.match=after"
    volumes:
      # Bind-mount the log folder to the host so a sidecar can collect the logs
      - './logs-docker/:/logs/'
    networks:
      - stack
    depends_on:
      - elasticsearch
      - logstash
networks:
  stack: {}