Skip to content

Commit

Permalink
Organize logstash pipeline and fix kibana config detection
Browse files Browse the repository at this point in the history
- Logstash config is now broken into components
- Fixed deploy script to work around elastic/kibana#16348, which is also
present in Kibana 5.6.8.
  • Loading branch information
dcode committed Feb 24, 2018
1 parent cf2ffaf commit bf2d71d
Show file tree
Hide file tree
Showing 17 changed files with 176 additions and 173 deletions.
64 changes: 45 additions & 19 deletions playbooks/deploy-rock.yml
Original file line number Diff line number Diff line change
Expand Up @@ -461,33 +461,58 @@
######################################################
# Deploy the componentized Bro pipeline (input/filter/output) into Logstash's
# conf.d, one file per stage.
# The stale single-file src/dest lines left over from the pre-split version
# were removed: they produced duplicate `src`/`dest` keys in one mapping,
# which is invalid YAML (most parsers silently keep only the last value).
- name: Install Bro-Kafka configuration for Logstash
  copy:
    src: "{{ item }}"
    dest: "/etc/logstash/conf.d/{{ item }}"
    # Quoted so YAML 1.1 does not reinterpret the octal literal as decimal 416.
    mode: "0640"
    owner: "{{ logstash_user }}"
    group: "{{ logstash_group }}"
  when: with_logstash and with_bro and with_kafka
  notify: Restart Logstash
  with_items:
    - logstash-100-input-kafka-bro.conf
    - logstash-500-filter-bro.conf
    - logstash-999-output-es-bro.conf

# Deploy the componentized Suricata pipeline (input/filter/output) into
# Logstash's conf.d, one file per stage.
# The stale single-file src/dest lines left over from the pre-split version
# were removed: they produced duplicate `src`/`dest` keys in one mapping,
# which is invalid YAML (most parsers silently keep only the last value).
- name: Install Suricata-Kafka configuration for Logstash
  copy:
    src: "{{ item }}"
    dest: "/etc/logstash/conf.d/{{ item }}"
    # Quoted so YAML 1.1 does not reinterpret the octal literal as decimal 416.
    mode: "0640"
    owner: "{{ logstash_user }}"
    group: "{{ logstash_group }}"
  when: with_logstash and with_suricata and with_kafka
  notify: Restart Logstash
  with_items:
    - logstash-100-input-kafka-suricata.conf
    - logstash-500-filter-suricata.conf
    - logstash-999-output-es-suricata.conf

# Deploy the componentized FSF pipeline (input/filter/output) into Logstash's
# conf.d, one file per stage.
# Two fixes relative to the diff as committed:
#   * the stale single-file src/dest lines (duplicate YAML keys) are removed;
#   * the filter entry is logstash-500-filter-fsf.conf — the file this commit
#     actually adds — not logstash-100-filter-fsf.conf, which does not exist
#     and would make the copy task fail.
- name: Install FSF-Kafka configuration for Logstash
  copy:
    src: "{{ item }}"
    dest: "/etc/logstash/conf.d/{{ item }}"
    # Quoted so YAML 1.1 does not reinterpret the octal literal as decimal 416.
    mode: "0640"
    owner: "{{ logstash_user }}"
    group: "{{ logstash_group }}"
  when: with_logstash and with_fsf and with_kafka
  notify: Restart Logstash
  with_items:
    - logstash-100-input-kafka-fsf.conf
    - logstash-500-filter-fsf.conf
    - logstash-999-output-es-fsf.conf

# Deploy the catch-all parse-failure filter/output pair. Installed whenever
# Logstash is enabled, since every data-source pipeline can route failed
# events to the _parsefailure stage.
- name: Install Parse Failure configuration for Logstash
  copy:
    src: "{{ item }}"
    dest: "/etc/logstash/conf.d/{{ item }}"
    # Quoted so YAML 1.1 does not reinterpret the octal literal as decimal 416.
    mode: "0640"
    owner: "{{ logstash_user }}"
    group: "{{ logstash_group }}"
  when: with_logstash
  notify: Restart Logstash
  with_items:
    - logstash-998-filter-parsefailures.conf
    - logstash-999-output-es-parsefailures.conf

- name: Check for Parse Failure mapping template
uri:
Expand Down Expand Up @@ -1053,6 +1078,19 @@
remote_src: yes
when: with_kibana

# Pre-create an index template for .kibana so the index comes up with a
# single shard and no replicas (per the commit message, this works around
# elastic/kibana#16348 on single-node deployments).
- name: Configure Kibana templates
  uri:
    method: PUT
    url: http://localhost:9200/_template/kibana-config
    # Sends Content-Type: application/json — NOTE(review): required by
    # Elasticsearch 6.x+; confirm it is accepted by the 5.6.x target here.
    body_format: json
    body: >
      { "order" : 0, "template" : ".kibana",
      "settings" :
      { "index.number_of_replicas" : "0",
      "index.number_of_shards" : "1" },
      "mappings" : { }, "aliases" : { } }
    # A proper YAML list: the bare scalar `200,201` parses as the string
    # "200,201" and relies on the uri module to split it.
    status_code: [200, 201]
  when: with_kibana

- name: Query Kibana package info
yum:
list: kibana
Expand All @@ -1071,6 +1109,7 @@
return_content: true
register: kibana_cfg
changed_when: false
failed_when: false
until: kibana_cfg.status == 200
retries: 10
delay: 3
Expand All @@ -1081,19 +1120,6 @@
kibana_config: "{{ kibana_cfg.json }}"
when: with_kibana

# NOTE(review): this is the removed-side diff context for the
# "Configure Kibana templates" task — the commit relocates the identical
# task earlier in the playbook (before "Query Kibana package info").
# It is shown here only as deleted lines of the diff.
- name: Configure Kibana templates
uri:
method: PUT
url: http://localhost:9200/_template/kibana-config
body: >
{ "order" : 0, "template" : ".kibana",
"settings" :
{ "index.number_of_replicas" : "0",
"index.number_of_shards" : "1" },
"mappings" : { }, "aliases" : { } }
status_code: 200,201
when: with_kibana

- name: Push Kibana dashboard config
command: >
/opt/rocknsm/rock-dashboards-{{ rock_dashboards_branch }}/load.sh
Expand Down
File renamed without changes.
12 changes: 12 additions & 0 deletions playbooks/files/logstash-100-input-kafka-fsf.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Kafka input: consume raw FSF scan results for the FSF pipeline stages.
input {
  kafka {
    topics            => ["fsf-raw"]
    group_id          => "fsf_logstash"
    bootstrap_servers => "127.0.0.1:9092"
    codec             => json
    auto_offset_reset => "earliest"
    # Set this to one per kafka partition to scale up
    #consumer_threads => 4
    # Tag each event so downstream filter/output blocks can match this stage.
    add_field => { "[@metadata][stage]" => "fsfraw_kafka" }
  }
}
12 changes: 12 additions & 0 deletions playbooks/files/logstash-100-input-kafka-suricata.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Kafka input: consume raw Suricata events for the Suricata pipeline stages.
input {
  kafka {
    topics            => ["suricata-raw"]
    group_id          => "suricata_logstash"
    bootstrap_servers => "127.0.0.1:9092"
    codec             => json
    auto_offset_reset => "earliest"
    # Set this to one per kafka partition to scale up
    #consumer_threads => 4
    # Tag each event so downstream filter/output blocks can match this stage.
    add_field => { "[@metadata][stage]" => "suricataraw_kafka" }
  }
}
File renamed without changes.
23 changes: 23 additions & 0 deletions playbooks/files/logstash-500-filter-fsf.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# FSF filter: clean raw Kafka events and reroute parse failures.
filter {

  # Stage 1: events freshly consumed from the fsf-raw Kafka topic.
  if [@metadata][stage] == "fsfraw_kafka" {
    if ![tags] {
      # Drop the bookkeeping field added by the Kafka input.
      mutate { remove_field => [ "kafka_topic" ] }
      # Use FSF's scan time as @timestamp instead of ingest time.
      date { match => [ "Scan Time", "ISO8601" ] }
    } else {
      # Tags present => something upstream failed to parse this event.
      mutate { add_field => { "[@metadata][stage]" => "_parsefailure" } }
    }
  }

  # Stage 2: events tagged with the plain "fsf" stage.
  if [@metadata][stage] == "fsf" {
    if ![tags] {
      mutate { remove_field => ["path"] }
    } else {
      mutate { add_field => { "[@metadata][stage]" => "_parsefailure" } }
    }
  }
}
28 changes: 28 additions & 0 deletions playbooks/files/logstash-500-filter-suricata.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Suricata filter: clean raw Kafka events and reroute parse failures.
filter {

  # Stage 1: events freshly consumed from the suricata-raw Kafka topic.
  if [@metadata][stage] == "suricataraw_kafka" {
    if ![tags] {
      # Drop the bookkeeping field added by the Kafka input.
      mutate { remove_field => [ "kafka_topic" ] }
      # Use the event's own timestamp as @timestamp instead of ingest time.
      date { match => [ "timestamp", "ISO8601" ] }
    } else {
      # Tags present => something upstream failed to parse this event.
      mutate { add_field => { "[@metadata][stage]" => "_parsefailure" } }
    }
  }

  # Stage 2: events tagged as coming from the Suricata EVE log.
  if [@metadata][stage] == "suricata_eve" {
    # Tags will determine if there is some sort of parse failure
    if ![tags] {
      mutate { remove_field => ["path"] }
    } else {
      mutate { add_field => { "[@metadata][stage]" => "_parsefailure" } }
    }
  }
}
11 changes: 0 additions & 11 deletions playbooks/files/logstash-500-fsf-es.conf

This file was deleted.

11 changes: 0 additions & 11 deletions playbooks/files/logstash-500-suricata-es.conf

This file was deleted.

15 changes: 15 additions & 0 deletions playbooks/files/logstash-999-output-es-bro.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Bro output: republish cleaned events to per-type Kafka topics and index
# them in Elasticsearch by event type and day.
output {
  if [@metadata][stage] == "broraw_kafka" {
    # NOTE(review): the field reference is [@meta][event_type], not
    # [@metadata] — presumably set by the Bro input/filter stage; confirm
    # against the Bro pipeline config.
    kafka {
      codec             => json
      topic_id          => "bro-%{[@meta][event_type]}"
      bootstrap_servers => "127.0.0.1:9092"
    }

    elasticsearch {
      hosts           => ["127.0.0.1"]
      index           => "bro-%{[@meta][event_type]}-%{+YYYY.MM.dd}"
      manage_template => false
    }
  }
}
16 changes: 16 additions & 0 deletions playbooks/files/logstash-999-output-es-fsf.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# FSF output: republish cleaned events to the fsf-clean Kafka topic and
# index them in Elasticsearch by day.
output {
  if [@metadata][stage] == "fsfraw_kafka" {
    kafka {
      codec             => json
      topic_id          => "fsf-clean"
      bootstrap_servers => "127.0.0.1:9092"
    }

    elasticsearch {
      hosts           => ["127.0.0.1"]
      index           => "fsf-%{+YYYY.MM.dd}"
      manage_template => false
      document_type   => "fsf"
    }
  }
}
9 changes: 9 additions & 0 deletions playbooks/files/logstash-999-output-es-parsefailures.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Parse-failure output: index any event routed to the _parsefailure stage
# into a dedicated daily index for later inspection.
output {
  if [@metadata][stage] == "_parsefailure" {
    elasticsearch {
      hosts         => ["127.0.0.1"]
      index         => "parse-failures-%{+YYYY.MM.dd}"
      document_type => "_parsefailure"
      # NOTE(review): unlike the other ES outputs, no manage_template =>
      # false here — confirm whether Logstash should manage this template.
    }
  }
}
16 changes: 16 additions & 0 deletions playbooks/files/logstash-999-output-es-suricata.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Suricata output: republish cleaned events to the suricata-clean Kafka
# topic and index them in Elasticsearch by day.
output {
  if [@metadata][stage] == "suricataraw_kafka" {
    kafka {
      codec             => json
      topic_id          => "suricata-clean"
      bootstrap_servers => "127.0.0.1:9092"
    }

    elasticsearch {
      hosts           => ["127.0.0.1"]
      index           => "suricata-%{+YYYY.MM.dd}"
      manage_template => false
      # Document type comes from the event itself (EVE event_type field).
      document_type   => "%{event_type}"
    }
  }
}
39 changes: 0 additions & 39 deletions playbooks/files/logstash-999-output.conf

This file was deleted.

47 changes: 0 additions & 47 deletions playbooks/files/logstash-kafka-fsf.conf

This file was deleted.

Loading

0 comments on commit bf2d71d

Please sign in to comment.