
Commit

Improve ECS categorization field mappings for nginx module (elastic#17844)

- access
  + event.kind
  + event.category
  + event.type
  + event.outcome
  + lowercase http.request.method
  + improve grok to not populate empty fields
  + related.ip
  + related.user
- error
  + event.kind
  + event.category
  + event.outcome
- ingress_controller
  + event.kind
  + event.category
  + event.type
  + event.outcome
  + lowercase http.request.method
  + improve grok to not populate empty fields
  + related.ip
  + related.user

Closes elastic#16174
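
For reference, here is a rough sketch of what the reworked access pipeline should emit for a single successful request. The log line, addresses, and user name are invented for illustration, and the output is abridged to the categorization-related fields:

{
  "event":   { "kind": "event", "category": ["web"], "type": ["access"], "outcome": "success" },
  "http":    { "request": { "method": "get" }, "response": { "status_code": 200, "body": { "bytes": 612 } } },
  "source":  { "address": "203.0.113.10", "ip": "203.0.113.10" },
  "user":    { "name": "admin" },
  "related": { "ip": ["203.0.113.10"], "user": ["admin"] }
}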

(cherry picked from commit 93c3d15)
leehinman committed Apr 23, 2020
1 parent 7731ffa commit 699677c
Showing 15 changed files with 992 additions and 525 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.next.asciidoc
@@ -338,6 +338,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Enhance `elasticsearch/server` fileset to handle ECS-compatible logs emitted by Elasticsearch. {issue}17715[17715] {pull}17714[17714]
- Added Unix stream socket support as an input source and a syslog input source. {pull}17492[17492]
- Improve ECS categorization field mappings in misp module. {issue}16026[16026] {pull}17344[17344]
- Improve ECS categorization field mappings for nginx module. {issue}16174[16174] {pull}17844[17844]

*Heartbeat*

150 changes: 0 additions & 150 deletions filebeat/module/nginx/access/ingest/default.json

This file was deleted.

167 changes: 167 additions & 0 deletions filebeat/module/nginx/access/ingest/pipeline.yml
@@ -0,0 +1,167 @@
description: Pipeline for parsing Nginx access logs. Requires the geoip and user_agent
  plugins.
processors:
- grok:
    field: message
    patterns:
    - (%{NGINX_HOST} )?"?(?:%{NGINX_ADDRESS_LIST:nginx.access.remote_ip_list}|%{NOTSPACE:source.address})
      - (-|%{DATA:user.name}) \[%{HTTPDATE:nginx.access.time}\] "%{DATA:nginx.access.info}"
      %{NUMBER:http.response.status_code:long} %{NUMBER:http.response.body.bytes:long}
      "(-|%{DATA:http.request.referrer})" "(-|%{DATA:user_agent.original})"
    pattern_definitions:
      NGINX_HOST: (?:%{IP:destination.ip}|%{NGINX_NOTSEPARATOR:destination.domain})(:%{NUMBER:destination.port})?
      NGINX_NOTSEPARATOR: "[^\t ,:]+"
      NGINX_ADDRESS_LIST: (?:%{IP}|%{WORD})("?,?\s*(?:%{IP}|%{WORD}))*
    ignore_missing: true
- grok:
    field: nginx.access.info
    patterns:
    - '%{WORD:http.request.method} %{DATA:url.original} HTTP/%{NUMBER:http.version}'
    - ""
    ignore_missing: true
- remove:
    field: nginx.access.info
- split:
    field: nginx.access.remote_ip_list
    separator: '"?,?\s+'
    ignore_missing: true
- split:
    field: nginx.access.origin
    separator: '"?,?\s+'
    ignore_missing: true
- set:
    field: source.address
    if: ctx.source?.address == null
    value: ""
- script:
    if: ctx.nginx?.access?.remote_ip_list != null && ctx.nginx.access.remote_ip_list.length > 0
    lang: painless
    source: >-
      boolean isPrivate(def dot, def ip) {
        try {
          StringTokenizer tok = new StringTokenizer(ip, dot);
          int firstByte = Integer.parseInt(tok.nextToken());
          int secondByte = Integer.parseInt(tok.nextToken());
          if (firstByte == 10) {
            return true;
          }
          if (firstByte == 192 && secondByte == 168) {
            return true;
          }
          if (firstByte == 172 && secondByte >= 16 && secondByte <= 31) {
            return true;
          }
          if (firstByte == 127) {
            return true;
          }
          return false;
        }
        catch (Exception e) {
          return false;
        }
      }
      try {
        ctx.source.address = null;
        if (ctx.nginx.access.remote_ip_list == null) {
          return;
        }
        def found = false;
        for (def item : ctx.nginx.access.remote_ip_list) {
          if (!isPrivate(params.dot, item)) {
            ctx.source.address = item;
            found = true;
            break;
          }
        }
        if (!found) {
          ctx.source.address = ctx.nginx.access.remote_ip_list[0];
        }
      }
      catch (Exception e) {
        ctx.source.address = null;
      }
    params:
      dot: .
- remove:
    field: source.address
    if: ctx.source.address == null
- grok:
    field: source.address
    patterns:
    - ^%{IP:source.ip}$
    ignore_failure: true
- remove:
    field: message
- rename:
    field: '@timestamp'
    target_field: event.created
- date:
    field: nginx.access.time
    target_field: '@timestamp'
    formats:
    - dd/MMM/yyyy:H:m:s Z
    on_failure:
    - append:
        field: error.message
        value: '{{ _ingest.on_failure_message }}'
- remove:
    field: nginx.access.time
- user_agent:
    field: user_agent.original
    ignore_missing: true
- geoip:
    field: source.ip
    target_field: source.geo
    ignore_missing: true
- geoip:
    database_file: GeoLite2-ASN.mmdb
    field: source.ip
    target_field: source.as
    properties:
    - asn
    - organization_name
    ignore_missing: true
- rename:
    field: source.as.asn
    target_field: source.as.number
    ignore_missing: true
- rename:
    field: source.as.organization_name
    target_field: source.as.organization.name
    ignore_missing: true
- set:
    field: event.kind
    value: event
- append:
    field: event.category
    value: web
- append:
    field: event.type
    value: access
- set:
    field: event.outcome
    value: success
    if: "ctx?.http?.response?.status_code != null && ctx.http.response.status_code < 400"
- set:
    field: event.outcome
    value: failure
    if: "ctx?.http?.response?.status_code != null && ctx.http.response.status_code >= 400"
- lowercase:
    field: http.request.method
    ignore_missing: true
- append:
    field: related.ip
    value: "{{source.ip}}"
    if: "ctx?.source?.ip != null"
- append:
    field: related.ip
    value: "{{destination.ip}}"
    if: "ctx?.destination?.ip != null"
- append:
    field: related.user
    value: "{{user.name}}"
    if: "ctx?.user?.name != null"
on_failure:
- set:
    field: error.message
    value: '{{ _ingest.on_failure_message }}'
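
As a quick sanity check, the converted pipeline can be exercised with Elasticsearch's simulate API once Filebeat has loaded it, e.g. POST /_ingest/pipeline/<pipeline_id>/_simulate. Module pipelines are typically registered under an ID along the lines of filebeat-<version>-nginx-access-pipeline; the exact ID and the access log line below are assumptions for illustration only:

{
  "docs": [
    {
      "_source": {
        "message": "203.0.113.10 - admin [23/Apr/2020:15:20:02 +0000] \"GET /index.html HTTP/1.1\" 200 612 \"-\" \"Mozilla/5.0\""
      }
    }
  ]
}

The simulated document should come back with event.kind, event.category, event.type, and event.outcome set, http.request.method lowercased, and related.ip and related.user populated as described in the commit message.
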
2 changes: 1 addition & 1 deletion filebeat/module/nginx/access/manifest.yml
@@ -9,7 +9,7 @@ var:
os.windows:
- c:/programdata/nginx/logs/*access.log*

ingest_pipeline: ingest/default.json
ingest_pipeline: ingest/pipeline.yml
input: config/nginx-access.yml

machine_learning:
