diff --git a/agent/modules/stenoquery/stenoquery.go b/agent/modules/stenoquery/stenoquery.go
index 57af7351..55d7b0e8 100644
--- a/agent/modules/stenoquery/stenoquery.go
+++ b/agent/modules/stenoquery/stenoquery.go
@@ -154,6 +154,10 @@ func (steno *StenoQuery) CreateQuery(job *model.Job) string {
query := fmt.Sprintf("before %s and after %s", endTime, beginTime)
+ if len(job.Filter.Protocol) > 0 {
+ query = fmt.Sprintf("%s and %s", query, job.Filter.Protocol)
+ }
+
if len(job.Filter.SrcIp) > 0 {
query = fmt.Sprintf("%s and host %s", query, job.Filter.SrcIp)
}
@@ -162,12 +166,15 @@ func (steno *StenoQuery) CreateQuery(job *model.Job) string {
query = fmt.Sprintf("%s and host %s", query, job.Filter.DstIp)
}
- if job.Filter.SrcPort > 0 {
- query = fmt.Sprintf("%s and port %d", query, job.Filter.SrcPort)
- }
+	// Legacy jobs may omit the protocol, so only skip the port clauses when
+	// the protocol is explicitly ICMP (ports are meaningless for ICMP).
+ if job.Filter.Protocol != model.PROTOCOL_ICMP {
+ if job.Filter.SrcPort > 0 {
+ query = fmt.Sprintf("%s and port %d", query, job.Filter.SrcPort)
+ }
- if job.Filter.DstPort > 0 {
- query = fmt.Sprintf("%s and port %d", query, job.Filter.DstPort)
+ if job.Filter.DstPort > 0 {
+ query = fmt.Sprintf("%s and port %d", query, job.Filter.DstPort)
+ }
}
return query
diff --git a/agent/modules/stenoquery/stenoquery_test.go b/agent/modules/stenoquery/stenoquery_test.go
index da86799b..41623336 100644
--- a/agent/modules/stenoquery/stenoquery_test.go
+++ b/agent/modules/stenoquery/stenoquery_test.go
@@ -46,6 +46,11 @@ func TestCreateQuery(tester *testing.T) {
query := sq.CreateQuery(job)
assert.Equal(tester, expectedQuery, query)
+ job.Filter.Protocol = model.PROTOCOL_TCP
+ query = sq.CreateQuery(job)
+ expectedQuery = expectedQuery + " and tcp"
+ assert.Equal(tester, expectedQuery, query)
+
job.Filter.SrcIp = "1.2.3.4"
query = sq.CreateQuery(job)
expectedQuery = expectedQuery + " and host " + job.Filter.SrcIp
@@ -66,3 +71,37 @@ func TestCreateQuery(tester *testing.T) {
expectedQuery = expectedQuery + " and port " + strconv.Itoa(job.Filter.DstPort)
assert.Equal(tester, expectedQuery, query)
}
+
+func TestCreateQueryIcmp(tester *testing.T) {
+ sq := NewStenoQuery(nil)
+
+ job := model.NewJob()
+ job.Filter.BeginTime, _ = time.Parse(time.RFC3339, "2006-01-02T15:05:05Z")
+ job.Filter.EndTime, _ = time.Parse(time.RFC3339, "2006-01-02T15:06:05Z")
+ expectedQuery := "before 2006-01-02T15:06:05Z and after 2006-01-02T15:05:05Z"
+ query := sq.CreateQuery(job)
+ assert.Equal(tester, expectedQuery, query)
+
+ job.Filter.Protocol = model.PROTOCOL_ICMP
+ query = sq.CreateQuery(job)
+ expectedQuery = expectedQuery + " and icmp"
+ assert.Equal(tester, expectedQuery, query)
+
+ job.Filter.SrcIp = "1.2.3.4"
+ query = sq.CreateQuery(job)
+ expectedQuery = expectedQuery + " and host " + job.Filter.SrcIp
+ assert.Equal(tester, expectedQuery, query)
+
+ job.Filter.DstIp = "1.2.1.2"
+ query = sq.CreateQuery(job)
+ expectedQuery = expectedQuery + " and host " + job.Filter.DstIp
+ assert.Equal(tester, expectedQuery, query)
+
+ job.Filter.SrcPort = 123
+ query = sq.CreateQuery(job)
+ assert.Equal(tester, expectedQuery, query) // port ignored for icmp
+
+ job.Filter.DstPort = 123
+ query = sq.CreateQuery(job)
+ assert.Equal(tester, expectedQuery, query) // port ignored for icmp
+}
diff --git a/html/css/app.css b/html/css/app.css
index 39b4325f..7531b112 100644
--- a/html/css/app.css
+++ b/html/css/app.css
@@ -173,7 +173,7 @@ a#title, a#title:visited, a#title:active, a#title:hover {
.filter.label {
display: inline-block;
- width: 44%;
+ width: 40%;
text-align: right;
white-space: nowrap;
vertical-align: top;
diff --git a/html/index.html b/html/index.html
index 8f9acc94..e9fb21dd 100644
--- a/html/index.html
+++ b/html/index.html
@@ -1337,6 +1337,7 @@
+
@@ -1373,29 +1374,29 @@ {{ i18n.viewJob }}
-
+
fa-hashtag
{{ job.id }}
-
+
fa-ethernet
{{ job.nodeId }}
-
+
fa-file-export
- {{ job.filter.srcIp }}:{{ job.filter.srcPort }} - {{ $root.pickHostname(job.filter.srcIp) }}
+ {{ job.filter.srcIp }}:{{ job.filter.srcPort }} - {{ $root.pickHostname(job.filter.srcIp) }}
-
+
fa-file-import
- {{ job.filter.dstIp }}:{{ job.filter.dstPort }} - {{ $root.pickHostname(job.filter.dstIp) }}
+ {{ job.filter.dstIp }}:{{ job.filter.dstPort }} - {{ $root.pickHostname(job.filter.dstIp) }}
@@ -1404,55 +1405,59 @@ {{ i18n.viewJob }}
{{ i18n.job }}:
- {{ job.id }}
+ {{ job.id }}
{{ i18n.owner }}:
- {{ job.owner }}
+ {{ job.owner }}
{{ i18n.sensorId }}:
- {{ job.nodeId }}
+ {{ job.nodeId }}
{{ i18n.importId }}:
- {{ job.filter.importId }}
+ {{ job.filter.importId }}
+
+
+ {{ i18n.protocol }}:
+ {{ job.filter.protocol }}
{{ i18n.srcIp }}:
- {{ job.filter.srcIp }} - {{ $root.pickHostname(job.filter.srcIp) }}
+ {{ job.filter.srcIp }} - {{ $root.pickHostname(job.filter.srcIp) }}
{{ i18n.srcPort }}:
- {{ job.filter.srcPort }}
+ {{ job.filter.srcPort }}
{{ i18n.dstIp }}:
- {{ job.filter.dstIp }} - {{ $root.pickHostname(job.filter.dstIp) }}
+ {{ job.filter.dstIp }} - {{ $root.pickHostname(job.filter.dstIp) }}
{{ i18n.dstPort }}:
- {{ job.filter.dstPort }}
+ {{ job.filter.dstPort }}
{{ i18n.dateQueued }}:
- {{ job.createTime | formatDateTime}}
+ {{ job.createTime | formatDateTime}}
{{ i18n.dateCompleted }}:
- {{ job.completeTime | formatDateTime}}
+ {{ job.completeTime | formatDateTime}}
{{ i18n.dateFailed }}:
- {{ job.failTime | formatDateTime}}
+ {{ job.failTime | formatDateTime}}
{{ i18n.beginTime }}:
- {{ job.filter.beginTime | formatTimestamp }}
+ {{ job.filter.beginTime | formatTimestamp }}
{{ i18n.endTime }}:
- {{ job.filter.endTime | formatTimestamp }}
+ {{ job.filter.endTime | formatTimestamp }}
diff --git a/html/js/app.js b/html/js/app.js
index 5f1e3fea..e58e6aa9 100644
--- a/html/js/app.js
+++ b/html/js/app.js
@@ -961,7 +961,9 @@ $(document).ready(function() {
if (event.code == "Escape") {
this.unmaximize(true);
}
- event.cancel();
+ if (typeof event.cancelable !== "boolean" || event.cancelable) {
+ event.preventDefault();
+ }
},
batchLookup(ips, comp) {
if (!this.enableReverseLookup) {
diff --git a/html/js/i18n.js b/html/js/i18n.js
index c44de03d..843d1c35 100644
--- a/html/js/i18n.js
+++ b/html/js/i18n.js
@@ -253,6 +253,7 @@ const i18n = {
deleteUserConfirm: 'You are about to permanently delete this user:',
denied: 'Denied',
description: 'Description',
+ destination: 'Destination',
details: 'Details',
disconnected: 'Disconnected from manager',
diskUsageElastic: 'Elastic Storage Used',
@@ -553,6 +554,8 @@ const i18n = {
pending: 'Pending',
product: 'Security Onion',
profile: 'Profile',
+ protocol: 'Protocol',
+ protocolHelp: 'Optional protocol, such as "icmp" or "tcp".',
queriesHelp: 'Choose from several pre-defined queries',
queryHelp: 'Specify a query in Onion Query Language (OQL)',
quickActions: 'Actions',
@@ -684,6 +687,7 @@ const i18n = {
sortedBy: 'Sort:',
sortInclude: "Sort By",
sortIncludeHelp: "Add as a sort-by field",
+ source: 'Source',
sponsorsIntro: 'Brought to you by:',
srcIp: 'Source IP',
srcIpHelp: 'Optional source IP address to include in this job filter',
diff --git a/html/js/routes/job.js b/html/js/routes/job.js
index 486c983e..cc69e9a5 100644
--- a/html/js/routes/job.js
+++ b/html/js/routes/job.js
@@ -349,7 +349,7 @@ routes.push({ path: '/job/:jobId', name: 'job', component: {
view += (code < 32 || code > 126) && code != 13 && code != 10 ? "." : input[idx];
}
return view;
- }
+ }
}
}});
diff --git a/html/js/routes/jobs.js b/html/js/routes/jobs.js
index 2212efdd..f9e37ee2 100644
--- a/html/js/routes/jobs.js
+++ b/html/js/routes/jobs.js
@@ -31,6 +31,7 @@ routes.push({ path: '/jobs', name: 'jobs', component: {
valid: false,
sensorId: null,
importId: null,
+ protocol: null,
srcIp: null,
srcPort: null,
dstIp: null,
@@ -90,6 +91,7 @@ routes.push({ path: '/jobs', name: 'jobs', component: {
}
this.form.sensorId = localStorage['settings.jobs.addJobForm.sensorId'];
this.form.importId = localStorage['settings.jobs.addJobForm.importId'];
+ this.form.protocol = localStorage['settings.jobs.addJobForm.protocol'];
this.form.srcIp = localStorage['settings.jobs.addJobForm.srcIp'];
this.form.srcPort = localStorage['settings.jobs.addJobForm.srcPort'];
this.form.dstIp = localStorage['settings.jobs.addJobForm.dstIp'];
@@ -111,13 +113,14 @@ routes.push({ path: '/jobs', name: 'jobs', component: {
}
},
submitAddJob(event) {
- this.addJob(this.form.sensorId, this.form.importId, this.form.srcIp, this.form.srcPort, this.form.dstIp, this.form.dstPort, this.form.beginTime, this.form.endTime);
+ this.addJob(this.form.sensorId, this.form.importId, this.form.protocol, this.form.srcIp, this.form.srcPort, this.form.dstIp, this.form.dstPort, this.form.beginTime, this.form.endTime);
this.dialog = false;
this.saveAddJobForm();
},
saveAddJobForm() {
if (this.form.sensorId) localStorage['settings.jobs.addJobForm.sensorId'] = this.form.sensorId;
if (this.form.importId) localStorage['settings.jobs.addJobForm.importId'] = this.form.importId;
+ if (this.form.protocol) localStorage['settings.jobs.addJobForm.protocol'] = this.form.protocol;
if (this.form.srcIp) localStorage['settings.jobs.addJobForm.srcIp'] = this.form.srcIp;
if (this.form.srcPort) localStorage['settings.jobs.addJobForm.srcPort'] = this.form.srcPort;
if (this.form.dstIp) localStorage['settings.jobs.addJobForm.dstIp'] = this.form.dstIp;
@@ -128,6 +131,7 @@ routes.push({ path: '/jobs', name: 'jobs', component: {
clearAddJobForm() {
this.form.sensorId = null;
this.form.importId = null;
+ this.form.protocol = null;
this.form.srcIp = null;
this.form.srcPort = null;
this.form.dstIp = null;
@@ -136,6 +140,7 @@ routes.push({ path: '/jobs', name: 'jobs', component: {
this.form.endTime = null;
localStorage.removeItem('settings.jobs.addJobForm.sensorId');
localStorage.removeItem('settings.jobs.addJobForm.importId');
+ localStorage.removeItem('settings.jobs.addJobForm.protocol');
localStorage.removeItem('settings.jobs.addJobForm.srcIp');
localStorage.removeItem('settings.jobs.addJobForm.srcPort');
localStorage.removeItem('settings.jobs.addJobForm.dstIp');
@@ -143,7 +148,7 @@ routes.push({ path: '/jobs', name: 'jobs', component: {
localStorage.removeItem('settings.jobs.addJobForm.beginTime');
localStorage.removeItem('settings.jobs.addJobForm.endTime');
},
- async addJob(sensorId, importId, srcIp, srcPort, dstIp, dstPort, beginTime, endTime) {
+ async addJob(sensorId, importId, protocol, srcIp, srcPort, dstIp, dstPort, beginTime, endTime) {
try {
if (!sensorId) {
this.$root.showError(this.i18n.sensorIdRequired);
@@ -154,6 +159,7 @@ routes.push({ path: '/jobs', name: 'jobs', component: {
nodeId: sensorId,
filter: {
importId: importId,
+            protocol: protocol ? protocol.toLowerCase() : null,
srcIp: srcIp,
srcPort: parseInt(srcPort),
dstIp: dstIp,
diff --git a/model/filter.go b/model/filter.go
index e24ff087..3c81e9d3 100644
--- a/model/filter.go
+++ b/model/filter.go
@@ -10,6 +10,10 @@ import (
"time"
)
+const PROTOCOL_ICMP = "icmp"
+const PROTOCOL_TCP = "tcp"
+const PROTOCOL_UDP = "udp"
+
type Filter struct {
ImportId string `json:"importId"`
BeginTime time.Time `json:"beginTime"`
@@ -18,6 +22,7 @@ type Filter struct {
SrcPort int `json:"srcPort"`
DstIp string `json:"dstIp"`
DstPort int `json:"dstPort"`
+ Protocol string `json:"protocol"`
Parameters map[string]interface{} `json:"parameters"`
}
diff --git a/packet/parser.go b/packet/parser.go
index 84ad3f0f..8f033fd8 100644
--- a/packet/parser.go
+++ b/packet/parser.go
@@ -65,6 +65,20 @@ func ToStream(packets []gopacket.Packet) (io.ReadCloser, error) {
return io.NopCloser(bytes.NewReader(full.Bytes())), nil
}
+func getPacketProtocol(packet gopacket.Packet) string {
+ if packet.Layer(layers.LayerTypeTCP) != nil {
+ return model.PROTOCOL_TCP
+ }
+ if packet.Layer(layers.LayerTypeUDP) != nil {
+ return model.PROTOCOL_UDP
+ }
+ if packet.Layer(layers.LayerTypeICMPv4) != nil ||
+ packet.Layer(layers.LayerTypeICMPv6) != nil {
+ return model.PROTOCOL_ICMP
+ }
+ return ""
+}
+
func filterPacket(filter *model.Filter, packet gopacket.Packet) bool {
var srcIp, dstIp string
var srcPort, dstPort int
@@ -100,10 +114,14 @@ func filterPacket(filter *model.Filter, packet gopacket.Packet) bool {
include := (filter.BeginTime.IsZero() || timestamp.After(filter.BeginTime)) &&
(filter.EndTime.IsZero() || timestamp.Before(filter.EndTime)) &&
+ (filter.Protocol == "" || filter.Protocol == getPacketProtocol(packet)) &&
(filter.SrcIp == "" || srcIp == filter.SrcIp) &&
- (filter.SrcPort == 0 || srcPort == filter.SrcPort) &&
- (filter.DstIp == "" || dstIp == filter.DstIp) &&
- (filter.DstPort == 0 || dstPort == filter.DstPort)
+ (filter.DstIp == "" || dstIp == filter.DstIp)
+
+	if include && (filter.Protocol == model.PROTOCOL_UDP || filter.Protocol == model.PROTOCOL_TCP) {
+ include = (filter.SrcPort == 0 || srcPort == filter.SrcPort) &&
+ (filter.DstPort == 0 || dstPort == filter.DstPort)
+ }
return include
}
diff --git a/packet/parser_test.go b/packet/parser_test.go
index 19fdddf5..f2c01aeb 100644
--- a/packet/parser_test.go
+++ b/packet/parser_test.go
@@ -43,6 +43,7 @@ func TestParseAndStream(tester *testing.T) {
filter.BeginTime = startTime
endTime, _ := time.Parse(time.RFC3339, "2019-12-08T23:59:59Z")
filter.EndTime = endTime
+ filter.Protocol = model.PROTOCOL_TCP
filter.SrcIp = "185.47.63.113"
filter.SrcPort = 19
filter.DstIp = "176.126.243.198"
@@ -62,6 +63,22 @@ func TestParseAndStream(tester *testing.T) {
assert.Equal(tester, pcap_length, count)
}
+func TestParseWrongProtocol(tester *testing.T) {
+ path := "test_resources/so-pcap.1575817346"
+ filter := model.NewFilter()
+ startTime, _ := time.Parse(time.RFC3339, "2019-12-08T00:00:00Z")
+ filter.BeginTime = startTime
+ endTime, _ := time.Parse(time.RFC3339, "2019-12-08T23:59:59Z")
+ filter.EndTime = endTime
+ filter.Protocol = model.PROTOCOL_ICMP
+ filter.SrcIp = "185.47.63.113"
+ filter.DstIp = "176.126.243.198"
+
+ packets, perr := ParseRawPcap(path, 999, filter)
+ assert.Nil(tester, perr)
+ assert.Len(tester, packets, 0)
+}
+
func TestParseAndStreamFail(tester *testing.T) {
path := "test_resources/so-pcap.nonexistent"
filter := model.NewFilter()
@@ -69,3 +86,30 @@ func TestParseAndStreamFail(tester *testing.T) {
_, perr := ParseRawPcap(path, 999, filter)
assert.ErrorContains(tester, perr, "No such file")
}
+
+func TestParseAndStreamIcmp(tester *testing.T) {
+ path := "test_resources/icmp.pcap"
+ filter := model.NewFilter()
+ startTime, _ := time.Parse(time.RFC3339, "2024-02-12T00:00:00Z")
+ filter.BeginTime = startTime
+ endTime, _ := time.Parse(time.RFC3339, "2024-02-12T23:59:59Z")
+ filter.EndTime = endTime
+ filter.Protocol = model.PROTOCOL_ICMP
+ filter.SrcIp = "90.151.225.16"
+ filter.SrcPort = 19
+ filter.DstIp = "192.168.10.128"
+ filter.DstPort = 34515
+
+ packets, perr := ParseRawPcap(path, 999, filter)
+ assert.Nil(tester, perr)
+ assert.Len(tester, packets, 2)
+
+ reader, err := ToStream(packets)
+
+ assert.Nil(tester, err)
+ pcap_length := 196 // correlates to two icmp packets in icmp.pcap
+ bytes := make([]byte, 32768)
+ count, err := reader.Read(bytes)
+ assert.Nil(tester, err)
+ assert.Equal(tester, pcap_length, count)
+}
diff --git a/packet/test_resources/icmp.pcap b/packet/test_resources/icmp.pcap
new file mode 100644
index 00000000..d5aa1169
Binary files /dev/null and b/packet/test_resources/icmp.pcap differ
diff --git a/server/modules/elastic/elastic.go b/server/modules/elastic/elastic.go
index d9f16b60..148074bf 100644
--- a/server/modules/elastic/elastic.go
+++ b/server/modules/elastic/elastic.go
@@ -67,8 +67,9 @@ func (elastic *Elastic) Init(cfg module.ModuleConfig) error {
intervals := module.GetIntDefault(cfg, "intervals", DEFAULT_INTERVALS)
maxLogLength := module.GetIntDefault(cfg, "maxLogLength", DEFAULT_MAX_LOG_LENGTH)
casesEnabled := module.GetBoolDefault(cfg, "casesEnabled", true)
+ lookupTunnelParent := module.GetBoolDefault(cfg, "lookupTunnelParent", true)
err := elastic.store.Init(host, remoteHosts, username, password, verifyCert, timeShiftMs, defaultDurationMs,
- esSearchOffsetMs, timeoutMs, cacheMs, index, asyncThreshold, intervals, maxLogLength)
+ esSearchOffsetMs, timeoutMs, cacheMs, index, asyncThreshold, intervals, maxLogLength, lookupTunnelParent)
if err == nil && elastic.server != nil {
elastic.server.Eventstore = elastic.store
if casesEnabled {
diff --git a/server/modules/elastic/elastic_test.go b/server/modules/elastic/elastic_test.go
index 44e42d67..a9d20d5b 100644
--- a/server/modules/elastic/elastic_test.go
+++ b/server/modules/elastic/elastic_test.go
@@ -37,6 +37,7 @@ func TestElasticInit(tester *testing.T) {
assert.Equal(tester, DEFAULT_INDEX, elastic.store.index)
assert.Equal(tester, DEFAULT_INTERVALS, elastic.store.intervals)
assert.Equal(tester, DEFAULT_MAX_LOG_LENGTH, elastic.store.maxLogLength)
+ assert.Equal(tester, false, elastic.store.lookupTunnelParent)
// Ensure casestore has been setup
assert.NotNil(tester, srv.Casestore)
diff --git a/server/modules/elastic/elasticeventstore.go b/server/modules/elastic/elasticeventstore.go
index 93726aa5..08a93d62 100644
--- a/server/modules/elastic/elasticeventstore.go
+++ b/server/modules/elastic/elasticeventstore.go
@@ -37,23 +37,24 @@ type FieldDefinition struct {
}
type ElasticEventstore struct {
- server *server.Server
- hostUrls []string
- esClient *elasticsearch.Client
- esRemoteClients []*elasticsearch.Client
- esAllClients []*elasticsearch.Client
- timeShiftMs int
- defaultDurationMs int
- esSearchOffsetMs int
- timeoutMs time.Duration
- index string
- cacheMs time.Duration
- cacheTime time.Time
- cacheLock sync.Mutex
- fieldDefs map[string]*FieldDefinition
- intervals int
- asyncThreshold int
- maxLogLength int
+ server *server.Server
+ hostUrls []string
+ esClient *elasticsearch.Client
+ esRemoteClients []*elasticsearch.Client
+ esAllClients []*elasticsearch.Client
+ timeShiftMs int
+ defaultDurationMs int
+ esSearchOffsetMs int
+ timeoutMs time.Duration
+ index string
+ cacheMs time.Duration
+ cacheTime time.Time
+ cacheLock sync.Mutex
+ fieldDefs map[string]*FieldDefinition
+ intervals int
+ asyncThreshold int
+ maxLogLength int
+ lookupTunnelParent bool
}
func NewElasticEventstore(srv *server.Server) *ElasticEventstore {
@@ -78,7 +79,8 @@ func (store *ElasticEventstore) Init(hostUrl string,
index string,
asyncThreshold int,
intervals int,
- maxLogLength int) error {
+ maxLogLength int,
+ lookupTunnelParent bool) error {
store.timeShiftMs = timeShiftMs
store.defaultDurationMs = defaultDurationMs
store.esSearchOffsetMs = esSearchOffsetMs
@@ -88,6 +90,7 @@ func (store *ElasticEventstore) Init(hostUrl string,
store.cacheMs = time.Duration(cacheMs) * time.Millisecond
store.intervals = intervals
store.maxLogLength = maxLogLength
+ store.lookupTunnelParent = lookupTunnelParent
var err error
store.esClient, err = store.makeEsClient(hostUrl, user, pass, verifyCert)
@@ -564,7 +567,7 @@ func (store *ElasticEventstore) buildRangeFilter(timestampStr string) (string, t
- Fetch record via provided Elasticsearch document query.
- If the record has a tunnel_parent, search for a UID=tunnel_parent[0]
- - If found, discard original record and replace with the new record
- - If the record has source IP/port and destination IP/port, use it as the filter.
+ - If the record has source IP/port and destination IP/port, or protocol is icmp, use it as the filter.
- Else if the record has a Zeek x509 "ID" search for the first Zeek record with this ID.
- Else if the record has a Zeek file "FUID" search for the first Zeek record with this FUID.
- Search for the Zeek record with a matching log.id.uid equal to the UID from the previously found record
@@ -611,42 +614,45 @@ func (store *ElasticEventstore) PopulateJobFromDocQuery(ctx context.Context, idF
rangeFilter, timestamp = store.buildRangeFilter(timestampStr)
}
- // Check if user has pivoted to a PCAP that is encapsulated in a tunnel. The best we
- // can do in this situation is respond with the tunnel PCAP data, which could be excessive.
- tunnelParent := gjson.Get(json, "hits.hits.0._source.log.id.tunnel_parents").String()
- if len(tunnelParent) > 0 {
- log.Info("Document is inside of a tunnel; attempting to lookup tunnel connection log")
- if tunnelParent[0] == '[' {
- tunnelParent = gjson.Get(json, "hits.hits.0._source.log.id.tunnel_parents.0").String()
- }
- query := fmt.Sprintf(`
- {
- "query" : {
- "bool": {
- "must": [
- { "match" : { "log.id.uid" : "%s" }}%s
- ]
- }
- }
- }`, tunnelParent, rangeFilter)
+ if store.lookupTunnelParent {
+ // Check if user has pivoted to a PCAP that is encapsulated in a tunnel. The best we
+ // can do in this situation is respond with the tunnel PCAP data, which could be excessive.
+ tunnelParent := gjson.Get(json, "hits.hits.0._source.log.id.tunnel_parents").String()
+ if len(tunnelParent) > 0 {
+ log.Info("Document is inside of a tunnel; attempting to lookup tunnel connection log")
+ if tunnelParent[0] == '[' {
+ tunnelParent = gjson.Get(json, "hits.hits.0._source.log.id.tunnel_parents.0").String()
+ }
+ query := fmt.Sprintf(`
+ {
+ "query" : {
+ "bool": {
+ "must": [
+ { "match" : { "log.id.uid" : "%s" }}%s
+ ]
+ }
+ }
+ }`, tunnelParent, rangeFilter)
- json, err = store.luceneSearch(ctx, query)
- log.WithFields(log.Fields{
- "query": store.truncate(query),
- "response": store.truncate(json),
- }).Debug("Elasticsearch tunnel search finished")
- if err != nil {
- log.WithField("query", store.truncate(query)).WithError(err).Error("Unable to lookup tunnel record")
- return err
- }
- hits := gjson.Get(json, "hits.total.value").Int()
- if hits == 0 {
- log.WithField("query", store.truncate(query)).Error("Tunnel record was not found")
- return errors.New("Unable to locate encapsulating tunnel record")
+ json, err = store.luceneSearch(ctx, query)
+ log.WithFields(log.Fields{
+ "query": store.truncate(query),
+ "response": store.truncate(json),
+ }).Debug("Elasticsearch tunnel search finished")
+ if err != nil {
+ log.WithField("query", store.truncate(query)).WithError(err).Error("Unable to lookup tunnel record")
+ return err
+ }
+ hits := gjson.Get(json, "hits.total.value").Int()
+ if hits == 0 {
+ log.WithField("query", store.truncate(query)).Error("Tunnel record was not found")
+ return errors.New("Unable to locate encapsulating tunnel record")
+ }
}
}
filter.ImportId = gjson.Get(json, "hits.hits.0._source.import.id").String()
+ filter.Protocol = strings.ToLower(gjson.Get(json, "hits.hits.0._source.network.transport").String())
filter.SrcIp = gjson.Get(json, "hits.hits.0._source.source.ip").String()
filter.SrcPort = int(gjson.Get(json, "hits.hits.0._source.source.port").Int())
filter.DstIp = gjson.Get(json, "hits.hits.0._source.destination.ip").String()
@@ -658,7 +664,7 @@ func (store *ElasticEventstore) PopulateJobFromDocQuery(ctx context.Context, idF
duration := int64(store.defaultDurationMs)
// If source and destination IP/port details aren't available search ES again for a correlating Zeek record
- if len(filter.SrcIp) == 0 || len(filter.DstIp) == 0 || filter.SrcPort == 0 || filter.DstPort == 0 {
+ if (len(filter.SrcIp) == 0 || len(filter.DstIp) == 0 || filter.SrcPort == 0 || filter.DstPort == 0) && filter.Protocol != model.PROTOCOL_ICMP {
if len(uid) == 0 || uid[0] != 'C' {
zeekFileQuery := ""
if len(x509id) > 0 && x509id[0] == 'F' {
@@ -746,12 +752,13 @@ func (store *ElasticEventstore) PopulateJobFromDocQuery(ctx context.Context, idF
matchTs, err = time.Parse(time.RFC3339, ts.String())
if err == nil {
idxStr := strconv.Itoa(idx)
+							protocol := strings.ToLower(gjson.Get(json, "hits.hits."+idxStr+"._source.network.transport").String())
srcIp := gjson.Get(json, "hits.hits."+idxStr+"._source.source.ip").String()
srcPort := int(gjson.Get(json, "hits.hits."+idxStr+"._source.source.port").Int())
dstIp := gjson.Get(json, "hits.hits."+idxStr+"._source.destination.ip").String()
dstPort := int(gjson.Get(json, "hits.hits."+idxStr+"._source.destination.port").Int())
- if len(srcIp) > 0 && len(dstIp) > 0 && srcPort > 0 && dstPort > 0 {
+ if (len(srcIp) > 0 && len(dstIp) > 0 && srcPort > 0 && dstPort > 0) || (protocol == model.PROTOCOL_ICMP) {
delta := timestamp.Sub(matchTs)
deltaNs := delta.Nanoseconds()
if deltaNs < 0 {
@@ -761,6 +768,7 @@ func (store *ElasticEventstore) PopulateJobFromDocQuery(ctx context.Context, idF
closestDeltaNs = deltaNs
timestamp = matchTs
+ filter.Protocol = protocol
filter.SrcIp = srcIp
filter.SrcPort = srcPort
filter.DstIp = dstIp