Skip to content

Commit

Permalink
Add unit tests for the missing aggregator (#23895)
Browse files Browse the repository at this point in the history
* Add unit tests for the missing aggregator

Relates #22278
  • Loading branch information
jimczi authored Apr 4, 2017
1 parent a04350f commit c14be20
Show file tree
Hide file tree
Showing 3 changed files with 156 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -238,13 +238,16 @@ protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduc
if (aggs.isEmpty()) {
return null;
} else {
if (randomBoolean()) {
if (randomBoolean() && aggs.size() > 1) {
// sometimes do an incremental reduce
List<InternalAggregation> internalAggregations = randomSubsetOf(randomIntBetween(1, aggs.size()), aggs);
A internalAgg = (A) aggs.get(0).doReduce(internalAggregations,
int toReduceSize = aggs.size();
Collections.shuffle(aggs, random());
int r = randomIntBetween(1, toReduceSize);
List<InternalAggregation> toReduce = aggs.subList(0, r);
A reduced = (A) aggs.get(0).doReduce(toReduce,
new InternalAggregation.ReduceContext(root.context().bigArrays(), null, false));
aggs.removeAll(internalAggregations);
aggs.add(internalAgg);
aggs = new ArrayList<>(aggs.subList(r, toReduceSize));
aggs.add(reduced);
}
// now do the final reduce
@SuppressWarnings("unchecked")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,15 +63,15 @@ public void testReduceRandom() {
ScriptService mockScriptService = mockScriptService();
MockBigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService());
if (randomBoolean() && toReduce.size() > 1) {
// sometimes do an incremental reduce
Collections.shuffle(toReduce, random());
// we leave at least one element in the list
int r = Math.max(1, randomIntBetween(0, toReduceSize - 2));
int r = randomIntBetween(1, toReduceSize);
List<InternalAggregation> internalAggregations = toReduce.subList(0, r);
InternalAggregation.ReduceContext context =
new InternalAggregation.ReduceContext(bigArrays, mockScriptService, false);
@SuppressWarnings("unchecked")
T reduced = (T) inputs.get(0).reduce(internalAggregations, context);
toReduce = toReduce.subList(r, toReduceSize);
toReduce = new ArrayList<>(toReduce.subList(r, toReduceSize));
toReduce.add(reduced);
}
InternalAggregation.ReduceContext context =
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search.aggregations.bucket.missing;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.support.ValueType;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;


/**
 * Unit tests for the {@code missing} bucket aggregator: each test indexes a set of
 * randomly generated documents and verifies the number of documents the aggregator
 * reports as lacking a value for the target field, both with and without a reduce phase.
 */
public class MissingAggregatorTests extends AggregatorTestCase {

    /** Every document carries the aggregated field, so the missing count must be zero. */
    public void testMatchNoDocs() throws IOException {
        int numDocs = randomIntBetween(10, 200);
        testBothCases(numDocs,
            "field",
            Queries.newMatchAllQuery(),
            doc -> doc.add(new SortedNumericDocValuesField("field", randomLong())),
            // assertEquals takes (expected, actual) — expected count of 0 goes first
            internalMissing -> assertEquals(0, internalMissing.getDocCount()));
    }

    /** No document carries the aggregated field, so every document counts as missing. */
    public void testMatchAllDocs() throws IOException {
        int numDocs = randomIntBetween(10, 200);
        testBothCases(numDocs,
            "field",
            Queries.newMatchAllQuery(),
            doc -> doc.add(new SortedNumericDocValuesField("another_field", randomLong())),
            internalMissing -> assertEquals(numDocs, internalMissing.getDocCount()));
    }

    /** A random subset of documents lacks the field; the missing count must match exactly. */
    public void testMatchSparse() throws IOException {
        int numDocs = randomIntBetween(100, 200);
        final AtomicInteger count = new AtomicInteger();
        testBothCases(numDocs,
            "field",
            Queries.newMatchAllQuery(),
            doc -> {
                if (randomBoolean()) {
                    doc.add(new SortedNumericDocValuesField("another_field", randomLong()));
                    count.incrementAndGet();
                } else {
                    doc.add(new SortedNumericDocValuesField("field", randomLong()));
                }
            },
            internalMissing -> {
                assertEquals(count.get(), internalMissing.getDocCount());
                // testBothCases reuses this consumer/verifier pair for the reduced run,
                // so the counter must be reset between executions
                count.set(0);
            });
    }

    /** Aggregating on a field that no mapping knows: all documents are missing it. */
    public void testMissingField() throws IOException {
        int numDocs = randomIntBetween(10, 20);
        testBothCases(numDocs,
            "unknown_field",
            Queries.newMatchAllQuery(),
            doc -> doc.add(new SortedNumericDocValuesField("field", randomLong())),
            internalMissing -> assertEquals(numDocs, internalMissing.getDocCount()));
    }

    /**
     * Runs the same scenario twice: once through a plain search and once through
     * search-and-reduce, so both code paths are exercised with identical inputs.
     */
    private void testBothCases(int numDocs,
                               String fieldName,
                               Query query,
                               Consumer<Document> consumer,
                               Consumer<InternalMissing> verify) throws IOException {
        executeTestCase(numDocs, fieldName, query, consumer, verify, false);
        executeTestCase(numDocs, fieldName, query, consumer, verify, true);
    }

    /**
     * Indexes {@code numDocs} documents (each populated by {@code consumer}), runs the
     * {@code missing} aggregation on {@code fieldName} against {@code query}, and hands
     * the resulting {@link InternalMissing} to {@code verify}.
     *
     * @param reduced whether to run the reduce phase ({@code searchAndReduce}) or a
     *                single-shard search ({@code search})
     */
    private void executeTestCase(int numDocs,
                                 String fieldName,
                                 Query query,
                                 Consumer<Document> consumer,
                                 Consumer<InternalMissing> verify,
                                 boolean reduced) throws IOException {
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                Document document = new Document();
                for (int i = 0; i < numDocs; i++) {
                    // random commits spread the documents over multiple segments
                    if (frequently()) {
                        indexWriter.commit();
                    }
                    consumer.accept(document);
                    indexWriter.addDocument(document);
                    document.clear();
                }
            }

            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                IndexSearcher indexSearcher =
                    newSearcher(indexReader, true, true);
                MissingAggregationBuilder builder =
                    new MissingAggregationBuilder("_name", ValueType.LONG);
                builder.field(fieldName);

                NumberFieldMapper.Builder mapperBuilder = new NumberFieldMapper.Builder("_name",
                    NumberFieldMapper.NumberType.LONG);
                MappedFieldType fieldType = mapperBuilder.fieldType();
                fieldType.setHasDocValues(true);
                // the mapped field must match the field the aggregation targets
                fieldType.setName(builder.field());

                InternalMissing missing;
                if (reduced) {
                    missing = searchAndReduce(indexSearcher, query, builder, fieldType);
                } else {
                    missing = search(indexSearcher, query, builder, fieldType);
                }
                verify.accept(missing);
            }
        }
    }
}

0 comments on commit c14be20

Please sign in to comment.