diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
index 363e972456efb..a0f7bbfcba43b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
@@ -238,13 +238,16 @@ protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduc
         if (aggs.isEmpty()) {
             return null;
         } else {
-            if (randomBoolean()) {
+            if (randomBoolean() && aggs.size() > 1) {
                 // sometimes do an incremental reduce
-                List<InternalAggregation> internalAggregations = randomSubsetOf(randomIntBetween(1, aggs.size()), aggs);
-                A internalAgg = (A) aggs.get(0).doReduce(internalAggregations,
+                int toReduceSize = aggs.size();
+                Collections.shuffle(aggs, random());
+                int r = randomIntBetween(1, toReduceSize);
+                List<InternalAggregation> toReduce = aggs.subList(0, r);
+                A reduced = (A) aggs.get(0).doReduce(toReduce,
                         new InternalAggregation.ReduceContext(root.context().bigArrays(), null, false));
-                aggs.removeAll(internalAggregations);
-                aggs.add(internalAgg);
+                aggs = new ArrayList<>(aggs.subList(r, toReduceSize));
+                aggs.add(reduced);
             }
             // now do the final reduce
             @SuppressWarnings("unchecked")
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java
index 05d75d9af77f1..48f48951c474a 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java
@@ -63,15 +63,15 @@ public void testReduceRandom() {
         ScriptService mockScriptService = mockScriptService();
         MockBigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService());
         if (randomBoolean() && toReduce.size() > 1) {
+            // sometimes do an incremental reduce
             Collections.shuffle(toReduce, random());
-            // we leave at least one element in the list
-            int r = Math.max(1, randomIntBetween(0, toReduceSize - 2));
+            int r = randomIntBetween(1, toReduceSize);
             List<InternalAggregation> internalAggregations = toReduce.subList(0, r);
             InternalAggregation.ReduceContext context =
                 new InternalAggregation.ReduceContext(bigArrays, mockScriptService, false);
             @SuppressWarnings("unchecked")
             T reduced = (T) inputs.get(0).reduce(internalAggregations, context);
-            toReduce = toReduce.subList(r, toReduceSize);
+            toReduce = new ArrayList<>(toReduce.subList(r, toReduceSize));
             toReduce.add(reduced);
         }
         InternalAggregation.ReduceContext context =
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java
new file mode 100644
index 0000000000000..424c3aed2105d
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.bucket.missing;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.support.ValueType;
+
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Consumer;
+
+
+public class MissingAggregatorTests extends AggregatorTestCase {
+    public void testMatchNoDocs() throws IOException {
+        int numDocs = randomIntBetween(10, 200);
+        testBothCases(numDocs,
+            "field",
+            Queries.newMatchAllQuery(),
+            doc -> doc.add(new SortedNumericDocValuesField("field", randomLong())),
+            internalMissing -> assertEquals(internalMissing.getDocCount(), 0));
+    }
+
+    public void testMatchAllDocs() throws IOException {
+        int numDocs = randomIntBetween(10, 200);
+        testBothCases(numDocs,
+            "field",
+            Queries.newMatchAllQuery(),
+            doc -> doc.add(new SortedNumericDocValuesField("another_field", randomLong())),
+            internalMissing -> assertEquals(internalMissing.getDocCount(), numDocs));
+    }
+
+    public void testMatchSparse() throws IOException {
+        int numDocs = randomIntBetween(100, 200);
+        final AtomicInteger count = new AtomicInteger();
+        testBothCases(numDocs,
+            "field",
+            Queries.newMatchAllQuery(),
+            doc -> {
+                if (randomBoolean()) {
+                    doc.add(new SortedNumericDocValuesField("another_field", randomLong()));
+                    count.incrementAndGet();
+                } else {
+                    doc.add(new SortedNumericDocValuesField("field", randomLong()));
+                }
+            },
+            internalMissing -> {
+                assertEquals(internalMissing.getDocCount(), count.get());
+                count.set(0);
+            });
+    }
+
+    public void testMissingField() throws IOException {
+        int numDocs = randomIntBetween(10, 20);
+        testBothCases(numDocs,
+            "unknown_field",
+            Queries.newMatchAllQuery(),
+            doc -> {
+                doc.add(new SortedNumericDocValuesField("field", randomLong()));
+            },
+            internalMissing -> {
+                assertEquals(internalMissing.getDocCount(), numDocs);
+            });
+    }
+
+    private void testBothCases(int numDocs,
+                               String fieldName,
+                               Query query,
+                               Consumer<Document> consumer,
+                               Consumer<InternalMissing> verify) throws IOException {
+        executeTestCase(numDocs, fieldName, query, consumer, verify, false);
+        executeTestCase(numDocs, fieldName, query, consumer, verify, true);
+
+    }
+
+    private void executeTestCase(int numDocs,
+                                 String fieldName,
+                                 Query query,
+                                 Consumer<Document> consumer,
+                                 Consumer<InternalMissing> verify,
+                                 boolean reduced) throws IOException {
+        try (Directory directory = newDirectory()) {
+            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
+                Document document = new Document();
+                for (int i = 0; i < numDocs; i++) {
+                    if (frequently()) {
+                        indexWriter.commit();
+                    }
+                    consumer.accept(document);
+                    indexWriter.addDocument(document);
+                    document.clear();
+                }
+            }
+
+            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+                IndexSearcher indexSearcher =
+                    newSearcher(indexReader, true, true);
+                MissingAggregationBuilder builder =
+                    new MissingAggregationBuilder("_name", ValueType.LONG);
+                builder.field(fieldName);
+
+                NumberFieldMapper.Builder mapperBuilder = new NumberFieldMapper.Builder("_name",
+                    NumberFieldMapper.NumberType.LONG);
+                MappedFieldType fieldType = mapperBuilder.fieldType();
+                fieldType.setHasDocValues(true);
+                fieldType.setName(builder.field());
+
+                InternalMissing missing;
+                if (reduced) {
+                    missing = searchAndReduce(indexSearcher, query, builder, fieldType);
+                } else {
+                    missing = search(indexSearcher, query, builder, fieldType);
+                }
+                verify.accept(missing);
+            }
+        }
+    }
+}