diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 28008f4313c97..0a60d6ef87a44 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -71,7 +71,9 @@ public class PluginBuildPlugin extends BuildPlugin { if (isModule) { project.integTestCluster.module(project) project.tasks.run.clusterConfig.module(project) - project.tasks.run.clusterConfig.distribution = 'integ-test-zip' + project.tasks.run.clusterConfig.distribution = System.getProperty( + 'run.distribution', 'integ-test-zip' + ) } else { project.integTestCluster.plugin(project.path) project.tasks.run.clusterConfig.plugin(project.path) @@ -111,7 +113,7 @@ public class PluginBuildPlugin extends BuildPlugin { private static void createIntegTestTask(Project project) { RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class) integTest.mustRunAfter(project.precommit, project.test) - project.integTestCluster.distribution = 'integ-test-zip' + project.integTestCluster.distribution = System.getProperty('tests.distribution', 'integ-test-zip') project.check.dependsOn(integTest) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 14aa53e4a1762..be0fb3a07c699 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -88,6 +88,9 @@ class ClusterFormationTasks { Configuration currentDistro = project.configurations.create("${prefix}_elasticsearchDistro") Configuration bwcDistro = project.configurations.create("${prefix}_elasticsearchBwcDistro") Configuration bwcPlugins = 
project.configurations.create("${prefix}_elasticsearchBwcPlugins") + if (System.getProperty('tests.distribution', 'oss-zip') == 'integ-test-zip') { + throw new Exception("tests.distribution=integ-test-zip is not supported") + } configureDistributionDependency(project, config.distribution, currentDistro, VersionProperties.elasticsearch) if (config.numBwcNodes > 0) { if (config.bwcVersion == null) { @@ -533,7 +536,8 @@ class ClusterFormationTasks { static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) { if (node.config.distribution != 'integ-test-zip') { - throw new GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!") + project.logger.info("Not installing modules for $name, ${node.config.distribution} already has them") + return setup } if (module.plugins.hasPlugin(PluginBuildPlugin) == false) { throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin") @@ -643,6 +647,9 @@ class ClusterFormationTasks { BuildPlugin.requireJavaHome(start, node.javaVersion) } start.doLast(elasticsearchRunner) + start.doFirst { + project.logger.info("Starting node in ${node.clusterName} distribution: ${node.config.distribution}") + } return start } diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java index e73edb143e0d0..e66ef6208a6cf 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java @@ -329,7 +329,7 @@ public NoopSearchRequestBuilder 
addSort(String field, SortOrder order) { * * @see org.elasticsearch.search.sort.SortBuilders */ - public NoopSearchRequestBuilder addSort(SortBuilder sort) { + public NoopSearchRequestBuilder addSort(SortBuilder sort) { sourceBuilder().sort(sort); return this; } @@ -415,7 +415,7 @@ public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer) { * @param window rescore window * @return this for chaining */ - public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) { + public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) { sourceBuilder().clearRescorers(); return addRescorer(rescorer.windowSize(window)); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 4193685f14bc2..b8a6b7d2d8ad2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -113,7 +113,7 @@ public void testIndex() throws Exception { .source(jsonMap); // <1> //end::index-request-map IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.CREATED); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } { //tag::index-request-xcontent @@ -129,7 +129,7 @@ public void testIndex() throws Exception { .source(builder); // <1> //end::index-request-xcontent IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); } { //tag::index-request-shortcut @@ -139,7 +139,7 @@ public void testIndex() throws Exception { "message", "trying out 
Elasticsearch"); // <1> //end::index-request-shortcut IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); } { //tag::index-request-string @@ -158,7 +158,7 @@ public void testIndex() throws Exception { // tag::index-execute IndexResponse indexResponse = client.index(request, RequestOptions.DEFAULT); // end::index-execute - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); // tag::index-response String index = indexResponse.getIndex(); @@ -269,7 +269,7 @@ public void testUpdate() throws Exception { { IndexRequest indexRequest = new IndexRequest("posts", "doc", "1").source("field", 0); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); Request request = new Request("POST", "/_scripts/increment-field"); request.setJsonEntity(Strings.toString(JsonXContent.contentBuilder() @@ -280,7 +280,7 @@ public void testUpdate() throws Exception { .endObject() .endObject())); Response response = client().performRequest(request); - assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus()); + assertEquals(RestStatus.OK.getStatus(), response.getStatusLine().getStatusCode()); } { //tag::update-request @@ -298,7 +298,7 @@ public void testUpdate() throws Exception { request.script(inline); // <3> //end::update-request-with-inline-script UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(4, updateResponse.getGetResult().getSource().get("field")); 
request = new UpdateRequest("posts", "doc", "1").fetchSource(true); @@ -308,7 +308,7 @@ public void testUpdate() throws Exception { request.script(stored); // <2> //end::update-request-with-stored-script updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(8, updateResponse.getGetResult().getSource().get("field")); } { @@ -320,7 +320,7 @@ public void testUpdate() throws Exception { .doc(jsonMap); // <1> //end::update-request-with-doc-as-map UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-with-doc-as-xcontent @@ -335,7 +335,7 @@ public void testUpdate() throws Exception { .doc(builder); // <1> //end::update-request-with-doc-as-xcontent UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-shortcut @@ -344,7 +344,7 @@ public void testUpdate() throws Exception { "reason", "daily update"); // <1> //end::update-request-shortcut UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-with-doc-as-string @@ -359,7 +359,7 @@ public void testUpdate() throws Exception { // tag::update-execute UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); // end::update-execute - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + 
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); // tag::update-response String index = updateResponse.getIndex(); @@ -434,7 +434,7 @@ public void testUpdate() throws Exception { request.fetchSource(true); // <1> //end::update-request-no-source UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertNotNull(updateResponse.getGetResult()); assertEquals(3, updateResponse.getGetResult().sourceAsMap().size()); } @@ -446,7 +446,7 @@ public void testUpdate() throws Exception { request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-include UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); Map sourceAsMap = updateResponse.getGetResult().sourceAsMap(); assertEquals(2, sourceAsMap.size()); assertEquals("source includes", sourceAsMap.get("reason")); @@ -460,7 +460,7 @@ public void testUpdate() throws Exception { request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-exclude UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); Map sourceAsMap = updateResponse.getGetResult().sourceAsMap(); assertEquals(2, sourceAsMap.size()); assertEquals("source excludes", sourceAsMap.get("reason")); @@ -538,7 +538,7 @@ public void testDelete() throws Exception { { IndexRequest indexRequest = new IndexRequest("posts", "doc", "1").source("field", "value"); IndexResponse indexResponse = client.index(indexRequest, 
RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); } { @@ -552,7 +552,7 @@ public void testDelete() throws Exception { // tag::delete-execute DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT); // end::delete-execute - assertSame(deleteResponse.getResult(), DocWriteResponse.Result.DELETED); + assertSame(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); // tag::delete-response String index = deleteResponse.getIndex(); @@ -605,7 +605,7 @@ public void testDelete() throws Exception { { IndexResponse indexResponse = client.index(new IndexRequest("posts", "doc", "1").source("field", "value") , RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); // tag::delete-conflict try { @@ -621,7 +621,7 @@ public void testDelete() throws Exception { { IndexResponse indexResponse = client.index(new IndexRequest("posts", "doc", "async").source("field", "value"), RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); DeleteRequest request = new DeleteRequest("posts", "doc", "async"); @@ -666,7 +666,7 @@ public void testBulk() throws Exception { // tag::bulk-execute BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); // end::bulk-execute - assertSame(bulkResponse.status(), RestStatus.OK); + assertSame(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); } { @@ -679,7 +679,7 @@ public void testBulk() throws Exception { .source(XContentType.JSON,"field", "baz")); // end::bulk-request-with-mixed-operations BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); - assertSame(bulkResponse.status(), RestStatus.OK); + assertSame(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); // tag::bulk-response 
@@ -778,7 +778,7 @@ public void testGet() throws Exception { "postDate", new Date(), "message", "trying out Elasticsearch"); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.CREATED); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } { //tag::get-request diff --git a/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java b/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java index e4bb43458648b..11232a08c3d29 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java +++ b/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java @@ -22,6 +22,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; /** * A {@link NodeSelector} that selects nodes that have a particular value @@ -49,6 +50,24 @@ public void select(Iterable nodes) { } } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + HasAttributeNodeSelector that = (HasAttributeNodeSelector) o; + return Objects.equals(key, that.key) && + Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + @Override public String toString() { return key + "=" + value; diff --git a/distribution/packages/src/common/scripts/preinst b/distribution/packages/src/common/scripts/preinst index 2aec2172ad856..22f2405af3c2b 100644 --- a/distribution/packages/src/common/scripts/preinst +++ b/distribution/packages/src/common/scripts/preinst @@ -9,6 +9,18 @@ # $1=1 : indicates an new install # $1=2 : indicates an upgrade +# Check for these at preinst time due to failures in postinst if they do not exist +if [ -x "$JAVA_HOME/bin/java" ]; then + JAVA="$JAVA_HOME/bin/java" +else + JAVA=`which java` 
+fi + +if [ -z "$JAVA" ]; then + echo "could not find java; set JAVA_HOME or ensure java is in PATH" + exit 1 +fi + case "$1" in # Debian #################################################### diff --git a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc index b23a683b05610..5b68fa7be451f 100644 --- a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -13,8 +13,8 @@ Here is an example on how to create the aggregation request: -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")); -------------------------------------------------- You can also specify a `combine` script which will be executed on each shard: @@ -23,9 +23,9 @@ You can also specify a `combine` script which will be executed on each shard: -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) - .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? 
doc.height.value : -1.0 * doc.height.value)")) + .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum")); -------------------------------------------------- You can also specify a `reduce` script which will be executed on the node which gets the request: @@ -34,10 +34,10 @@ You can also specify a `reduce` script which will be executed on the node which -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) - .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum")) - .reduceScript(new Script("double heights_sum = 0.0; for (a in params._aggs) { heights_sum += a } return heights_sum")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) + .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum")) + .reduceScript(new Script("double heights_sum = 0.0; for (a in states) { heights_sum += a } return heights_sum")); -------------------------------------------------- diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc index 019094cfa3fe2..3bfa8d91f8b4e 100644 --- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc @@ -224,7 +224,7 @@ Time values can also be specified via abbreviations supported by < Supports expressive date <> -====== Time Zone +*Time Zone* Date-times are stored in Elasticsearch in UTC. 
By default, all bucketing and rounding is also done in UTC. The `time_zone` parameter can be used to indicate diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 1a4d6d4774c49..c4857699f9805 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -15,10 +15,10 @@ POST ledger/_search?size=0 "aggs": { "profit": { "scripted_metric": { - "init_script" : "params._agg.transactions = []", - "map_script" : "params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> - "combine_script" : "double profit = 0; for (t in params._agg.transactions) { profit += t } return profit", - "reduce_script" : "double profit = 0; for (a in params._aggs) { profit += a } return profit" + "init_script" : "state.transactions = []", + "map_script" : "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> + "combine_script" : "double profit = 0; for (t in state.transactions) { profit += t } return profit", + "reduce_script" : "double profit = 0; for (a in states) { profit += a } return profit" } } } @@ -67,8 +67,7 @@ POST ledger/_search?size=0 "id": "my_combine_script" }, "params": { - "field": "amount", <1> - "_agg": {} <2> + "field": "amount" <1> }, "reduce_script" : { "id": "my_reduce_script" @@ -82,8 +81,7 @@ POST ledger/_search?size=0 // TEST[setup:ledger,stored_scripted_metric_script] <1> script parameters for `init`, `map` and `combine` scripts must be specified -in a global `params` object so that it can be share between the scripts. -<2> if you specify script parameters then you must specify `"_agg": {}`. +in a global `params` object so that it can be shared between the scripts. //// Verify this response as well but in a hidden block. 
@@ -108,7 +106,7 @@ For more details on specifying scripts see <> fields can take any <>. - <> fields accept a https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat pattern]. diff --git a/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml b/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml index 5ca9a323387ec..cde34dfa10760 100644 --- a/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml +++ b/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: aggs-matrix-stats } + - contains: { nodes.$master.modules: { name: aggs-matrix-stats } } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml index d27a0861b2e38..b9b905639fd70 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml @@ -8,4 +8,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: analysis-common } + - contains: { nodes.$master.modules: { name: analysis-common } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java index 2a1046acb9cdb..1c64fdb7408ef 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java @@ -30,6 +30,7 @@ import java.util.Set; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static 
org.elasticsearch.ingest.ConfigurationUtils.readBooleanProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readMap; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; @@ -47,16 +48,28 @@ public final class ForEachProcessor extends AbstractProcessor { private final String field; private final Processor processor; + private final boolean ignoreMissing; - ForEachProcessor(String tag, String field, Processor processor) { + ForEachProcessor(String tag, String field, Processor processor, boolean ignoreMissing) { super(tag); this.field = field; this.processor = processor; + this.ignoreMissing = ignoreMissing; + } + + boolean isIgnoreMissing() { + return ignoreMissing; } @Override public void execute(IngestDocument ingestDocument) throws Exception { - List values = ingestDocument.getFieldValue(field, List.class); + List values = ingestDocument.getFieldValue(field, List.class, ignoreMissing); + if (values == null) { + if (ignoreMissing) { + return; + } + throw new IllegalArgumentException("field [" + field + "] is null, cannot loop over its elements."); + } List newValues = new ArrayList<>(values.size()); for (Object value : values) { Object previousValue = ingestDocument.getIngestMetadata().put("_value", value); @@ -87,6 +100,7 @@ public static final class Factory implements Processor.Factory { public ForEachProcessor create(Map factories, String tag, Map config) throws Exception { String field = readStringProperty(TYPE, tag, config, "field"); + boolean ignoreMissing = readBooleanProperty(TYPE, tag, config, "ignore_missing", false); Map> processorConfig = readMap(TYPE, tag, config, "processor"); Set>> entries = processorConfig.entrySet(); if (entries.size() != 1) { @@ -94,7 +108,7 @@ public ForEachProcessor create(Map factories, String } Map.Entry> entry = entries.iterator().next(); Processor processor = ConfigurationUtils.readProcessor(factories, entry.getKey(), entry.getValue()); - return new ForEachProcessor(tag, field, 
processor); + return new ForEachProcessor(tag, field, processor, ignoreMissing); } } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java index 2042bb745bc1b..7a48c9ace326d 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java @@ -91,7 +91,7 @@ public void testAppendValuesToNonExistingList() throws Exception { appendProcessor = createAppendProcessor(field, values); } appendProcessor.execute(ingestDocument); - List list = ingestDocument.getFieldValue(field, List.class); + List list = ingestDocument.getFieldValue(field, List.class); assertThat(list, not(sameInstance(values))); assertThat(list, equalTo(values)); } @@ -115,7 +115,7 @@ public void testConvertScalarToList() throws Exception { appendProcessor = createAppendProcessor(field, values); } appendProcessor.execute(ingestDocument); - List fieldValue = ingestDocument.getFieldValue(field, List.class); + List fieldValue = ingestDocument.getFieldValue(field, List.class); assertThat(fieldValue.size(), equalTo(values.size() + 1)); assertThat(fieldValue.get(0), equalTo(initialValue)); for (int i = 1; i < values.size() + 1; i++) { @@ -144,7 +144,7 @@ public void testAppendMetadataExceptVersion() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetaData.getFieldName()); appendProcessor.execute(ingestDocument); - List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class); + List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class); if (initialValue == null) { assertThat(list, equalTo(values)); } else { diff --git 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java index 49611d76f4081..f382ad8dcfb6a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java @@ -46,6 +46,24 @@ public void testCreate() throws Exception { assertThat(forEachProcessor, Matchers.notNullValue()); assertThat(forEachProcessor.getField(), equalTo("_field")); assertThat(forEachProcessor.getProcessor(), Matchers.sameInstance(processor)); + assertFalse(forEachProcessor.isIgnoreMissing()); + } + + public void testSetIgnoreMissing() throws Exception { + Processor processor = new TestProcessor(ingestDocument -> { }); + Map registry = new HashMap<>(); + registry.put("_name", (r, t, c) -> processor); + ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("processor", Collections.singletonMap("_name", Collections.emptyMap())); + config.put("ignore_missing", true); + ForEachProcessor forEachProcessor = forEachFactory.create(registry, null, config); + assertThat(forEachProcessor, Matchers.notNullValue()); + assertThat(forEachProcessor.getField(), equalTo("_field")); + assertThat(forEachProcessor.getProcessor(), Matchers.sameInstance(processor)); + assertTrue(forEachProcessor.isIgnoreMissing()); } public void testCreateWithTooManyProcessorTypes() throws Exception { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 07573a780a17a..1491bd481bd07 100644 --- 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -19,14 +19,6 @@ package org.elasticsearch.ingest.common; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.TestProcessor; -import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.script.TemplateScript; -import org.elasticsearch.test.ESTestCase; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -34,7 +26,15 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.TestProcessor; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.script.TemplateScript; +import org.elasticsearch.test.ESTestCase; +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.equalTo; public class ForEachProcessorTests extends ESTestCase { @@ -49,7 +49,8 @@ public void testExecute() throws Exception { ); ForEachProcessor processor = new ForEachProcessor( - "_tag", "values", new UppercaseProcessor("_tag", "_ingest._value", false, "_ingest._value") + "_tag", "values", new UppercaseProcessor("_tag", "_ingest._value", false, "_ingest._value"), + false ); processor.execute(ingestDocument); @@ -69,7 +70,7 @@ public void testExecuteWithFailure() throws Exception { throw new RuntimeException("failure"); } }); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", testProcessor); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", testProcessor, false); try { processor.execute(ingestDocument); 
fail("exception expected"); @@ -89,7 +90,8 @@ public void testExecuteWithFailure() throws Exception { }); Processor onFailureProcessor = new TestProcessor(ingestDocument1 -> {}); processor = new ForEachProcessor( - "_tag", "values", new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)) + "_tag", "values", new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)), + false ); processor.execute(ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(3)); @@ -109,7 +111,7 @@ public void testMetaDataAvailable() throws Exception { id.setFieldValue("_ingest._value.type", id.getSourceAndMetadata().get("_type")); id.setFieldValue("_ingest._value.id", id.getSourceAndMetadata().get("_id")); }); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor, false); processor.execute(ingestDocument); assertThat(innerProcessor.getInvokedCounter(), equalTo(2)); @@ -137,7 +139,7 @@ public void testRestOfTheDocumentIsAvailable() throws Exception { ForEachProcessor processor = new ForEachProcessor( "_tag", "values", new SetProcessor("_tag", new TestTemplateService.MockTemplateScript.Factory("_ingest._value.new_field"), - (model) -> model.get("other"))); + (model) -> model.get("other")), false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("values.0.new_field", String.class), equalTo("value")); @@ -174,7 +176,7 @@ public String getTag() { "_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values) ); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor, false); processor.execute(ingestDocument); @SuppressWarnings("unchecked") List result = ingestDocument.getFieldValue("values", List.class); @@ -199,7 +201,7 @@ 
public void testModifyFieldsOutsideArray() throws Exception { "_tag", "values", new CompoundProcessor(false, Collections.singletonList(new UppercaseProcessor("_tag_upper", "_ingest._value", false, "_ingest._value")), Collections.singletonList(new AppendProcessor("_tag", template, (model) -> (Collections.singletonList("added")))) - )); + ), false); processor.execute(ingestDocument); List result = ingestDocument.getFieldValue("values", List.class); @@ -225,7 +227,7 @@ public void testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_source._value", String.class))); - ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor); + ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor, false); forEachProcessor.execute(ingestDocument); List result = ingestDocument.getFieldValue("values", List.class); @@ -258,7 +260,7 @@ public void testNestedForEach() throws Exception { doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH)) ); ForEachProcessor processor = new ForEachProcessor( - "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor)); + "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor, false), false); processor.execute(ingestDocument); List result = ingestDocument.getFieldValue("values1.0.values2", List.class); @@ -270,4 +272,16 @@ public void testNestedForEach() throws Exception { assertThat(result.get(1), equalTo("JKL")); } + public void testIgnoreMissing() throws Exception { + IngestDocument originalIngestDocument = new IngestDocument( + "_index", "_type", "_id", null, null, null, Collections.emptyMap() + ); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + TestProcessor testProcessor = new TestProcessor(doc -> {}); + 
ForEachProcessor processor = new ForEachProcessor("_tag", "_ingest._value", testProcessor, true); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + assertThat(testProcessor.getInvokedCounter(), equalTo(0)); + } + } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml index a58c329a7c525..12efaa9570372 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml @@ -8,25 +8,25 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: ingest-common } - - match: { nodes.$master.ingest.processors.0.type: append } - - match: { nodes.$master.ingest.processors.1.type: convert } - - match: { nodes.$master.ingest.processors.2.type: date } - - match: { nodes.$master.ingest.processors.3.type: date_index_name } - - match: { nodes.$master.ingest.processors.4.type: dot_expander } - - match: { nodes.$master.ingest.processors.5.type: fail } - - match: { nodes.$master.ingest.processors.6.type: foreach } - - match: { nodes.$master.ingest.processors.7.type: grok } - - match: { nodes.$master.ingest.processors.8.type: gsub } - - match: { nodes.$master.ingest.processors.9.type: join } - - match: { nodes.$master.ingest.processors.10.type: json } - - match: { nodes.$master.ingest.processors.11.type: kv } - - match: { nodes.$master.ingest.processors.12.type: lowercase } - - match: { nodes.$master.ingest.processors.13.type: remove } - - match: { nodes.$master.ingest.processors.14.type: rename } - - match: { nodes.$master.ingest.processors.15.type: script } - - match: { nodes.$master.ingest.processors.16.type: set } - - match: { nodes.$master.ingest.processors.17.type: sort } - - match: { nodes.$master.ingest.processors.18.type: split } - - match: { nodes.$master.ingest.processors.19.type: trim 
} - - match: { nodes.$master.ingest.processors.20.type: uppercase } + - contains: { nodes.$master.modules: { name: ingest-common } } + - contains: { nodes.$master.ingest.processors: { type: append } } + - contains: { nodes.$master.ingest.processors: { type: convert } } + - contains: { nodes.$master.ingest.processors: { type: date } } + - contains: { nodes.$master.ingest.processors: { type: date_index_name } } + - contains: { nodes.$master.ingest.processors: { type: dot_expander } } + - contains: { nodes.$master.ingest.processors: { type: fail } } + - contains: { nodes.$master.ingest.processors: { type: foreach } } + - contains: { nodes.$master.ingest.processors: { type: grok } } + - contains: { nodes.$master.ingest.processors: { type: gsub } } + - contains: { nodes.$master.ingest.processors: { type: join } } + - contains: { nodes.$master.ingest.processors: { type: json } } + - contains: { nodes.$master.ingest.processors: { type: kv } } + - contains: { nodes.$master.ingest.processors: { type: lowercase } } + - contains: { nodes.$master.ingest.processors: { type: remove } } + - contains: { nodes.$master.ingest.processors: { type: rename } } + - contains: { nodes.$master.ingest.processors: { type: script } } + - contains: { nodes.$master.ingest.processors: { type: set } } + - contains: { nodes.$master.ingest.processors: { type: sort } } + - contains: { nodes.$master.ingest.processors: { type: split } } + - contains: { nodes.$master.ingest.processors: { type: trim } } + - contains: { nodes.$master.ingest.processors: { type: uppercase } } diff --git a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml index cc777bd826bbc..0ca21cab93089 100644 --- a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml +++ b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml @@ 
-10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: lang-expression } + - contains: { nodes.$master.modules: { name: lang-expression } } diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml index 5deabe038906d..1a014e9cceaa6 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: lang-mustache } + - contains: { nodes.$master.modules: { name: lang-mustache } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java new file mode 100644 index 0000000000000..66d49be16ba9a --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -0,0 +1,126 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.script.ScriptedMetricAggContexts; +import org.elasticsearch.script.ScriptContext; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ScriptedMetricAggContextsTests extends ScriptTestCase { + @Override + protected Map, List> scriptContexts() { + Map, List> contexts = new HashMap<>(); + contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, Whitelist.BASE_WHITELISTS); + return contexts; + } + + public void testInitBasic() { + ScriptedMetricAggContexts.InitScript.Factory factory = scriptEngine.compile("test", + "state.testField = params.initialVal", ScriptedMetricAggContexts.InitScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + params.put("initialVal", 10); + + ScriptedMetricAggContexts.InitScript script = factory.newInstance(params, state); + script.execute(); + + assert(state.containsKey("testField")); + assertEquals(10, state.get("testField")); + } + + public void testMapBasic() { + ScriptedMetricAggContexts.MapScript.Factory factory = scriptEngine.compile("test", + "state.testField = 2*_score", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + Scorer scorer = new Scorer(null) { + @Override + public int docID() { return 0; } + + @Override + public float score() { return 0.5f; } + + @Override + public DocIdSetIterator iterator() { return 
null; } + }; + + ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null); + ScriptedMetricAggContexts.MapScript script = leafFactory.newInstance(null); + + script.setScorer(scorer); + script.execute(); + + assert(state.containsKey("testField")); + assertEquals(1.0, state.get("testField")); + } + + public void testCombineBasic() { + ScriptedMetricAggContexts.CombineScript.Factory factory = scriptEngine.compile("test", + "state.testField = params.initialVal; return state.testField + params.inc", ScriptedMetricAggContexts.CombineScript.CONTEXT, + Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + params.put("initialVal", 10); + params.put("inc", 2); + + ScriptedMetricAggContexts.CombineScript script = factory.newInstance(params, state); + Object res = script.execute(); + + assert(state.containsKey("testField")); + assertEquals(10, state.get("testField")); + assertEquals(12, res); + } + + public void testReduceBasic() { + ScriptedMetricAggContexts.ReduceScript.Factory factory = scriptEngine.compile("test", + "states[0].testField + states[1].testField", ScriptedMetricAggContexts.ReduceScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + List states = new ArrayList<>(); + + Map state1 = new HashMap<>(), state2 = new HashMap<>(); + state1.put("testField", 1); + state2.put("testField", 2); + + states.add(state1); + states.add(state2); + + ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, states); + Object res = script.execute(); + assertEquals(3, res); + } +} diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml index 1c81782f33a67..6d008a484ee3f 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml +++ 
b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: lang-painless } + - contains: { nodes.$master.modules: { name: lang-painless } } diff --git a/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml b/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml index 7edbc4c08fbf7..2def885234c3e 100644 --- a/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml +++ b/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml @@ -112,7 +112,7 @@ teardown: - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: repository-url } + - contains: { nodes.$master.modules: { name: repository-url } } --- "Restore with repository-url using http://": diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java index 9719d15778b53..0fa331ba138f6 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.tasks.Task; import java.io.Closeable; import java.net.SocketAddress; @@ -74,7 +75,7 @@ static Collection returnHttpResponseBodies(Collection static Collection returnOpaqueIds(Collection responses) { List list = new ArrayList<>(responses.size()); for (HttpResponse response : responses) { - list.add(response.headers().get("X-Opaque-Id")); + list.add(response.headers().get(Task.X_OPAQUE_ID)); } return list; } diff --git 
a/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml index e74b7f58c7520..e8b23fa71408b 100644 --- a/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml +++ b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml @@ -10,7 +10,7 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: transport-netty4 } + - contains: { nodes.$master.modules: { name: transport-netty4 } } - do: cluster.stats: {} diff --git a/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml index ea042d8a52da8..6d12da177ea66 100644 --- a/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml +++ b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: discovery-azure-classic } + - contains: { nodes.$master.plugins: { name: discovery-azure-classic } } diff --git a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml index d612c75db979c..3c5866663b94b 100644 --- a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml +++ b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: discovery-ec2 } + - contains: { nodes.$master.plugins: { name: discovery-ec2 } } diff --git a/plugins/discovery-file/build.gradle b/plugins/discovery-file/build.gradle index 529b8cbef304d..e7f2b3442716f 100644 --- a/plugins/discovery-file/build.gradle +++ 
b/plugins/discovery-file/build.gradle @@ -38,7 +38,7 @@ task setupSeedNodeAndUnicastHostsFile(type: DefaultTask) { // setup the initial cluster with one node that will serve as the seed node // for unicast discovery ClusterConfiguration config = new ClusterConfiguration(project) -config.distribution = 'integ-test-zip' +config.distribution = System.getProperty('tests.distribution', 'integ-test-zip') config.clusterName = 'discovery-file-test-cluster' List nodes = ClusterFormationTasks.setup(project, 'initialCluster', setupSeedNodeAndUnicastHostsFile, config) File srcUnicastHostsFile = file('build/cluster/unicast_hosts.txt') diff --git a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml index 6f48aa6c29e90..f16599c40fa32 100644 --- a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml +++ b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: discovery-gce } + - contains: { nodes.$master.plugins: { name: discovery-gce } } diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index 12bbff8b0419e..ef1ca7d741e9a 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -30,8 +30,8 @@ dependencies { compileOnly project(':modules:lang-painless') } -integTestCluster { - distribution = 'zip' +if (System.getProperty('tests.distribution') == null) { + integTestCluster.distribution = 'oss-zip' } test.enabled = false diff --git a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml index f0abcf117da15..a915c08067e5c 100644 --- 
a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: painless-whitelist } + - contains: { nodes.$master.plugins: { name: painless-whitelist } } diff --git a/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml b/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml index 75c22d6b578bd..62a47df9d7869 100644 --- a/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml +++ b/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: example-rescore } + - contains: { nodes.$master.plugins: { name: example-rescore } } diff --git a/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml b/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml index b4fafd69dd4ab..26980a95b730b 100644 --- a/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml +++ b/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: script-expert-scoring } + - contains: { nodes.$master.plugins: { name: script-expert-scoring } } diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 3bca078bd59c4..f000fdfeef5e0 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -23,8 +23,8 @@ esplugin { } versions << [ - 'tika': '1.17', - 'pdfbox': 
'2.0.8', + 'tika': '1.18', + 'pdfbox': '2.0.9', 'bouncycastle': '1.55', 'poi': '3.17', 'mime4j': '0.8.1' @@ -33,9 +33,10 @@ versions << [ dependencies { // mandatory for tika compile "org.apache.tika:tika-core:${versions.tika}" + // build against Jackson 2.9.5, but still works on our current version compile "org.apache.tika:tika-parsers:${versions.tika}" - compile 'org.tukaani:xz:1.6' - compile 'commons-io:commons-io:2.5' + compile 'org.tukaani:xz:1.8' + compile 'commons-io:commons-io:2.6' compile "org.slf4j:slf4j-api:${versions.slf4j}" // character set detection @@ -62,7 +63,7 @@ dependencies { // MS Office compile "org.apache.poi:poi-scratchpad:${versions.poi}" // Apple iWork - compile 'org.apache.commons:commons-compress:1.14' + compile 'org.apache.commons:commons-compress:1.16.1' // Outlook documents compile "org.apache.james:apache-mime4j-core:${versions.mime4j}" compile "org.apache.james:apache-mime4j-dom:${versions.mime4j}" @@ -118,6 +119,10 @@ thirdPartyAudit.excludes = [ 'com.drew.metadata.jpeg.JpegDirectory', 'com.github.junrar.Archive', 'com.github.junrar.rarfile.FileHeader', + 'com.github.luben.zstd.ZstdInputStream', + 'com.github.luben.zstd.ZstdOutputStream', + 'com.github.openjson.JSONArray', + 'com.github.openjson.JSONObject', 'com.google.common.reflect.TypeToken', 'com.google.gson.Gson', 'com.googlecode.mp4parser.DataSource', @@ -531,6 +536,7 @@ thirdPartyAudit.excludes = [ 'org.apache.commons.exec.PumpStreamHandler', 'org.apache.commons.exec.environment.EnvironmentUtils', 'org.apache.commons.lang.StringUtils', + 'org.apache.commons.lang.SystemUtils', 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept', 'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation', 'org.apache.cxf.jaxrs.client.WebClient', @@ -635,8 +641,6 @@ thirdPartyAudit.excludes = [ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList', 'org.etsi.uri.x01903.v14.ValidationDataType$Factory', 'org.etsi.uri.x01903.v14.ValidationDataType', - 
'org.json.JSONArray', - 'org.json.JSONObject', 'org.json.simple.JSONArray', 'org.json.simple.JSONObject', 'org.json.simple.parser.JSONParser', diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 deleted file mode 100644 index a93cac2243e69..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b18320d668ab080758bf5383d6d8fcf750babce \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 new file mode 100644 index 0000000000000..93be07c90a41c --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 @@ -0,0 +1 @@ +7b5cdabadb4cf12f5ee0f801399e70635583193f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 deleted file mode 100644 index b7f1d93e89702..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2852e6e05fbb95076fc091f6d1780f1f8fe35e0f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 new file mode 100644 index 0000000000000..75f7934c08267 --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 @@ -0,0 +1 @@ +815893df5f31da2ece4040fe0a12fd44b577afaf \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 deleted file mode 100644 index f8abddbc755eb..0000000000000 --- a/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -52f852fcfc7481d45efdffd224eb78b85981b17b \ No newline at end of file diff --git 
a/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 new file mode 100644 index 0000000000000..4ded3b5488825 --- /dev/null +++ b/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 @@ -0,0 +1 @@ +f961f17ebdbc307e9055e3cf7c0e207f0895ae55 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 deleted file mode 100644 index 1c346871e2119..0000000000000 --- a/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -17bdf273d66f3afe41eedb9d3ab6a7b819c44a0c \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 new file mode 100644 index 0000000000000..9bf91e07976c2 --- /dev/null +++ b/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 @@ -0,0 +1 @@ +d0425578218624388f2ec84a0b3a11efd55df0f5 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 deleted file mode 100644 index 571314b3378da..0000000000000 --- a/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b450102c2aee98107474d2f92661d947b9cef183 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 new file mode 100644 index 0000000000000..ef162f03439cc --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 @@ -0,0 +1 @@ +69556697de96cf0b22df846e970dafd29866eee0 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 deleted file mode 100644 index c4487e4970f25..0000000000000 --- 
a/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4277c54fcaed542fbc8a0001fdb4c23baccc0132 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 new file mode 100644 index 0000000000000..6441e8b64e7b7 --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 @@ -0,0 +1 @@ +7d9b6dea91d783165f3313d320d3aaaa9a4dfc13 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 b/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 deleted file mode 100644 index d91cd44c0b4d3..0000000000000 --- a/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -05b6f921f1810bdf90e25471968f741f87168b64 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 b/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 new file mode 100644 index 0000000000000..7455feac7983b --- /dev/null +++ b/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 @@ -0,0 +1 @@ +c4f7d054303948eb6a4066194253886c8af07128 \ No newline at end of file diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index 97ca1c0b19774..6606d1bc72727 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -159,6 +159,7 @@ static PermissionCollection getRestrictedPermissions() { perms.add(new SecurityPermission("putProviderProperty.BC")); perms.add(new SecurityPermission("insertProvider")); perms.add(new ReflectPermission("suppressAccessChecks")); + perms.add(new RuntimePermission("accessClassInPackage.sun.java2d.cmm.kcms")); // xmlbeans, use by POI, needs to get the context 
classloader perms.add(new RuntimePermission("getClassLoader")); // ZipFile needs accessDeclaredMembers on JDK 10; cf. https://bugs.openjdk.java.net/browse/JDK-8187485 diff --git a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy index 0cd359a99731b..bcc5eef3193d7 100644 --- a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy @@ -31,4 +31,6 @@ grant { permission java.lang.RuntimePermission "getClassLoader"; // ZipFile needs accessDeclaredMembers on Java 10 permission java.lang.RuntimePermission "accessDeclaredMembers"; + // PDFBox checks for the existence of this class + permission java.lang.RuntimePermission "accessClassInPackage.sun.java2d.cmm.kcms"; }; diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 598d3f4e8175c..654bc361f53ad 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -214,6 +214,12 @@ public void testAsciidocDocument() throws Exception { assertThat(attachmentData.get("content_type").toString(), containsString("text/plain")); } + // See (https://issues.apache.org/jira/browse/COMPRESS-432) for information + // about the issue that causes a zip file to hang in Tika versions prior to 1.18. 
+ public void testZipFileDoesNotHang() { + expectThrows(Exception.class, () -> parseDocument("bad_tika.zip", processor)); + } + public void testParseAsBytesArray() throws Exception { String path = "/org/elasticsearch/ingest/attachment/test/sample-files/text-in-english.txt"; byte[] bytes; diff --git a/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip b/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip new file mode 100644 index 0000000000000..58ebd8411edce Binary files /dev/null and b/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip differ diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml index 88accac7730e7..42be90f77f944 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml @@ -7,6 +7,6 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: ingest-attachment } - - match: { nodes.$master.ingest.processors.0.type: attachment } + - contains: { 'nodes.$master.plugins': { name: ingest-attachment } } + - contains: { 'nodes.$master.ingest.processors': { type: attachment } } diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml index 7a06326a86411..413745eab4051 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml @@ -7,5 +7,5 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: ingest-geoip } - - 
match: { nodes.$master.ingest.processors.0.type: geoip } + - contains: { nodes.$master.plugins: { name: ingest-geoip } } + - contains: { nodes.$master.ingest.processors: { type: geoip } } diff --git a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml index fee3173f39335..4cb1c9b1fba20 100644 --- a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml +++ b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml @@ -7,5 +7,5 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: ingest-user-agent } - - match: { nodes.$master.ingest.processors.0.type: user_agent } + - contains: { nodes.$master.plugins: { name: ingest-user-agent } } + - contains: { nodes.$master.ingest.processors: { type: user_agent } } diff --git a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml index 3a754a34a8a14..199d543dda87e 100644 --- a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml +++ b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-azure } + - contains: { nodes.$master.plugins: { name: repository-azure } } diff --git a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml index f4259771644b2..5c8fa70bb7a5f 100644 --- a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml +++ b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml @@ -10,4 +10,4 @@ - do: 
nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-gcs } + - contains: { nodes.$master.plugins: { name: repository-gcs } } diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 304e0f4ae0e1f..8856ae1526a21 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -158,7 +158,6 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', project.afterEvaluate { for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) { ClusterConfiguration cluster = project.extensions.getByName("${integTestTaskName}Cluster") as ClusterConfiguration - cluster.distribution = 'integ-test-zip' cluster.dependsOn(project.bundlePlugin) Task restIntegTestTask = project.tasks.getByName(integTestTaskName) diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml index 6fbbfc82e872d..f11e0148402cf 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml @@ -12,7 +12,7 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-hdfs } + - contains: { nodes.$master.plugins: { name: repository-hdfs } } --- # # Check that we can't use file:// repositories or anything like that diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml index 6fbbfc82e872d..f11e0148402cf 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml @@ -12,7 +12,7 @@ - do: 
nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-hdfs } + - contains: { nodes.$master.plugins: { name: repository-hdfs } } --- # # Check that we can't use file:// repositories or anything like that diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml index 7bb65a508863d..190a628f0b375 100644 --- a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-s3 } + - contains: { nodes.$master.plugins: { name: repository-s3 } } diff --git a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml index a210fd4e5970d..60228c1b92356 100644 --- a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml +++ b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: store-smb } + - contains: { nodes.$master.plugins: { name: store-smb } } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java index 32f294f47ce9c..becebade37348 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.tasks.Task; import java.io.Closeable; import 
java.net.SocketAddress; @@ -74,7 +75,7 @@ static Collection returnHttpResponseBodies(Collection static Collection returnOpaqueIds(Collection responses) { List list = new ArrayList<>(responses.size()); for (HttpResponse response : responses) { - list.add(response.headers().get("X-Opaque-Id")); + list.add(response.headers().get(Task.X_OPAQUE_ID)); } return list; } @@ -90,7 +91,7 @@ public Collection get(SocketAddress remoteAddress, String... u for (int i = 0; i < uris.length; i++) { final HttpRequest httpRequest = new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, uris[i]); httpRequest.headers().add(HOST, "localhost"); - httpRequest.headers().add("X-Opaque-ID", String.valueOf(i)); + httpRequest.headers().add(Task.X_OPAQUE_ID, String.valueOf(i)); requests.add(httpRequest); } return sendRequests(remoteAddress, requests); diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index da99bbb4c8036..ac57d51def7c6 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -57,13 +57,6 @@ for (Version version : bwcVersions.wireCompatible) { tasks.getByName("${baseName}#mixedClusterTestRunner").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") - if ('zip'.equals(extension.distribution)) { - systemProperty 'tests.rest.blacklist', [ - 'cat.templates/10_basic/No templates', - 'cat.templates/10_basic/Sort templates', - 'cat.templates/10_basic/Multiple template', - ].join(',') - } } } diff --git a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats index 397660b239a46..749c72c8b312f 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats @@ -72,6 +72,14 @@ setup() { [ "$status" -eq 1 ] } +@test "[DEB] temporarily remove java and ensure the install 
fails" { + move_java + run dpkg -i elasticsearch-oss-$(cat version).deb + output=$status + unmove_java + [ "$output" -eq 1 ] +} + @test "[DEB] install package" { dpkg -i elasticsearch-oss-$(cat version).deb } diff --git a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats index 52347c7ef4e41..cb12d4b50e02b 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats @@ -71,6 +71,14 @@ setup() { [ "$status" -eq 1 ] } +@test "[RPM] temporarily remove java and ensure the install fails" { + move_java + run rpm -i elasticsearch-oss-$(cat version).rpm + output=$status + unmove_java + [ "$output" -eq 1 ] +} + @test "[RPM] install package" { rpm -i elasticsearch-oss-$(cat version).rpm } diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash index 53662ca9d3c1d..c07037a5f275b 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/utils.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/utils.bash @@ -68,8 +68,11 @@ if [ ! -x "`which unzip 2>/dev/null`" ]; then fi if [ ! -x "`which java 2>/dev/null`" ]; then - echo "'java' command is mandatory to run the tests" - exit 1 + # there are some tests that move java temporarily + if [ ! 
-x "`command -v java.bak 2>/dev/null`" ]; then + echo "'java' command is mandatory to run the tests" + exit 1 + fi fi # Returns 0 if the 'dpkg' command is available @@ -578,3 +581,17 @@ file_privileges_for_user_from_umask() { echo $((0777 & ~$(sudo -E -u $user sh -c umask) & ~0111)) } + +# move java to simulate it not being in the path +move_java() { + which_java=`command -v java` + assert_file_exist $which_java + mv $which_java ${which_java}.bak +} + +# move java back to its original location +unmove_java() { + which_java=`command -v java.bak` + assert_file_exist $which_java + mv $which_java `dirname $which_java`/java +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml index 78b7a4277570a..fe0d7ee30730f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml @@ -15,7 +15,7 @@ --- "No templates": - skip: - features: default_shards + features: default_shards, no_xpack - do: cat.templates: {} @@ -177,7 +177,7 @@ --- "Sort templates": - skip: - features: default_shards + features: default_shards, no_xpack - do: indices.put_template: name: test @@ -227,7 +227,7 @@ --- "Multiple template": - skip: - features: default_shards + features: default_shards, no_xpack - do: indices.put_template: name: test_1 diff --git a/server/src/main/java/org/elasticsearch/action/Action.java b/server/src/main/java/org/elasticsearch/action/Action.java index 2fc49d69ed1cc..771762ad15c30 100644 --- a/server/src/main/java/org/elasticsearch/action/Action.java +++ b/server/src/main/java/org/elasticsearch/action/Action.java @@ -57,7 +57,7 @@ public TransportRequestOptions transportOptions(Settings settings) { @Override public boolean equals(Object o) { - return o instanceof Action && name.equals(((Action) o).name()); + return o instanceof Action && 
name.equals(((Action) o).name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 324e75d64d80f..48e1cef08d00a 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -316,6 +316,7 @@ import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.rest.action.search.RestSearchScrollAction; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.usage.UsageService; @@ -369,7 +370,7 @@ public ActionModule(boolean transportClient, Settings settings, IndexNameExpress destructiveOperations = new DestructiveOperations(settings, clusterSettings); Set headers = Stream.concat( actionPlugins.stream().flatMap(p -> p.getRestHeaders().stream()), - Stream.of("X-Opaque-Id") + Stream.of(Task.X_OPAQUE_ID) ).collect(Collectors.toSet()); UnaryOperator restWrapper = null; for (ActionPlugin plugin : actionPlugins) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 81b9812d61c5f..44a66f497c846 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -47,7 +48,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** Response object for {@link GetFieldMappingsRequest} API */ -public class GetFieldMappingsResponse extends ActionResponse implements ToXContentFragment { +public class GetFieldMappingsResponse extends ActionResponse implements ToXContentObject { private static final ParseField MAPPINGS = new ParseField("mappings"); @@ -111,6 +112,7 @@ public FieldMappingMetaData fieldMappings(String index, String type, String fiel @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); for (Map.Entry>> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); builder.startObject(MAPPINGS.getPreferredName()); @@ -126,6 +128,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); builder.endObject(); } + builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 5d4e558dbb25b..5afba8f66aed3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -558,9 +558,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("mappings"); for (Map.Entry entry : mappings.entrySet()) { builder.field(entry.getKey()); - XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue()); - builder.copyCurrentStructure(parser); + try 
(XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue())) { + builder.copyCurrentStructure(parser); + } } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java index 3180f57d20409..434f87de121ed 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java @@ -31,7 +31,7 @@ public class BulkItemRequest implements Streamable { private int id; - private DocWriteRequest request; + private DocWriteRequest request; private volatile BulkItemResponse primaryResponse; BulkItemRequest() { @@ -39,7 +39,7 @@ public class BulkItemRequest implements Streamable { } // NOTE: public for testing only - public BulkItemRequest(int id, DocWriteRequest request) { + public BulkItemRequest(int id, DocWriteRequest request) { this.id = id; this.request = request; } @@ -48,7 +48,7 @@ public int id() { return id; } - public DocWriteRequest request() { + public DocWriteRequest request() { return request; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 9febbd63962ee..f8f9d154b14d6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -248,24 +248,24 @@ public synchronized boolean awaitClose(long timeout, TimeUnit unit) throws Inter * (for example, if no id is provided, one will be generated, or usage of the create flag). */ public BulkProcessor add(IndexRequest request) { - return add((DocWriteRequest) request); + return add((DocWriteRequest) request); } /** * Adds an {@link DeleteRequest} to the list of actions to execute. 
*/ public BulkProcessor add(DeleteRequest request) { - return add((DocWriteRequest) request); + return add((DocWriteRequest) request); } /** * Adds either a delete or an index request. */ - public BulkProcessor add(DocWriteRequest request) { + public BulkProcessor add(DocWriteRequest request) { return add(request, null); } - public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) { + public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) { internalAdd(request, payload); return this; } @@ -280,7 +280,7 @@ protected void ensureOpen() { } } - private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) { + private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) { ensureOpen(); bulkRequest.add(request, payload); executeIfNeeded(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index ca5d997dc3882..989172b711a13 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -83,7 +83,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques * {@link WriteRequest}s to this but java doesn't support syntax to declare that everything in the array has both types so we declare * the one with the least casts. */ - final List requests = new ArrayList<>(); + final List> requests = new ArrayList<>(); private final Set indices = new HashSet<>(); List payloads = null; @@ -99,14 +99,14 @@ public BulkRequest() { /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(DocWriteRequest... requests) { - for (DocWriteRequest request : requests) { + public BulkRequest add(DocWriteRequest... 
requests) { + for (DocWriteRequest request : requests) { add(request, null); } return this; } - public BulkRequest add(DocWriteRequest request) { + public BulkRequest add(DocWriteRequest request) { return add(request, null); } @@ -116,7 +116,7 @@ public BulkRequest add(DocWriteRequest request) { * @param payload Optional payload * @return the current bulk request */ - public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { + public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { if (request instanceof IndexRequest) { add((IndexRequest) request, payload); } else if (request instanceof DeleteRequest) { @@ -133,8 +133,8 @@ public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(Iterable requests) { - for (DocWriteRequest request : requests) { + public BulkRequest add(Iterable> requests) { + for (DocWriteRequest request : requests) { add(request); } return this; @@ -223,7 +223,7 @@ private void addPayload(Object payload) { /** * The list of requests in this bulk request. */ - public List requests() { + public List> requests() { return this.requests; } @@ -527,7 +527,7 @@ private int findNextMarker(byte marker, int from, BytesReference data, int lengt * @return Whether this bulk request contains index request with an ingest pipeline enabled. 
*/ public boolean hasIndexRequestsWithPipelines() { - for (DocWriteRequest actionRequest : requests) { + for (DocWriteRequest actionRequest : requests) { if (actionRequest instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) actionRequest; if (Strings.hasText(indexRequest.getPipeline())) { @@ -545,7 +545,7 @@ public ActionRequestValidationException validate() { if (requests.isEmpty()) { validationException = addValidationError("no requests added", validationException); } - for (DocWriteRequest request : requests) { + for (DocWriteRequest request : requests) { // We first check if refresh has been set if (((WriteRequest) request).getRefreshPolicy() != RefreshPolicy.NONE) { validationException = addValidationError( @@ -580,7 +580,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); waitForActiveShards.writeTo(out); out.writeVInt(requests.size()); - for (DocWriteRequest request : requests) { + for (DocWriteRequest request : requests) { DocWriteRequest.writeDocumentRequest(out, request); } refreshPolicy.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 247970dafcee3..a6ed8de653007 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -143,8 +143,8 @@ protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener indices = bulkRequest.requests.stream() // delete requests should not attempt to create the index (if the index does not // exists), unless an external versioning is used - .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE - || request.versionType() == VersionType.EXTERNAL + .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE + || request.versionType() == VersionType.EXTERNAL || request.versionType() == 
VersionType.EXTERNAL_GTE) .map(DocWriteRequest::index) .collect(Collectors.toSet()); @@ -184,7 +184,7 @@ public void onFailure(Exception e) { if (!(ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException)) { // fail all requests involving this index, if create didn't work for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest request = bulkRequest.requests.get(i); + DocWriteRequest request = bulkRequest.requests.get(i); if (request != null && setResponseFailureIfIndexMatches(responses, i, request, index, e)) { bulkRequest.requests.set(i, null); } @@ -221,7 +221,7 @@ void createIndex(String index, TimeValue timeout, ActionListener responses, int idx, DocWriteRequest request, String index, Exception e) { + private boolean setResponseFailureIfIndexMatches(AtomicArray responses, int idx, DocWriteRequest request, String index, Exception e) { if (index.equals(request.index())) { responses.set(idx, new BulkItemResponse(idx, request.opType(), new BulkItemResponse.Failure(request.index(), request.type(), request.id(), e))); return true; @@ -271,7 +271,7 @@ protected void doRun() throws Exception { final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver); MetaData metaData = clusterState.metaData(); for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); + DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); //the request can only be null because we set it to null in the previous step, so it gets ignored if (docWriteRequest == null) { continue; @@ -315,7 +315,7 @@ protected void doRun() throws Exception { // first, go over all the requests and create a ShardId -> Operations mapping Map> requestsByShard = new HashMap<>(); for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest request = bulkRequest.requests.get(i); + DocWriteRequest request = bulkRequest.requests.get(i); if (request == null) { continue; 
} @@ -362,7 +362,7 @@ public void onFailure(Exception e) { // create failures for all relevant requests for (BulkItemRequest request : requests) { final String indexName = concreteIndices.getConcreteIndex(request.index()).getName(); - DocWriteRequest docWriteRequest = request.request(); + DocWriteRequest docWriteRequest = request.request(); responses.set(request.id(), new BulkItemResponse(request.id(), docWriteRequest.opType(), new BulkItemResponse.Failure(indexName, docWriteRequest.type(), docWriteRequest.id(), e))); } @@ -418,7 +418,7 @@ public void onTimeout(TimeValue timeout) { }); } - private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, final ConcreteIndices concreteIndices, + private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, final ConcreteIndices concreteIndices, final MetaData metaData) { IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index()); if (cannotCreate != null) { @@ -442,7 +442,7 @@ private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, return false; } - private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { + private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.type(), request.id(), unavailableException); BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, request.opType(), failure); @@ -471,7 +471,7 @@ Index getConcreteIndex(String indexOrAlias) { return indices.get(indexOrAlias); } - Index resolveIfAbsent(DocWriteRequest request) { + Index resolveIfAbsent(DocWriteRequest request) { Index concreteIndex = indices.get(request.index()); if (concreteIndex == null) { concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request); @@ -512,7 +512,7 @@ void processBulkIndexIngestRequest(Task task, BulkRequest original, ActionListen }); } - static final 
class BulkRequestModifier implements Iterator { + static final class BulkRequestModifier implements Iterator> { final BulkRequest bulkRequest; final SparseFixedBitSet failedSlots; @@ -528,7 +528,7 @@ static final class BulkRequestModifier implements Iterator { } @Override - public DocWriteRequest next() { + public DocWriteRequest next() { return bulkRequest.requests().get(++currentSlot); } @@ -547,10 +547,10 @@ BulkRequest getBulkRequest() { modifiedBulkRequest.timeout(bulkRequest.timeout()); int slot = 0; - List requests = bulkRequest.requests(); + List> requests = bulkRequest.requests(); originalSlots = new int[requests.size()]; // oversize, but that's ok for (int i = 0; i < requests.size(); i++) { - DocWriteRequest request = requests.get(i); + DocWriteRequest request = requests.get(i); if (failedSlots.get(i) == false) { modifiedBulkRequest.add(request); originalSlots[slot++] = i; diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index aa3784efdd04f..1adfdbca8786b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support.replication; import com.carrotsearch.hppc.cursors.IntObjectCursor; + import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java index d8bb04a1a39c3..497dc49198bfc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java @@ -157,9 +157,13 @@ public void computeAndValidateWriteIndex() { List writeIndices = referenceIndexMetaDatas.stream() .filter(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).writeIndex())) .collect(Collectors.toList()); - if (referenceIndexMetaDatas.size() == 1) { - writeIndex.set(referenceIndexMetaDatas.get(0)); - } else if (writeIndices.size() == 1) { + + if (writeIndices.isEmpty() && referenceIndexMetaDatas.size() == 1 + && referenceIndexMetaDatas.get(0).getAliases().get(aliasName).writeIndex() == null) { + writeIndices.add(referenceIndexMetaDatas.get(0)); + } + + if (writeIndices.size() == 1) { writeIndex.set(writeIndices.get(0)); } else if (writeIndices.size() > 1) { List writeIndicesStrings = writeIndices.stream() diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java index 8cb51f2b06b0e..62d6e7e311d5d 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java @@ -39,7 +39,7 @@ public AbstractComponent(Settings settings) { this.settings = settings; } - public AbstractComponent(Settings settings, Class customClass) { + public AbstractComponent(Settings settings, Class customClass) { this.logger = LogManager.getLogger(customClass); this.deprecationLogger = new DeprecationLogger(logger); this.settings = settings; diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index 2ed43ccaa24e6..de14e0cd53db6 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -35,7 
+35,7 @@ protected AbstractLifecycleComponent(Settings settings) { super(settings); } - protected AbstractLifecycleComponent(Settings settings, Class customClass) { + protected AbstractLifecycleComponent(Settings settings, Class customClass) { super(settings, customClass); } diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index b9c23842a5a8c..d2ff86ea63ced 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -19,23 +19,22 @@ package org.elasticsearch.common.geo.builders; -import org.elasticsearch.common.geo.GeoShapeType; -import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.elasticsearch.common.geo.parsers.GeoWKTParser; -import org.locationtech.spatial4j.shape.Shape; - import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.locationtech.spatial4j.shape.Shape; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; -public class GeometryCollectionBuilder extends ShapeBuilder { +public class GeometryCollectionBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION; diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 3d917bcff6e48..bac74c29dd805 
100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -20,15 +20,14 @@ package org.elasticsearch.common.geo.builders; import org.elasticsearch.common.geo.GeoShapeType; -import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.elasticsearch.common.geo.parsers.GeoWKTParser; -import org.locationtech.spatial4j.shape.Shape; -import org.locationtech.jts.geom.Coordinate; - import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.spatial4j.shape.Shape; import java.io.IOException; import java.util.ArrayList; @@ -36,7 +35,7 @@ import java.util.Locale; import java.util.Objects; -public class MultiPolygonBuilder extends ShapeBuilder { +public class MultiPolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON; diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java index 49b7d68b583ff..af0e0248471d5 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java @@ -55,57 +55,66 @@ protected static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper s String malformedException = null; XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - String fieldName = parser.currentName(); - - if 
(ShapeParser.FIELD_TYPE.match(fieldName, parser.getDeprecationHandler())) { - parser.nextToken(); - final GeoShapeType type = GeoShapeType.forName(parser.text()); - if (shapeType != null && shapeType.equals(type) == false) { - malformedException = ShapeParser.FIELD_TYPE + " already parsed as [" - + shapeType + "] cannot redefine as [" + type + "]"; + try { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + String fieldName = parser.currentName(); + + if (ShapeParser.FIELD_TYPE.match(fieldName, parser.getDeprecationHandler())) { + parser.nextToken(); + final GeoShapeType type = GeoShapeType.forName(parser.text()); + if (shapeType != null && shapeType.equals(type) == false) { + malformedException = ShapeParser.FIELD_TYPE + " already parsed as [" + + shapeType + "] cannot redefine as [" + type + "]"; + } else { + shapeType = type; + } + } else if (ShapeParser.FIELD_COORDINATES.match(fieldName, parser.getDeprecationHandler())) { + parser.nextToken(); + CoordinateNode tempNode = parseCoordinates(parser, ignoreZValue.value()); + if (coordinateNode != null && tempNode.numDimensions() != coordinateNode.numDimensions()) { + throw new ElasticsearchParseException("Exception parsing coordinates: " + + "number of dimensions do not match"); + } + coordinateNode = tempNode; + } else if (ShapeParser.FIELD_GEOMETRIES.match(fieldName, parser.getDeprecationHandler())) { + if (shapeType == null) { + shapeType = GeoShapeType.GEOMETRYCOLLECTION; + } else if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION) == false) { + malformedException = "cannot have [" + ShapeParser.FIELD_GEOMETRIES + "] with type set to [" + + shapeType + "]"; + } + parser.nextToken(); + geometryCollections = parseGeometries(parser, shapeMapper); + } else if (CircleBuilder.FIELD_RADIUS.match(fieldName, parser.getDeprecationHandler())) { + if (shapeType == null) { + shapeType = GeoShapeType.CIRCLE; + } else if (shapeType != null && 
shapeType.equals(GeoShapeType.CIRCLE) == false) { + malformedException = "cannot have [" + CircleBuilder.FIELD_RADIUS + "] with type set to [" + + shapeType + "]"; + } + parser.nextToken(); + radius = DistanceUnit.Distance.parseDistance(parser.text()); + } else if (ShapeParser.FIELD_ORIENTATION.match(fieldName, parser.getDeprecationHandler())) { + if (shapeType != null + && (shapeType.equals(GeoShapeType.POLYGON) || shapeType.equals(GeoShapeType.MULTIPOLYGON)) == false) { + malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]"; + } + parser.nextToken(); + requestedOrientation = ShapeBuilder.Orientation.fromString(parser.text()); } else { - shapeType = type; + parser.nextToken(); + parser.skipChildren(); } - } else if (ShapeParser.FIELD_COORDINATES.match(fieldName, parser.getDeprecationHandler())) { - parser.nextToken(); - CoordinateNode tempNode = parseCoordinates(parser, ignoreZValue.value()); - if (coordinateNode != null && tempNode.numDimensions() != coordinateNode.numDimensions()) { - throw new ElasticsearchParseException("Exception parsing coordinates: " + - "number of dimensions do not match"); - } - coordinateNode = tempNode; - } else if (ShapeParser.FIELD_GEOMETRIES.match(fieldName, parser.getDeprecationHandler())) { - if (shapeType == null) { - shapeType = GeoShapeType.GEOMETRYCOLLECTION; - } else if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION) == false) { - malformedException = "cannot have [" + ShapeParser.FIELD_GEOMETRIES + "] with type set to [" - + shapeType + "]"; - } - parser.nextToken(); - geometryCollections = parseGeometries(parser, shapeMapper); - } else if (CircleBuilder.FIELD_RADIUS.match(fieldName, parser.getDeprecationHandler())) { - if (shapeType == null) { - shapeType = GeoShapeType.CIRCLE; - } else if (shapeType != null && shapeType.equals(GeoShapeType.CIRCLE) == false) { - malformedException = "cannot have [" + CircleBuilder.FIELD_RADIUS + "] with type set to [" - + 
shapeType + "]"; - } - parser.nextToken(); - radius = DistanceUnit.Distance.parseDistance(parser.text()); - } else if (ShapeParser.FIELD_ORIENTATION.match(fieldName, parser.getDeprecationHandler())) { - if (shapeType != null - && (shapeType.equals(GeoShapeType.POLYGON) || shapeType.equals(GeoShapeType.MULTIPOLYGON)) == false) { - malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]"; - } - parser.nextToken(); - requestedOrientation = ShapeBuilder.Orientation.fromString(parser.text()); - } else { - parser.nextToken(); - parser.skipChildren(); } } + } catch (Exception ex) { + // Skip all other fields until the end of the object + while (parser.currentToken() != XContentParser.Token.END_OBJECT && parser.currentToken() != null) { + parser.nextToken(); + parser.skipChildren(); + } + throw ex; } if (malformedException != null) { @@ -144,6 +153,12 @@ protected static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper s * XContentParser */ private static CoordinateNode parseCoordinates(XContentParser parser, boolean ignoreZValue) throws IOException { + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + parser.skipChildren(); + parser.nextToken(); + throw new ElasticsearchParseException("coordinates cannot be specified as objects"); + } + XContentParser.Token token = parser.nextToken(); // Base cases if (token != XContentParser.Token.START_ARRAY && @@ -168,8 +183,13 @@ private static CoordinateNode parseCoordinates(XContentParser parser, boolean ig } private static Coordinate parseCoordinate(XContentParser parser, boolean ignoreZValue) throws IOException { + if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) { + throw new ElasticsearchParseException("geo coordinates must be numbers"); + } double lon = parser.doubleValue(); - parser.nextToken(); + if (parser.nextToken() != XContentParser.Token.VALUE_NUMBER) { + throw new ElasticsearchParseException("geo coordinates must be 
numbers"); + } double lat = parser.doubleValue(); XContentParser.Token token = parser.nextToken(); // alt (for storing purposes only - future use includes 3d shapes) diff --git a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java index 8b501a561292e..30b799601487f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java @@ -85,7 +85,7 @@ public Boolean visit(ScopeBinding scopeBinding) { } @Override - public Boolean visit(InjectionRequest injectionRequest) { + public Boolean visit(InjectionRequest injectionRequest) { return false; } diff --git a/server/src/main/java/org/elasticsearch/common/inject/Binder.java b/server/src/main/java/org/elasticsearch/common/inject/Binder.java index 2a4799cefccb1..03d164bcbaa52 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Binder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Binder.java @@ -360,7 +360,7 @@ void bindListener(Matcher> typeMatcher, * @return a binder that shares its configuration with this binder. * @since 2.0 */ - Binder skipSources(Class... classesToSkip); + Binder skipSources(Class... classesToSkip); /** * Creates a new private child environment for bindings and other configuration. 
The returned diff --git a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java index e560eeb1efd63..971b100a6799e 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java @@ -72,7 +72,7 @@ public Boolean visit(Binding command) { if (Void.class.equals(command.getKey().getRawType())) { if (command instanceof ProviderInstanceBinding - && ((ProviderInstanceBinding) command).getProviderInstance() instanceof ProviderMethod) { + && ((ProviderInstanceBinding) command).getProviderInstance() instanceof ProviderMethod) { errors.voidProviderMethod(); } else { errors.missingConstantValues(); @@ -274,7 +274,7 @@ private void putBinding(BindingImpl binding) { */ private boolean isOkayDuplicate(Binding original, BindingImpl binding) { if (original instanceof ExposedBindingImpl) { - ExposedBindingImpl exposed = (ExposedBindingImpl) original; + ExposedBindingImpl exposed = (ExposedBindingImpl) original; InjectorImpl exposedFrom = (InjectorImpl) exposed.getPrivateElements().getInjector(); return (exposedFrom == binding.getInjector()); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java index 6711456004380..8440ab98b5cb8 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java @@ -46,7 +46,7 @@ public interface ElementVisitor { /** * Visit a request to inject the instance fields and methods of an instance. */ - V visit(InjectionRequest request); + V visit(InjectionRequest request); /** * Visit a request to inject the static fields and methods of type. 
diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index eb4e294642417..8847c8138a706 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -83,7 +83,7 @@ protected AbstractScopedSettings(Settings settings, Set> settingsSet, this.keySettings = Collections.unmodifiableMap(keySettings); } - protected void validateSettingKey(Setting setting) { + protected void validateSettingKey(Setting setting) { if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey()) || isValidAffixKey(setting.getKey())) == false || setting.getKey().endsWith(".0")) { throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); @@ -366,7 +366,7 @@ void validate(final String key, final Settings settings, final boolean validateD * @throws IllegalArgumentException if the setting is invalid */ void validate(final String key, final Settings settings, final boolean validateDependencies, final boolean validateInternalIndex) { - Setting setting = getRaw(key); + Setting setting = getRaw(key); if (setting == null) { LevensteinDistance ld = new LevensteinDistance(); List> scoredKeys = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java index 1ed012e2bb393..77ac63a984f55 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java @@ -32,7 +32,7 @@ */ final class BigObjectArray extends AbstractBigArray implements ObjectArray { - private static final BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE); + private static final 
BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE); private Object[][] pages; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index ba72561f0c145..490f3d680e428 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -489,7 +489,8 @@ private ThreadContextStruct putResponse(final String key, final String value, fi final List existingValues = newResponseHeaders.get(key); if (existingValues != null) { final Set existingUniqueValues = existingValues.stream().map(uniqueValue).collect(Collectors.toSet()); - assert existingValues.size() == existingUniqueValues.size(); + assert existingValues.size() == existingUniqueValues.size() : + "existing values: [" + existingValues + "], existing unique values [" + existingUniqueValues + "]"; if (existingUniqueValues.contains(uniqueValue.apply(value))) { return this; } diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index 38bf1e751ef9d..9d21896182c67 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -38,6 +38,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.tasks.Task.X_OPAQUE_ID; + /** * The default rest channel for incoming requests. This class implements the basic logic for sending a rest * response. It will set necessary headers nad ensure that bytes are released after the response is sent. 
@@ -50,7 +52,6 @@ public class DefaultRestChannel extends AbstractRestChannel implements RestChann static final String CONTENT_TYPE = "content-type"; static final String CONTENT_LENGTH = "content-length"; static final String SET_COOKIE = "set-cookie"; - static final String X_OPAQUE_ID = "X-Opaque-Id"; private final HttpRequest httpRequest; private final BigArrays bigArrays; diff --git a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java index f3c5d07f1f2f4..10b4c4318a30e 100644 --- a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.shard.SearchOperationListener; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.tasks.Task; import java.util.Collections; import java.util.concurrent.TimeUnit; @@ -174,6 +175,11 @@ public String toString() { } else { sb.append("source[], "); } + if (context.getTask().getHeader(Task.X_OPAQUE_ID) != null) { + sb.append("id[").append(context.getTask().getHeader(Task.X_OPAQUE_ID)).append("], "); + } else { + sb.append("id[], "); + } return sb.toString(); } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java index 82ae0bb5bf1ea..6fd08b82668f6 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java @@ -48,7 +48,7 @@ public long ramBytesUsed() { } @Override - public final ScriptDocValues getScriptValues() { + public final ScriptDocValues getScriptValues() { return new ScriptDocValues.Doubles(getDoubleValues()); } @@ -69,7 +69,7 @@ public static AtomicNumericFieldData 
empty(final int maxDoc) { public SortedNumericDoubleValues getDoubleValues() { return FieldData.emptySortedNumericDoubles(); } - + @Override public Collection getChildResources() { return Collections.emptyList(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index e19bdb6708370..69b6a6e04a936 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -80,7 +80,8 @@ public BinaryFieldMapper build(BuilderContext context) { public static class TypeParser implements Mapper.TypeParser { @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + public BinaryFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(name); parseField(builder, name, node, parserContext); return builder; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index c50a7d18113bf..cb44e777f871d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -94,7 +94,8 @@ public BooleanFieldMapper build(BuilderContext context) { public static class TypeParser implements Mapper.TypeParser { @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + public BooleanFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder(name); parseField(builder, name, node, parserContext); for 
(Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { diff --git a/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java index a3ef36204f032..f06ee48d06b67 100644 --- a/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java @@ -163,7 +163,7 @@ protected final int doHashCode() { } @Override - protected final boolean doEquals(BaseTermQueryBuilder other) { + protected final boolean doEquals(QB other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(value, other.value); } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index f1062f7b5384c..a8aca4fdfe59d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -53,7 +53,7 @@ public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) { this.threadPool = threadPool; } - public void executeBulkRequest(Iterable actionRequests, + public void executeBulkRequest(Iterable> actionRequests, BiConsumer itemFailureHandler, Consumer completionHandler) { threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() { @@ -65,7 +65,7 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { - for (DocWriteRequest actionRequest : actionRequests) { + for (DocWriteRequest actionRequest : actionRequests) { IndexRequest indexRequest = null; if (actionRequest instanceof IndexRequest) { indexRequest = (IndexRequest) actionRequest; diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 7da072f2e8b0a..4f3847a22994b 100644 --- 
a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -136,6 +136,7 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskResultsService; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -446,7 +447,7 @@ protected Node(final Environment environment, Collection final Transport transport = networkModule.getTransportSupplier().get(); Set taskHeaders = Stream.concat( pluginsService.filterPlugins(ActionPlugin.class).stream().flatMap(p -> p.getTaskHeaders().stream()), - Stream.of("X-Opaque-Id") + Stream.of(Task.X_OPAQUE_ID) ).collect(Collectors.toSet()); final TransportService transportService = newTransportService(settings, transport, threadPool, networkModule.getTransportInterceptor(), localNodeFactory, settingsModule.getClusterSettings(), taskHeaders); diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index eb8b7130d7054..54d9ade581e89 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -19,9 +19,9 @@ package org.elasticsearch.plugins; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; @@ -68,7 +68,7 @@ public interface ActionPlugin { * Client actions added by this plugin. This defaults to all of the {@linkplain Action} in * {@linkplain ActionPlugin#getActions()}. 
*/ - default List getClientActions() { + default List> getClientActions() { return getActions().stream().map(a -> a.action).collect(Collectors.toList()); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index ea68d9cc3c04f..c43f14dcddf26 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -81,9 +81,7 @@ public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBui if (mappingsByIndex.isEmpty() && fields.length > 0) { status = NOT_FOUND; } - builder.startObject(); response.toXContent(builder, request); - builder.endObject(); return new BytesRestResponse(status, builder); } }); diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 7074d3ad9fe44..f0e075eac7d93 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -53,7 +53,11 @@ public class ScriptModule { SimilarityScript.CONTEXT, SimilarityWeightScript.CONTEXT, TemplateScript.CONTEXT, - MovingFunctionScript.CONTEXT + MovingFunctionScript.CONTEXT, + ScriptedMetricAggContexts.InitScript.CONTEXT, + ScriptedMetricAggContexts.MapScript.CONTEXT, + ScriptedMetricAggContexts.CombineScript.CONTEXT, + ScriptedMetricAggContexts.ReduceScript.CONTEXT ).collect(Collectors.toMap(c -> c.name, Function.identity())); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java new file mode 100644 index 0000000000000..774dc95d39977 --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class ScriptedMetricAggContexts { + private abstract static class ParamsAndStateBase { + private final Map params; + private final Object state; + + ParamsAndStateBase(Map params, Object state) { + this.params = params; + this.state = state; + } + + public Map getParams() { + return params; + } + + public Object getState() { + return state; + } + } + + public abstract static class InitScript extends ParamsAndStateBase { + public InitScript(Map params, Object state) { + super(params, state); + } + + public abstract void execute(); + + public interface Factory { + InitScript newInstance(Map params, Object state); + } + + public static String[] 
PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_init", Factory.class); + } + + public abstract static class MapScript extends ParamsAndStateBase { + private final LeafSearchLookup leafLookup; + private Scorer scorer; + + public MapScript(Map params, Object state, SearchLookup lookup, LeafReaderContext leafContext) { + super(params, state); + + this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext); + } + + // Return the doc as a map (instead of LeafDocLookup) in order to abide by type whitelisting rules for + // Painless scripts. + public Map> getDoc() { + return leafLookup == null ? null : leafLookup.doc(); + } + + public void setDocument(int docId) { + if (leafLookup != null) { + leafLookup.setDocument(docId); + } + } + + public void setScorer(Scorer scorer) { + this.scorer = scorer; + } + + // get_score() is named this way so that it's picked up by Painless as '_score' + public double get_score() { + if (scorer == null) { + return 0.0; + } + + try { + return scorer.score(); + } catch (IOException e) { + throw new ElasticsearchException("Couldn't look up score", e); + } + } + + public abstract void execute(); + + public interface LeafFactory { + MapScript newInstance(LeafReaderContext ctx); + } + + public interface Factory { + LeafFactory newFactory(Map params, Object state, SearchLookup lookup); + } + + public static String[] PARAMETERS = new String[] {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_map", Factory.class); + } + + public abstract static class CombineScript extends ParamsAndStateBase { + public CombineScript(Map params, Object state) { + super(params, state); + } + + public abstract Object execute(); + + public interface Factory { + CombineScript newInstance(Map params, Object state); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_combine", Factory.class); + } + + public abstract static class 
ReduceScript { + private final Map params; + private final List states; + + public ReduceScript(Map params, List states) { + this.params = params; + this.states = states; + } + + public Map getParams() { + return params; + } + + public List getStates() { + return states; + } + + public abstract Object execute(); + + public interface Factory { + ReduceScript newInstance(Map params, List states); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index e350ecbed5814..f4281c063ff2c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -90,16 +90,19 @@ public InternalAggregation doReduce(List aggregations, Redu InternalScriptedMetric firstAggregation = ((InternalScriptedMetric) aggregations.get(0)); List aggregation; if (firstAggregation.reduceScript != null && reduceContext.isFinalReduce()) { - Map vars = new HashMap<>(); - vars.put("_aggs", aggregationObjects); + Map params = new HashMap<>(); if (firstAggregation.reduceScript.getParams() != null) { - vars.putAll(firstAggregation.reduceScript.getParams()); + 
params.putAll(firstAggregation.reduceScript.getParams()); } - ExecutableScript.Factory factory = reduceContext.scriptService().compile( - firstAggregation.reduceScript, ExecutableScript.AGGS_CONTEXT); - ExecutableScript script = factory.newInstance(vars); - Object scriptResult = script.run(); + // Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below). + params.put("_aggs", aggregationObjects); + + ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile( + firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); + ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, aggregationObjects); + + Object scriptResult = script.execute(); CollectionUtils.ensureNoSelfReferences(scriptResult, "reduce script"); aggregation = Collections.singletonList(scriptResult); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index 225398e51b7c0..8b6d834184d73 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -26,9 +26,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import 
org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -202,30 +201,32 @@ protected ScriptedMetricAggregatorFactory doBuild(SearchContext context, Aggrega // Extract params from scripts and pass them along to ScriptedMetricAggregatorFactory, since it won't have // access to them for the scripts it's given precompiled. - ExecutableScript.Factory executableInitScript; + ScriptedMetricAggContexts.InitScript.Factory compiledInitScript; Map initScriptParams; if (initScript != null) { - executableInitScript = queryShardContext.getScriptService().compile(initScript, ExecutableScript.AGGS_CONTEXT); + compiledInitScript = queryShardContext.getScriptService().compile(initScript, ScriptedMetricAggContexts.InitScript.CONTEXT); initScriptParams = initScript.getParams(); } else { - executableInitScript = p -> null; + compiledInitScript = (p, a) -> null; initScriptParams = Collections.emptyMap(); } - SearchScript.Factory searchMapScript = queryShardContext.getScriptService().compile(mapScript, SearchScript.AGGS_CONTEXT); + ScriptedMetricAggContexts.MapScript.Factory compiledMapScript = queryShardContext.getScriptService().compile(mapScript, + ScriptedMetricAggContexts.MapScript.CONTEXT); Map mapScriptParams = mapScript.getParams(); - ExecutableScript.Factory executableCombineScript; + ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript; Map combineScriptParams; if (combineScript != null) { - executableCombineScript = queryShardContext.getScriptService().compile(combineScript, ExecutableScript.AGGS_CONTEXT); + compiledCombineScript = queryShardContext.getScriptService().compile(combineScript, + ScriptedMetricAggContexts.CombineScript.CONTEXT); combineScriptParams = combineScript.getParams(); } else { - executableCombineScript = p -> null; + compiledCombineScript = (p, a) -> null; combineScriptParams = Collections.emptyMap(); } - return new ScriptedMetricAggregatorFactory(name, searchMapScript, mapScriptParams, executableInitScript, 
initScriptParams, - executableCombineScript, combineScriptParams, reduceScript, + return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript, + initScriptParams, compiledCombineScript, combineScriptParams, reduceScript, params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index d6e861a9a6792..ffdff44b783b6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -38,17 +38,17 @@ public class ScriptedMetricAggregator extends MetricsAggregator { - private final SearchScript.LeafFactory mapScript; - private final ExecutableScript combineScript; + private final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript; + private final ScriptedMetricAggContexts.CombineScript combineScript; private final Script reduceScript; - private Map params; + private Object aggState; - protected ScriptedMetricAggregator(String name, SearchScript.LeafFactory mapScript, ExecutableScript combineScript, - 
Script reduceScript, - Map params, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) - throws IOException { + protected ScriptedMetricAggregator(String name, ScriptedMetricAggContexts.MapScript.LeafFactory mapScript, ScriptedMetricAggContexts.CombineScript combineScript, + Script reduceScript, Object aggState, SearchContext context, Aggregator parent, + List pipelineAggregators, Map metaData) + throws IOException { super(name, context, parent, pipelineAggregators, metaData); - this.params = params; + this.aggState = aggState; this.mapScript = mapScript; this.combineScript = combineScript; this.reduceScript = reduceScript; @@ -62,14 +62,20 @@ public boolean needsScores() { @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { - final SearchScript leafMapScript = mapScript.newInstance(ctx); + final ScriptedMetricAggContexts.MapScript leafMapScript = mapScript.newInstance(ctx); return new LeafBucketCollectorBase(sub, leafMapScript) { + @Override + public void setScorer(Scorer scorer) throws IOException { + leafMapScript.setScorer(scorer); + } + @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0 : bucket; + leafMapScript.setDocument(doc); - leafMapScript.run(); - CollectionUtils.ensureNoSelfReferences(params, "Scripted metric aggs map script"); + leafMapScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs map script"); } }; } @@ -78,10 +84,10 @@ public void collect(int doc, long bucket) throws IOException { public InternalAggregation buildAggregation(long owningBucketOrdinal) { Object aggregation; if (combineScript != null) { - aggregation = combineScript.run(); + aggregation = combineScript.execute(); CollectionUtils.ensureNoSelfReferences(aggregation, "Scripted metric aggs combine script"); } else { - aggregation = params.get("_agg"); + aggregation = aggState; } return new 
InternalScriptedMetric(name, aggregation, reduceScript, pipelineAggregators(), metaData()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index 0deda32e79d77..9bd904a07013d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -19,10 +19,9 @@ package org.elasticsearch.search.aggregations.metrics.scripted; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -39,20 +38,21 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory { - private final SearchScript.Factory mapScript; + private final ScriptedMetricAggContexts.MapScript.Factory mapScript; private final Map mapScriptParams; - private final ExecutableScript.Factory combineScript; + private final ScriptedMetricAggContexts.CombineScript.Factory combineScript; private final Map combineScriptParams; private final Script reduceScript; private final Map aggParams; private final SearchLookup lookup; - private final ExecutableScript.Factory initScript; + private final ScriptedMetricAggContexts.InitScript.Factory initScript; private final Map initScriptParams; - public ScriptedMetricAggregatorFactory(String name, SearchScript.Factory mapScript, Map mapScriptParams, - ExecutableScript.Factory initScript, Map initScriptParams, - 
ExecutableScript.Factory combineScript, Map combineScriptParams, - Script reduceScript, Map aggParams, + public ScriptedMetricAggregatorFactory(String name, + ScriptedMetricAggContexts.MapScript.Factory mapScript, Map mapScriptParams, + ScriptedMetricAggContexts.InitScript.Factory initScript, Map initScriptParams, + ScriptedMetricAggContexts.CombineScript.Factory combineScript, + Map combineScriptParams, Script reduceScript, Map aggParams, SearchLookup lookup, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, Map metaData) throws IOException { super(name, context, parent, subFactories, metaData); @@ -79,21 +79,29 @@ public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBu } else { aggParams = new HashMap<>(); } + + // Add _agg to params map for backwards compatibility (redundant with context variables on the scripts created below). + // When this is removed, aggState (as passed to ScriptedMetricAggregator) can be changed to Map, since + // it won't be possible to completely replace it with another type as is possible when it's an entry in params. 
if (aggParams.containsKey("_agg") == false) { aggParams.put("_agg", new HashMap()); } + Object aggState = aggParams.get("_agg"); - final ExecutableScript initScript = this.initScript.newInstance(mergeParams(aggParams, initScriptParams)); - final SearchScript.LeafFactory mapScript = this.mapScript.newFactory(mergeParams(aggParams, mapScriptParams), lookup); - final ExecutableScript combineScript = this.combineScript.newInstance(mergeParams(aggParams, combineScriptParams)); + final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance( + mergeParams(aggParams, initScriptParams), aggState); + final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript = this.mapScript.newFactory( + mergeParams(aggParams, mapScriptParams), aggState, lookup); + final ScriptedMetricAggContexts.CombineScript combineScript = this.combineScript.newInstance( + mergeParams(aggParams, combineScriptParams), aggState); final Script reduceScript = deepCopyScript(this.reduceScript, context); if (initScript != null) { - initScript.run(); - CollectionUtils.ensureNoSelfReferences(aggParams.get("_agg"), "Scripted metric aggs init script"); + initScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs init script"); } return new ScriptedMetricAggregator(name, mapScript, - combineScript, reduceScript, aggParams, context, parent, + combineScript, reduceScript, aggState, context, parent, pipelineAggregators, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index e59bd718d3226..7888f6cd5a098 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -115,7 +115,7 @@ public abstract class 
AbstractHighlighterBuilder template, QueryBuilder queryBuilder) { preTags = template.preTags; postTags = template.postTags; fragmentSize = template.fragmentSize; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java index 9e31d8370cbe3..b6713f81ec48c 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java @@ -22,7 +22,7 @@ /** * Builder for {@link ContextMapping} */ -public abstract class ContextBuilder { +public abstract class ContextBuilder> { protected String name; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index 7eab4e072f146..1aa82eeb2190a 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -143,7 +143,7 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ContextMapping that = (ContextMapping) o; + ContextMapping that = (ContextMapping) o; if (type != that.type) return false; return name.equals(that.name); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 4d6b53296f157..961d7fd9f59a7 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ 
b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -57,10 +57,10 @@ public class ContextMappings implements ToXContent { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ContextMappings.class)); - private final List contextMappings; - private final Map contextNameMap; + private final List> contextMappings; + private final Map> contextNameMap; - public ContextMappings(List contextMappings) { + public ContextMappings(List> contextMappings) { if (contextMappings.size() > 255) { // we can support more, but max of 255 (1 byte) unique context types per suggest field // seems reasonable? @@ -68,7 +68,7 @@ public ContextMappings(List contextMappings) { } this.contextMappings = contextMappings; contextNameMap = new HashMap<>(contextMappings.size()); - for (ContextMapping mapping : contextMappings) { + for (ContextMapping mapping : contextMappings) { contextNameMap.put(mapping.name(), mapping); } } @@ -84,8 +84,8 @@ public int size() { /** * Returns a context mapping by its name */ - public ContextMapping get(String name) { - ContextMapping contextMapping = contextNameMap.get(name); + public ContextMapping get(String name) { + ContextMapping contextMapping = contextNameMap.get(name); if (contextMapping == null) { List keys = new ArrayList<>(contextNameMap.keySet()); Collections.sort(keys); @@ -138,7 +138,7 @@ protected Iterable contexts() { for (int typeId = 0; typeId < contextMappings.size(); typeId++) { scratch.setCharAt(0, (char) typeId); scratch.setLength(1); - ContextMapping mapping = contextMappings.get(typeId); + ContextMapping mapping = contextMappings.get(typeId); Set contexts = new HashSet<>(mapping.parseContext(document)); if (this.contexts.get(mapping.name()) != null) { contexts.addAll(this.contexts.get(mapping.name())); @@ -173,7 +173,7 @@ public ContextQuery toContextQuery(CompletionQuery query, Map mapping = contextMappings.get(typeId); List internalQueryContext 
= queryContexts.get(mapping.name()); if (internalQueryContext != null) { for (ContextMapping.InternalQueryContext context : internalQueryContext) { @@ -204,7 +204,7 @@ public Map> getNamedContexts(List contex for (CharSequence typedContext : contexts) { int typeId = typedContext.charAt(0); assert typeId < contextMappings.size() : "Returned context has invalid type"; - ContextMapping mapping = contextMappings.get(typeId); + ContextMapping mapping = contextMappings.get(typeId); Set contextEntries = contextMap.get(mapping.name()); if (contextEntries == null) { contextEntries = new HashSet<>(); @@ -224,10 +224,10 @@ public Map> getNamedContexts(List contex * */ public static ContextMappings load(Object configuration, Version indexVersionCreated) throws ElasticsearchParseException { - final List contextMappings; + final List> contextMappings; if (configuration instanceof List) { contextMappings = new ArrayList<>(); - List configurations = (List)configuration; + List configurations = (List) configuration; for (Object contextConfig : configurations) { contextMappings.add(load((Map) contextConfig, indexVersionCreated)); } @@ -242,10 +242,10 @@ public static ContextMappings load(Object configuration, Version indexVersionCre return new ContextMappings(contextMappings); } - private static ContextMapping load(Map contextConfig, Version indexVersionCreated) { + private static ContextMapping load(Map contextConfig, Version indexVersionCreated) { String name = extractRequiredValue(contextConfig, FIELD_NAME); String type = extractRequiredValue(contextConfig, FIELD_TYPE); - final ContextMapping contextMapping; + final ContextMapping contextMapping; switch (Type.fromString(type)) { case CATEGORY: contextMapping = CategoryContextMapping.load(name, contextConfig); @@ -276,7 +276,7 @@ private static String extractRequiredValue(Map contextConfig, St */ @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - for (ContextMapping 
contextMapping : contextMappings) { + for (ContextMapping contextMapping : contextMappings) { builder.startObject(); contextMapping.toXContent(builder, params); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 9fd9019cd213c..f639846b418e3 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -34,6 +34,11 @@ */ public class Task { + /** + * The request header to mark tasks with specific ids + */ + public static final String X_OPAQUE_ID = "X-Opaque-Id"; + private final long id; private final String type; diff --git a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java index 0290a6c5d100b..bce5965e50b6b 100644 --- a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -75,7 +75,7 @@ public String toString() { } } - interface CollapsingDocValuesProducer { + interface CollapsingDocValuesProducer> { T randomGroup(int maxGroup); void add(Document doc, T value, boolean multivalued); @@ -83,14 +83,14 @@ interface CollapsingDocValuesProducer { SortField sortField(boolean multivalued); } - void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric) throws IOException { + > void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric) throws IOException { assertSearchCollapse(dvProducers, numeric, true, true); assertSearchCollapse(dvProducers, numeric, true, false); assertSearchCollapse(dvProducers, numeric, false, true); assertSearchCollapse(dvProducers, numeric, false, false); } - private void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, + private > void 
assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric, boolean multivalued, boolean trackMaxScores) throws IOException { final int numDocs = randomIntBetween(1000, 2000); @@ -120,7 +120,7 @@ private void assertSearchCollapse(CollapsingDocValuesProd int expectedNumGroups = values.size(); - final CollapsingTopDocsCollector collapsingCollector; + final CollapsingTopDocsCollector collapsingCollector; if (numeric) { collapsingCollector = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); @@ -199,7 +199,7 @@ private void assertSearchCollapse(CollapsingDocValuesProd final Weight weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), true); for (int shardIDX = 0; shardIDX < subSearchers.length; shardIDX++) { final SegmentSearcher subSearcher = subSearchers[shardIDX]; - final CollapsingTopDocsCollector c; + final CollapsingTopDocsCollector c; if (numeric) { c = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); } else { @@ -221,7 +221,7 @@ private static void assertTopDocsEquals(CollapseTopFieldDocs topDocs1, CollapseT } public void testCollapseLong() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Long randomGroup(int maxGroup) { return randomNonNegativeLong() % maxGroup; @@ -249,7 +249,7 @@ public SortField sortField(boolean multivalued) { } public void testCollapseInt() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Integer randomGroup(int maxGroup) { return randomIntBetween(0, maxGroup - 1); @@ -277,10 +277,10 @@ public SortField sortField(boolean multivalued) { } public void testCollapseFloat() throws Exception { - 
CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Float randomGroup(int maxGroup) { - return new Float(randomIntBetween(0, maxGroup - 1)); + return Float.valueOf(randomIntBetween(0, maxGroup - 1)); } @Override @@ -305,10 +305,10 @@ public SortField sortField(boolean multivalued) { } public void testCollapseDouble() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Double randomGroup(int maxGroup) { - return new Double(randomIntBetween(0, maxGroup - 1)); + return Double.valueOf(randomIntBetween(0, maxGroup - 1)); } @Override @@ -333,7 +333,7 @@ public SortField sortField(boolean multivalued) { } public void testCollapseString() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public BytesRef randomGroup(int maxGroup) { return new BytesRef(Integer.toString(randomIntBetween(0, maxGroup - 1))); @@ -383,7 +383,7 @@ public void testEmptyNumericSegment() throws Exception { SortField sortField = new SortField("group", SortField.Type.LONG); sortField.setMissingValue(Long.MAX_VALUE); Sort sort = new Sort(sortField); - final CollapsingTopDocsCollector collapsingCollector = + final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createNumeric("group", sort, 10, false); searcher.search(new MatchAllDocsQuery(), collapsingCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); @@ -419,7 +419,7 @@ public void testEmptySortedSegment() throws Exception { final IndexReader reader = w.getReader(); final IndexSearcher searcher = newSearcher(reader); Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL)); - final 
CollapsingTopDocsCollector collapsingCollector = + final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createKeyword("group", sort, 10, false); searcher.search(new MatchAllDocsQuery(), collapsingCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); diff --git a/server/src/test/java/org/elasticsearch/action/ActionTests.java b/server/src/test/java/org/elasticsearch/action/ActionTests.java index c159d36ca9158..a7dca3f098d05 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionTests.java @@ -24,7 +24,7 @@ public class ActionTests extends ESTestCase { public void testEquals() { - class FakeAction extends Action { + class FakeAction extends Action { protected FakeAction(String name) { super(name); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 09a64a016ab8d..d33fff45308f3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -357,7 +357,7 @@ public void testSearchTaskDescriptions() { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); Map headers = new HashMap<>(); - headers.put("X-Opaque-Id", "my_id"); + headers.put(Task.X_OPAQUE_ID, "my_id"); headers.put("Foo-Header", "bar"); headers.put("Custom-Task-Header", "my_value"); assertSearchResponse( @@ -404,7 +404,7 @@ public void testSearchTaskHeaderLimit() { int maxSize = Math.toIntExact(SETTING_HTTP_MAX_HEADER_SIZE.getDefault(Settings.EMPTY).getBytes() / 2 + 1); Map headers = new HashMap<>(); - headers.put("X-Opaque-Id", "my_id"); + headers.put(Task.X_OPAQUE_ID, "my_id"); headers.put("Custom-Task-Header", randomAlphaOfLengthBetween(maxSize, maxSize + 100)); IllegalArgumentException ex = 
expectThrows( IllegalArgumentException.class, @@ -415,7 +415,7 @@ public void testSearchTaskHeaderLimit() { private void assertTaskHeaders(TaskInfo taskInfo) { assertThat(taskInfo.getHeaders().keySet(), hasSize(2)); - assertEquals("my_id", taskInfo.getHeaders().get("X-Opaque-Id")); + assertEquals("my_id", taskInfo.getHeaders().get(Task.X_OPAQUE_ID)); assertEquals("my_value", taskInfo.getHeaders().get("Custom-Task-Header")); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index c358d0fb6ca52..9701e76619824 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -58,12 +58,13 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertThat(iae.getMessage(), containsString("[cluster_update_settings_request] unknown field [" + unsupportedField + "], parser not found")); } else { - XContentParser parser = createParser(xContentType.xContent(), originalBytes); - ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser); + try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser); - assertNull(parser.nextToken()); - assertThat(parsedRequest.transientSettings(), equalTo(request.transientSettings())); - assertThat(parsedRequest.persistentSettings(), equalTo(request.persistentSettings())); + assertNull(parser.nextToken()); + assertThat(parsedRequest.transientSettings(), equalTo(request.transientSettings())); + assertThat(parsedRequest.persistentSettings(), equalTo(request.persistentSettings())); + } } } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index e50805ab5b263..1c27934927413 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -134,11 +134,12 @@ public static void assertMappingsEqual(Map expected, Map expectedEntry : expected.entrySet()) { String expectedValue = expectedEntry.getValue(); String actualValue = actual.get(expectedEntry.getKey()); - XContentParser expectedJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + try (XContentParser expectedJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, expectedValue); - XContentParser actualJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, actualValue); - assertEquals(expectedJson.map(), actualJson.map()); + XContentParser actualJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, actualValue)){ + assertEquals(expectedJson.map(), actualJson.map()); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index e816b08187f1b..be44d790b4004 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -147,9 +147,10 @@ public void testToAndFromXContent() throws IOException { private void assertMappingsEqual(String expected, String actual) throws IOException { - XContentParser 
expectedJson = createParser(XContentType.JSON.xContent(), expected); - XContentParser actualJson = createParser(XContentType.JSON.xContent(), actual); - assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + try (XContentParser expectedJson = createParser(XContentType.JSON.xContent(), expected); + XContentParser actualJson = createParser(XContentType.JSON.xContent(), actual)) { + assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + } } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java index 4fa99374f0fab..ffbab5805c0a6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.test.ESTestCase; @@ -93,7 +94,9 @@ public void testToAndFromXContent() throws IOException { ResizeRequest parsedResizeRequest = new ResizeRequest(resizeRequest.getTargetIndexRequest().index(), resizeRequest.getSourceIndex()); - parsedResizeRequest.fromXContent(createParser(xContentType.xContent(), originalBytes)); + try (XContentParser xParser = createParser(xContentType.xContent(), originalBytes)) { + parsedResizeRequest.fromXContent(xParser); + } assertEquals(resizeRequest.getSourceIndex(), parsedResizeRequest.getSourceIndex()); assertEquals(resizeRequest.getTargetIndexRequest().index(), parsedResizeRequest.getTargetIndexRequest().index()); diff --git 
a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 1d03d065e7af7..f1842b5b0dd1d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -124,7 +124,7 @@ public void testBulkAllowExplicitIndex() throws Exception { public void testBulkAddIterable() { BulkRequest bulkRequest = Requests.bulkRequest(); - List requests = new ArrayList<>(); + List> requests = new ArrayList<>(); requests.add(new IndexRequest("test", "test", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value")); requests.add(new UpdateRequest("test", "test", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value")); requests.add(new DeleteRequest("test", "test", "id")); @@ -279,7 +279,7 @@ public void testSmileIsSupported() throws IOException { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(data, null, null, xContentType); assertEquals(1, bulkRequest.requests().size()); - DocWriteRequest docWriteRequest = bulkRequest.requests().get(0); + DocWriteRequest docWriteRequest = bulkRequest.requests().get(0); assertEquals(DocWriteRequest.OpType.INDEX, docWriteRequest.opType()); assertEquals("index", docWriteRequest.index()); assertEquals("type", docWriteRequest.type()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index bcd16386df3d4..66527726573a5 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -84,7 +84,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { @Captor ArgumentCaptor> remoteResponseHandler; @Captor - ArgumentCaptor> bulkDocsItr; + ArgumentCaptor>> bulkDocsItr; /** 
The actual action we want to test, with real indexing mocked */ TestTransportBulkAction action; @@ -225,7 +225,7 @@ public void testIngestLocal() throws Exception { assertTrue(failureCalled.get()); // now check success - Iterator req = bulkDocsItr.getValue().iterator(); + Iterator> req = bulkDocsItr.getValue().iterator(); failureHandler.getValue().accept((IndexRequest)req.next(), exception); // have an exception for our one index request indexRequest2.setPipeline(null); // this is done by the real pipeline execution service when processing completionHandler.getValue().accept(null); diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java index f1de226704e53..fcb4539c9afe7 100644 --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java @@ -54,9 +54,9 @@ public void testAddWithInvalidKey() throws IOException { builder.endArray(); } builder.endObject(); - final XContentParser parser = createParser(builder); - final MultiGetRequest mgr = new MultiGetRequest(); - final ParsingException e = expectThrows( + try (XContentParser parser = createParser(builder)) { + final MultiGetRequest mgr = new MultiGetRequest(); + final ParsingException e = expectThrows( ParsingException.class, () -> { final String defaultIndex = randomAlphaOfLength(5); @@ -64,9 +64,10 @@ public void testAddWithInvalidKey() throws IOException { final FetchSourceContext fetchSource = FetchSourceContext.FETCH_SOURCE; mgr.add(defaultIndex, defaultType, null, fetchSource, null, parser, true); }); - assertThat( + assertThat( e.toString(), containsString("unknown key [doc] for a START_ARRAY, expected [docs] or [ids]")); + } } public void testUnexpectedField() throws IOException { @@ -141,16 +142,17 @@ public void testXContentSerialization() throws IOException { MultiGetRequest expected = 
createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - MultiGetRequest actual = new MultiGetRequest(); - actual.add(null, null, null, null, null, parser, true); - assertThat(parser.nextToken(), nullValue()); - - assertThat(actual.items.size(), equalTo(expected.items.size())); - for (int i = 0; i < expected.items.size(); i++) { - MultiGetRequest.Item expectedItem = expected.items.get(i); - MultiGetRequest.Item actualItem = actual.items.get(i); - assertThat(actualItem, equalTo(expectedItem)); + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + MultiGetRequest actual = new MultiGetRequest(); + actual.add(null, null, null, null, null, parser, true); + assertThat(parser.nextToken(), nullValue()); + + assertThat(actual.items.size(), equalTo(expected.items.size())); + for (int i = 0; i < expected.items.size(); i++) { + MultiGetRequest.Item expectedItem = expected.items.get(i); + MultiGetRequest.Item actualItem = actual.items.get(i); + assertThat(actualItem, equalTo(expectedItem)); + } } } } diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java index 1eae583316e15..6331d5ef31dff 100644 --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java @@ -39,10 +39,11 @@ public void testFromXContent() throws IOException { MultiGetResponse expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - - XContentParser parser = 
createParser(XContentFactory.xContent(xContentType), shuffled); - MultiGetResponse parsed = MultiGetResponse.fromXContent(parser); - assertNull(parser.nextToken()); + MultiGetResponse parsed; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + parsed = MultiGetResponse.fromXContent(parser); + assertNull(parser.nextToken()); + } assertNotSame(expected, parsed); assertThat(parsed.getResponses().length, equalTo(expected.getResponses().length)); @@ -60,6 +61,7 @@ public void testFromXContent() throws IOException { assertThat(actualItem.getResponse(), equalTo(expectedItem.getResponse())); } } + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java index 5701bcc27800f..6b673c49efa0b 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java @@ -31,7 +31,7 @@ public class SimulateDocumentVerboseResultTests extends AbstractXContentTestCase { static SimulateDocumentVerboseResult createTestInstance(boolean withFailures) { - int numDocs = randomIntBetween(0, 10); + int numDocs = randomIntBetween(0, 5); List results = new ArrayList<>(); for (int i = 0; i results = new ArrayList<>(numResults); for (int i = 0; i < numResults; i++) { if (isVerbose) { diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java index 874bea5ff657e..4f1fa4cf06116 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java @@ -40,9 +40,11 @@ public void testFromXContent() throws IOException { MultiSearchResponse expected 
= createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - MultiSearchResponse actual = MultiSearchResponse.fromXContext(parser); - assertThat(parser.nextToken(), nullValue()); + MultiSearchResponse actual; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + actual = MultiSearchResponse.fromXContext(parser); + assertThat(parser.nextToken(), nullValue()); + } assertThat(actual.getTook(), equalTo(expected.getTook())); assertThat(actual.getResponses().length, equalTo(expected.getResponses().length)); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index bfa45bb072dcf..012cc71437a80 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -18,9 +18,6 @@ */ package org.elasticsearch.action.support.replication; -import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; @@ -28,7 +25,9 @@ import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.DefaultShardOperationFailedException; 
+import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; @@ -41,6 +40,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -104,6 +104,7 @@ threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, new NamedWr new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), null); } + @Override @After public void tearDown() throws Exception { super.tearDown(); @@ -244,13 +245,15 @@ public FlushResponse assertImmediateResponse(String index, TransportFlushAction return flushResponse; } - public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, DummyBroadcastRequest request) { + public BroadcastResponse executeAndAssertImmediateResponse( + TransportBroadcastReplicationAction broadcastAction, + DummyBroadcastRequest request) { PlainActionFuture response = PlainActionFuture.newFuture(); broadcastAction.execute(request, response); return response.actionGet("5s"); } - private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) { + private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) { assertThat(response.getSuccessfulShards(), equalTo(successful)); assertThat(response.getTotalShards(), equalTo(total)); assertThat(response.getFailedShards(), equalTo(failed)); diff --git 
a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index bd76557f9a86f..e7606ec071895 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -167,7 +167,7 @@ protected static class TestConfig { public final boolean requestPositions; public final boolean requestOffsets; public final boolean requestPayloads; - public Class expectedException = null; + public Class expectedException = null; public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions, boolean requestOffsets, boolean requestPayloads) { this.doc = doc; @@ -177,7 +177,7 @@ public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions this.requestPayloads = requestPayloads; } - public TestConfig expectedException(Class exceptionClass) { + public TestConfig expectedException(Class exceptionClass) { this.expectedException = exceptionClass; return this; } diff --git a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 5dea451dbacfd..31f6963536c50 100644 --- a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -20,8 +20,8 @@ package org.elasticsearch.client; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import 
org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; @@ -56,7 +56,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { .put(ThreadContext.PREFIX + ".key2", "val 2") .build(); - private static final Action[] ACTIONS = new Action[] { + private static final Action[] ACTIONS = new Action[] { // client actions GetAction.INSTANCE, SearchAction.INSTANCE, DeleteAction.INSTANCE, DeleteStoredScriptAction.INSTANCE, IndexAction.INSTANCE, @@ -92,7 +92,7 @@ public void tearDown() throws Exception { terminate(threadPool); } - protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); + protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); public void testActions() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java index 812dfd8f6f686..e1fbc47c4a022 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java @@ -136,8 +136,7 @@ public void testAddWriteOnlyWithNoExistingAliases() { ClusterState after = service.innerExecute(before, Arrays.asList( new AliasAction.Add("test", "alias", null, null, null, false))); assertFalse(after.metaData().index("test").getAliases().get("alias").writeIndex()); - assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), - equalTo(after.metaData().index("test"))); + assertNull(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex()); after = service.innerExecute(before, Arrays.asList( new AliasAction.Add("test", "alias", null, null, null, null))); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java 
b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 96a533118c8da..32dd4324ff835 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -181,8 +181,7 @@ public void testUnknownFieldClusterMetaData() throws IOException { .field("random", "value") .endObject() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); - try { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) { MetaData.Builder.fromXContent(parser); fail(); } catch (IllegalArgumentException e) { @@ -197,8 +196,7 @@ public void testUnknownFieldIndexMetaData() throws IOException { .field("random", "value") .endObject() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); - try { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) { IndexMetaData.Builder.fromXContent(parser); fail(); } catch (IllegalArgumentException e) { @@ -225,9 +223,10 @@ public void testXContentWithIndexGraveyard() throws IOException { builder.startObject(); originalMeta.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - final MetaData fromXContentMeta = MetaData.fromXContent(parser); - assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard())); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + final MetaData fromXContentMeta = MetaData.fromXContent(parser); + assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard())); + } } public void testSerializationWithIndexGraveyard() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java 
b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java index f7771f0f84466..420f5c5caefb2 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java @@ -18,13 +18,13 @@ */ package org.elasticsearch.common.geo; -import org.locationtech.jts.geom.Geometry; -import org.locationtech.jts.geom.GeometryFactory; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; +import org.locationtech.jts.geom.Geometry; +import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.spatial4j.shape.Shape; import org.locationtech.spatial4j.shape.ShapeCollection; import org.locationtech.spatial4j.shape.jts.JtsGeometry; @@ -49,16 +49,18 @@ abstract class BaseGeoParsingTestCase extends ESTestCase { public abstract void testParseEnvelope() throws IOException; public abstract void testParseGeometryCollection() throws IOException; - protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { - XContentParser parser = createParser(builder); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, expectedException); + protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { + try (XContentParser parser = createParser(builder)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, expectedException); + } } protected void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException { - XContentParser parser = createParser(geoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); + try 
(XContentParser parser = createParser(geoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); + } } protected ShapeCollection shapeCollection(Shape... shapes) { diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index 6f9128454f374..f054450f00abe 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -145,6 +145,7 @@ public void testParseMultiDimensionShapes() throws IOException { XContentParser parser = createParser(pointGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); // multi dimension linestring XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() @@ -159,6 +160,7 @@ public void testParseMultiDimensionShapes() throws IOException { parser = createParser(lineGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); } @Override @@ -193,18 +195,22 @@ public void testParseEnvelope() throws IOException { .startArray().value(50).value(-39).endArray() .endArray() .endObject(); - XContentParser parser = createParser(multilinesGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(multilinesGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test #4: "envelope" with empty coordinates multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .endArray() .endObject(); - 
parser = createParser(multilinesGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(multilinesGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } @Override @@ -266,9 +272,10 @@ public void testParse3DPolygon() throws IOException { Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).build()); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).build()); + } } public void testInvalidDimensionalPolygon() throws IOException { @@ -285,9 +292,11 @@ public void testInvalidDimensionalPolygon() throws IOException { .endArray() .endArray() .endObject(); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParseInvalidPoint() throws IOException { @@ -299,9 +308,11 @@ public void testParseInvalidPoint() throws IOException { .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject(); - XContentParser parser = 
createParser(invalidPoint1); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidPoint1)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 2: create an invalid point object with an empty number of coordinates XContentBuilder invalidPoint2 = XContentFactory.jsonBuilder() @@ -310,9 +321,11 @@ public void testParseInvalidPoint() throws IOException { .startArray("coordinates") .endArray() .endObject(); - parser = createParser(invalidPoint2); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidPoint2)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParseInvalidMultipoint() throws IOException { @@ -322,9 +335,11 @@ public void testParseInvalidMultipoint() throws IOException { .field("type", "multipoint") .startArray("coordinates").value(-74.011).value(40.753).endArray() .endObject(); - XContentParser parser = createParser(invalidMultipoint1); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint1)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 2: create an invalid multipoint object with null coordinate XContentBuilder invalidMultipoint2 = XContentFactory.jsonBuilder() @@ -333,9 +348,11 @@ public void testParseInvalidMultipoint() throws IOException { .startArray("coordinates") .endArray() .endObject(); - parser = createParser(invalidMultipoint2); - 
parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint2)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates XContentBuilder invalidMultipoint3 = XContentFactory.jsonBuilder() @@ -345,9 +362,11 @@ public void testParseInvalidMultipoint() throws IOException { .startArray().endArray() .endArray() .endObject(); - parser = createParser(invalidMultipoint3); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint3)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParseInvalidMultiPolygon() throws IOException { @@ -380,9 +399,11 @@ public void testParseInvalidMultiPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + assertNull(parser.nextToken()); + } } public void testParseInvalidDimensionalMultiPolygon() throws IOException { @@ -419,10 +440,12 @@ public void testParseInvalidDimensionalMultiPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); - parser.nextToken(); - 
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); - } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } + } public void testParseOGCPolygonWithoutHoles() throws IOException { @@ -440,11 +463,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: ccw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -460,11 +484,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 3: cw poly not crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -480,11 +505,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = 
ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 4: cw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -500,11 +526,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } } public void testParseOGCPolygonWithHoles() throws IOException { @@ -528,11 +555,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: ccw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -554,11 +582,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 3: cw poly not crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -580,11 +609,13 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); + + ElasticsearchGeoAssertions.assertPolygon(shape); + } - ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -606,11 +637,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } } public void testParseInvalidPolygon() throws IOException { @@ -627,9 +659,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 2: create an invalid polygon with only 1 point invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -640,9 +674,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 3: create an invalid polygon with 0 points invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -653,9 +689,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 4: create an invalid polygon with null value points invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -666,9 +704,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - 
ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + assertNull(parser.nextToken()); + } // test case 5: create an invalid polygon with 1 invalid LinearRing invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -677,18 +717,22 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + assertNull(parser.nextToken()); + } // test case 6: create an invalid polygon with 0 LinearRings invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates").endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 7: create an invalid polygon with 0 LinearRings invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -697,9 +741,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - 
parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParsePolygonWithHole() throws IOException { @@ -764,9 +810,11 @@ public void testParseSelfCrossingPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + assertNull(parser.nextToken()); + } } @Override @@ -980,11 +1028,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: valid ccw (right handed system) poly not crossing dateline (with 'ccw' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1009,11 +1058,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + 
ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 3: valid ccw (right handed system) poly not crossing dateline (with 'counterclockwise' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1038,11 +1088,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 4: valid cw (left handed system) poly crossing dateline (with 'left' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1067,11 +1118,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 5: valid cw multipoly (left handed system) poly crossing dateline (with 'cw' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1096,11 +1148,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 6: valid cw multipoly (left handed system) poly crossing dateline (with 'clockwise' field) polygonGeoJson = 
XContentFactory.jsonBuilder() @@ -1125,10 +1178,39 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); + + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } + } + + public void testParseInvalidShapes() throws IOException { + // single dimensions point + XContentBuilder tooLittlePointGeoJson = XContentFactory.jsonBuilder() + .startObject() + .field("type", "Point") + .startArray("coordinates").value(10.0).endArray() + .endObject(); + + try (XContentParser parser = createParser(tooLittlePointGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } + + // zero dimensions point + XContentBuilder emptyPointGeoJson = XContentFactory.jsonBuilder() + .startObject() + .field("type", "Point") + .startObject("coordinates").field("foo", "bar").endObject() + .endObject(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + try (XContentParser parser = createParser(emptyPointGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java index efec56e788da1..f23e89ecb2bf7 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java @@ -59,13 +59,14 @@ private int parsePrecision(CheckedConsumer tokenGe XContentBuilder builder = jsonBuilder().startObject(); tokenGenerator.accept(builder); builder.endObject(); - XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); // { - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // field name - assertTrue(parser.nextToken().isValue()); // field value - int precision = GeoUtils.parsePrecision(parser); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // } - assertNull(parser.nextToken()); // no more tokens - return precision; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); // { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // field name + assertTrue(parser.nextToken().isValue()); // field value + int precision = GeoUtils.parsePrecision(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // } + assertNull(parser.nextToken()); // no more tokens + return precision; + } } } diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 5ac55832959d7..20e159ded41e4 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -37,7 +37,7 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; -public abstract class AbstractShapeBuilderTestCase extends ESTestCase { +public abstract class AbstractShapeBuilderTestCase> extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; @@ -79,12 +79,13 @@ public void testFromXContent() throws IOException { } XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); 
XContentBuilder shuffled = shuffleXContent(builder); - XContentParser shapeContentParser = createParser(shuffled); - shapeContentParser.nextToken(); - ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); - assertNotSame(testShape, parsedShape); - assertEquals(testShape, parsedShape); - assertEquals(testShape.hashCode(), parsedShape.hashCode()); + try (XContentParser shapeContentParser = createParser(shuffled)) { + shapeContentParser.nextToken(); + ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); + assertNotSame(testShape, parsedShape); + assertEquals(testShape, parsedShape); + assertEquals(testShape.hashCode(), parsedShape.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 79b6aa5f60436..0074da43fcfb8 100644 --- a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -45,13 +45,14 @@ public void testParseFromXContent() throws IOException { XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, floatValue) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER)); - Fuzziness fuzziness = Fuzziness.parse(parser); - assertThat(fuzziness.asFloat(), equalTo(floatValue)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER)); + Fuzziness fuzziness = 
Fuzziness.parse(parser); + assertThat(fuzziness.asFloat(), equalTo(floatValue)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + } } { Integer intValue = frequently() ? randomIntBetween(0, 2) : randomIntBetween(0, 100); @@ -63,28 +64,29 @@ public void testParseFromXContent() throws IOException { XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? value.toString() : value) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING))); - Fuzziness fuzziness = Fuzziness.parse(parser); - if (value.intValue() >= 1) { - assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue()))); - } - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); - if (intValue.equals(value)) { - switch (intValue) { - case 1: - assertThat(fuzziness, sameInstance(Fuzziness.ONE)); - break; - case 2: - assertThat(fuzziness, sameInstance(Fuzziness.TWO)); - break; - case 0: - assertThat(fuzziness, sameInstance(Fuzziness.ZERO)); - break; - default: - break; + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING))); + Fuzziness fuzziness = Fuzziness.parse(parser); + if (value.intValue() >= 1) { + assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue()))); + } + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + if (intValue.equals(value)) { + switch (intValue) { + case 1: + assertThat(fuzziness, 
sameInstance(Fuzziness.ONE)); + break; + case 2: + assertThat(fuzziness, sameInstance(Fuzziness.TWO)); + break; + case 0: + assertThat(fuzziness, sameInstance(Fuzziness.ZERO)); + break; + default: + break; + } } } } @@ -102,15 +104,16 @@ public void testParseFromXContent() throws IOException { .field(Fuzziness.X_FIELD_NAME, auto) .endObject(); } - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); - Fuzziness fuzziness = Fuzziness.parse(parser); - if (isDefaultAutoFuzzinessTested) { - assertThat(fuzziness, sameInstance(Fuzziness.AUTO)); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); + Fuzziness fuzziness = Fuzziness.parse(parser); + if (isDefaultAutoFuzzinessTested) { + assertThat(fuzziness, sameInstance(Fuzziness.AUTO)); + } + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } } @@ -152,15 +155,16 @@ public void testSerializationCustomAuto() throws IOException { .field(Fuzziness.X_FIELD_NAME, auto) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); - Fuzziness fuzziness = Fuzziness.parse(parser); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), 
equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); + Fuzziness fuzziness = Fuzziness.parse(parser); - Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); - assertEquals(fuzziness, deserializedFuzziness); - assertEquals(fuzziness.asString(), deserializedFuzziness.asString()); + Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); + assertEquals(fuzziness, deserializedFuzziness); + assertEquals(fuzziness.asString(), deserializedFuzziness.asString()); + } } private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 86e55c1ab6a91..0efeae29c3cce 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -274,14 +274,15 @@ public void testBinaryField() throws Exception { final byte[] randomBytes = randomBytes(); BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary", randomBytes).endObject()); - XContentParser parser = createParser(xcontentType().xContent(), bytes); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "binary"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(randomBytes, parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), bytes)) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "binary"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(randomBytes, 
parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryValue() throws Exception { @@ -290,14 +291,15 @@ public void testBinaryValue() throws Exception { final byte[] randomBytes = randomBytes(); BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary").value(randomBytes).endObject()); - XContentParser parser = createParser(xcontentType().xContent(), bytes); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "binary"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(randomBytes, parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), bytes)) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "binary"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(randomBytes, parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryValueWithOffsetLength() throws Exception { @@ -315,14 +317,15 @@ public void testBinaryValueWithOffsetLength() throws Exception { } builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "bin"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(Arrays.copyOfRange(randomBytes, offset, offset + length), parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + 
assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "bin"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(Arrays.copyOfRange(randomBytes, offset, offset + length), parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryUTF8() throws Exception { @@ -333,14 +336,15 @@ public void testBinaryUTF8() throws Exception { builder.field("utf8").utf8Value(randomBytesRef.bytes, randomBytesRef.offset, randomBytesRef.length); builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "utf8"); - assertTrue(parser.nextToken().isValue()); - assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(randomBytesRef.utf8ToString())); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "utf8"); + assertTrue(parser.nextToken().isValue()); + assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(randomBytesRef.utf8ToString())); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testText() throws Exception { @@ -351,14 +355,15 @@ public void testText() throws Exception { final BytesReference random = new BytesArray(randomBytes()); XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), 
Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "text"); - assertTrue(parser.nextToken().isValue()); - assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString())); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "text"); + assertTrue(parser.nextToken().isValue()); + assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString())); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testReadableInstant() throws Exception { @@ -624,7 +629,7 @@ public void testToXContent() throws Exception { public void testMap() throws Exception { Map> maps = new HashMap<>(); - maps.put("{'map':null}", (Map) null); + maps.put("{'map':null}", (Map) null); maps.put("{'map':{}}", Collections.emptyMap()); maps.put("{'map':{'key':'value'}}", singletonMap("key", "value")); @@ -649,7 +654,7 @@ public void testMap() throws Exception { public void testIterable() throws Exception { Map> iterables = new HashMap<>(); - iterables.put("{'iter':null}", (Iterable) null); + iterables.put("{'iter':null}", (Iterable) null); iterables.put("{'iter':[]}", Collections.emptyList()); iterables.put("{'iter':['a','b']}", Arrays.asList("a", "b")); @@ -741,18 +746,19 @@ void doTestRawField(XContent source, boolean useStream) throws Exception { generator.writeEndObject(); } - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - 
assertEquals("bar", parser.currentName()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("bar", parser.currentName()); + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } } public void testRawValue() throws Exception { @@ -776,14 +782,15 @@ void doTestRawValue(XContent source) throws Exception { generator.writeRawValue(new BytesArray(rawData).streamInput(), source.type()); } - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + 
assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } os = new ByteArrayOutputStream(); try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) { @@ -793,18 +800,19 @@ void doTestRawValue(XContent source) throws Exception { generator.writeEndObject(); } - parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("test", parser.currentName()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("test", parser.currentName()); + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } } @@ -822,11 +830,12 @@ protected void doTestBigInteger(JsonGenerator generator, ByteArrayOutputStream o generator.flush(); byte[] serialized = os.toByteArray(); - XContentParser parser = 
xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, serialized); - Map map = parser.map(); - assertEquals("bar", map.get("foo")); - assertEquals(bigInteger, map.get("bigint")); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, serialized)) { + Map map = parser.map(); + assertEquals("bar", map.get("foo")); + assertEquals(bigInteger, map.get("bigint")); + } } public void testEnsureNameNotNull() { @@ -935,7 +944,7 @@ public void testSelfReferencingIterable() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder() .startObject() - .field("field", (Iterable) values) + .field("field", values) .endObject()); assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself")); } @@ -950,7 +959,7 @@ public void testSelfReferencingIterableOneLevel() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder() .startObject() - .field("field", (Iterable) values) + .field("field", values) .endObject()); assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself")); } @@ -963,7 +972,7 @@ public void testSelfReferencingIterableTwoLevels() throws IOException { List it1 = new ArrayList<>(); map0.put("foo", 0); - map0.put("it1", (Iterable) it1); // map 0 -> it1 + map0.put("it1", it1); // map 0 -> it1 it1.add(map1); it1.add(map2); // it 1 -> map 1, map 2 @@ -984,44 +993,46 @@ public void testChecksForDuplicates() throws Exception { .field("key", 1) .field("key", 2) .endObject(); - - JsonParseException pex = expectThrows(JsonParseException.class, () -> createParser(builder).map()); - assertThat(pex.getMessage(), startsWith("Duplicate field 'key'")); + try (XContentParser xParser = createParser(builder)) { + JsonParseException pex = expectThrows(JsonParseException.class, 
() -> xParser.map()); + assertThat(pex.getMessage(), startsWith("Duplicate field 'key'")); + } } public void testNamedObject() throws IOException { Object test1 = new Object(); Object test2 = new Object(); NamedXContentRegistry registry = new NamedXContentRegistry(Arrays.asList( - new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1), - new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2), - new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); + new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1), + new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2), + new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); XContentBuilder b = XContentBuilder.builder(xcontentType().xContent()); b.value("test"); - XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, - BytesReference.bytes(b).streamInput()); - assertEquals(test1, p.namedObject(Object.class, "test1", null)); - assertEquals(test2, p.namedObject(Object.class, "test2", null)); - assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); - assertWarnings("Deprecated field [deprecated] used, expected [test2] instead"); - { + try (XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(b).streamInput())) { + assertEquals(test1, p.namedObject(Object.class, "test1", null)); + assertEquals(test2, p.namedObject(Object.class, "test2", null)); + assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); + assertWarnings("Deprecated field [deprecated] used, expected [test2] instead"); p.nextToken(); assertEquals("test", p.namedObject(Object.class, "str", null)); - NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class, + { + NamedObjectNotFoundException 
e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(Object.class, "unknown", null)); - assertThat(e.getMessage(), endsWith("unable to parse Object with name [unknown]: parser not found")); - } - { - Exception e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(String.class, "doesn't matter", null)); - assertEquals("unknown named object category [java.lang.String]", e.getMessage()); + assertThat(e.getMessage(), endsWith("unable to parse Object with name [unknown]: parser not found")); + } + { + Exception e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(String.class, "doesn't matter", null)); + assertEquals("unknown named object category [java.lang.String]", e.getMessage()); + } } - { - XContentParser emptyRegistryParser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new byte[] {}); + try (XContentParser emptyRegistryParser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new byte[] {})) { Exception e = expectThrows(NamedObjectNotFoundException.class, - () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null)); + () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null)); assertEquals("named objects are not supported for this parser", e.getMessage()); } + } private static void expectUnclosedException(ThrowingRunnable runnable) { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index cb666418b6cac..07338d9286b70 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -216,43 +216,44 @@ public void testCopyCurrentStructure() throws Exception 
{ } builder.field("fakefield", terms).endObject().endObject().endObject(); - - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - XContentBuilder filterBuilder = null; XContentParser.Token token; - String currentFieldName = null; - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("test".equals(currentFieldName)) { - assertThat(parser.text(), equalTo("test field")); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if ("filter".equals(currentFieldName)) { - filterBuilder = XContentFactory.contentBuilder(parser.contentType()); - filterBuilder.copyCurrentStructure(parser); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + + String currentFieldName = null; + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("test".equals(currentFieldName)) { + assertThat(parser.text(), equalTo("test field")); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if ("filter".equals(currentFieldName)) { + filterBuilder = XContentFactory.contentBuilder(parser.contentType()); + filterBuilder.copyCurrentStructure(parser); + } } } } - assertNotNull(filterBuilder); - parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(filterBuilder)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.currentName(), equalTo("terms")); - assertThat(parser.nextToken(), 
equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.currentName(), equalTo("fakefield")); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_ARRAY)); - int i = 0; - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - assertThat(parser.text(), equalTo(terms.get(i++))); - } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(filterBuilder))) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("terms")); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("fakefield")); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_ARRAY)); + int i = 0; + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + assertThat(parser.text(), equalTo(terms.get(i++))); + } - assertThat(i, equalTo(terms.size())); + assertThat(i, equalTo(terms.size())); + } } public void testHandlingOfPath() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java index 146b83c8c17a9..0e682e8be66c1 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java @@ -33,9 +33,10 @@ public void testEmptyValue() throws IOException { for (int i = 0; i < 2; i++) { // Running this part twice triggers the issue. 
// See https://github.com/elastic/elasticsearch/issues/8629 - XContentParser parser = createParser(CborXContent.cborXContent, ref); - while (parser.nextToken() != null) { - parser.charBuffer(); + try (XContentParser parser = createParser(CborXContent.cborXContent, ref)) { + while (parser.nextToken() != null) { + parser.charBuffer(); + } } } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java index e165425400eb5..b10cce71f718a 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java @@ -62,8 +62,10 @@ public void testCompareParsingTokens() throws IOException { xsonGen.close(); jsonGen.close(); - - verifySameTokens(createParser(JsonXContent.jsonXContent, jsonOs.bytes()), createParser(CborXContent.cborXContent, xsonOs.bytes())); + try (XContentParser jsonOsParser = createParser(JsonXContent.jsonXContent, jsonOs.bytes()); + XContentParser xsonOsParser = createParser(CborXContent.cborXContent, xsonOs.bytes())) { + verifySameTokens(jsonOsParser, xsonOsParser); + } } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java index 47913a5481e33..7f909df694f8e 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java @@ -63,8 +63,10 @@ public void testCompareParsingTokens() throws IOException { xsonGen.close(); jsonGen.close(); - verifySameTokens(createParser(JsonXContent.jsonXContent, jsonOs.bytes()), - createParser(SmileXContent.smileXContent, xsonOs.bytes())); + try (XContentParser 
jsonParser = createParser(JsonXContent.jsonXContent, jsonOs.bytes()); + XContentParser smileParser = createParser(SmileXContent.smileXContent, xsonOs.bytes())) { + verifySameTokens(jsonParser, smileParser); + } } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java index 1d12defe6988d..4aa19b78a5ca0 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java @@ -75,15 +75,15 @@ static void assertXContentBuilderAsString(final XContentBuilder expected, final } static void assertXContentBuilderAsBytes(final XContentBuilder expected, final XContentBuilder actual) { - try { - XContent xContent = XContentFactory.xContent(actual.contentType()); + XContent xContent = XContentFactory.xContent(actual.contentType()); + try ( XContentParser jsonParser = xContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(expected).streamInput()); XContentParser testParser = xContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(actual).streamInput()); - + ) { while (true) { XContentParser.Token token1 = jsonParser.nextToken(); XContentParser.Token token2 = testParser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index bc499ed8a420a..fd68376109802 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ 
b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -180,7 +181,7 @@ public void testResponse() { public void testHeadersSet() { Settings settings = Settings.builder().build(); final TestRequest httpRequest = new TestRequest(HttpRequest.HttpVersion.HTTP_1_1, RestRequest.Method.GET, "/"); - httpRequest.getHeaders().put(DefaultRestChannel.X_OPAQUE_ID, Collections.singletonList("abc")); + httpRequest.getHeaders().put(Task.X_OPAQUE_ID, Collections.singletonList("abc")); final RestRequest request = RestRequest.request(xContentRegistry(), httpRequest, httpChannel); HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings); @@ -200,7 +201,7 @@ public void testHeadersSet() { Map> headers = httpResponse.headers; assertNull(headers.get("non-existent-header")); assertEquals(customHeaderValue, headers.get(customHeader).get(0)); - assertEquals("abc", headers.get(DefaultRestChannel.X_OPAQUE_ID).get(0)); + assertEquals("abc", headers.get(Task.X_OPAQUE_ID).get(0)); assertEquals(Integer.toString(resp.content().length()), headers.get(DefaultRestChannel.CONTENT_LENGTH).get(0)); assertEquals(resp.contentType(), headers.get(DefaultRestChannel.CONTENT_TYPE).get(0)); } @@ -208,7 +209,7 @@ public void testHeadersSet() { public void testCookiesSet() { Settings settings = Settings.builder().put(HttpTransportSettings.SETTING_HTTP_RESET_COOKIES.getKey(), true).build(); final TestRequest httpRequest = new TestRequest(HttpRequest.HttpVersion.HTTP_1_1, RestRequest.Method.GET, "/"); - httpRequest.getHeaders().put(DefaultRestChannel.X_OPAQUE_ID, Collections.singletonList("abc")); + 
httpRequest.getHeaders().put(Task.X_OPAQUE_ID, Collections.singletonList("abc")); final RestRequest request = RestRequest.request(xContentRegistry(), httpRequest, httpChannel); HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings); diff --git a/server/src/test/java/org/elasticsearch/index/IndexTests.java b/server/src/test/java/org/elasticsearch/index/IndexTests.java index f1360071745d0..9b0ca1978075a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexTests.java @@ -56,9 +56,10 @@ public void testXContent() throws IOException { final Index original = new Index(name, uuid); final XContentBuilder builder = JsonXContent.contentBuilder(); original.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - parser.nextToken(); // the beginning of the parser - assertThat(Index.fromXContent(parser), equalTo(original)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + parser.nextToken(); // the beginning of the parser + assertThat(Index.fromXContent(parser), equalTo(original)); + } } public void testEquals() { diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index 23d2f7bcafa96..adb7a087367d2 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index; import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; @@ -34,12 +35,15 @@ import org.elasticsearch.search.internal.AliasFilter; import 
org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.util.Collections; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -166,10 +170,12 @@ public void testSlowLogSearchContextPrinterToLog() throws IOException { SearchContext searchContext = createSearchContext(index); SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); searchContext.request().source(source); + searchContext.setTask(new SearchTask(0, "n/a", "n/a", "test", null, Collections.singletonMap(Task.X_OPAQUE_ID, "my_id"))); SearchSlowLog.SlowLogSearchContextPrinter p = new SearchSlowLog.SlowLogSearchContextPrinter(searchContext, 10); assertThat(p.toString(), startsWith("[foo][0]")); // Makes sure that output doesn't contain any new lines assertThat(p.toString(), not(containsString("\n"))); + assertThat(p.toString(), endsWith("id[my_id], ")); } public void testLevelSetting() { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index df6328feabc86..cd1dc01d9ef4a 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.List; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -72,7 +73,7 @@ protected long minRamBytesUsed() { public void 
testDeletedDocs() throws Exception { add2SingleValuedDocumentsAndDeleteOneOfThem(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -86,7 +87,7 @@ public void testDeletedDocs() throws Exception { public void testSingleValueAllSet() throws Exception { fillSingleValueAllSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -156,7 +157,7 @@ public void assertValues(SortedBinaryDocValues values, int docId, String... actu public void testSingleValueWithMissing() throws Exception { fillSingleValueWithMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -177,7 +178,7 @@ public void testMultiValueAllSet() throws Exception { // the segments are force merged to a single segment so that the sorted binary doc values can be asserted within a single segment. // Previously we used the SlowCompositeReaderWrapper but this is an unideal solution so force merging is a better idea. 
writer.forceMerge(1); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -211,7 +212,7 @@ public void testMultiValueAllSet() throws Exception { public void testMultiValueWithMissing() throws Exception { fillMultiValueWithMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -227,7 +228,7 @@ public void testMultiValueWithMissing() throws Exception { public void testMissingValueForAll() throws Exception { fillAllMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -251,7 +252,7 @@ public void testMissingValueForAll() throws Exception { public void testSortMultiValuesFields() throws Exception { fillExtendedMvSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index c204690c76e07..ee8f18aa11e6b 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -170,7 +170,7 @@ public void testEmpty() throws Exception { 
writer.addDocument(d); refreshReader(); - IndexFieldData fieldData = getForField("non_existing_field"); + IndexFieldData fieldData = getForField("non_existing_field"); int max = randomInt(7); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData previous = null; diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index a478d2c37426d..04cd13766176b 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -260,7 +260,7 @@ public void testActualMissingValue(boolean reverse) throws IOException { } } - final IndexFieldData indexFieldData = getForField("value"); + final IndexFieldData indexFieldData = getForField("value"); final String missingValue = values[1]; IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(missingValue, MultiValueMode.MIN, null, reverse); @@ -315,7 +315,7 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { writer.commit(); } } - final IndexFieldData indexFieldData = getForField("value"); + final IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(first ? "_first" : "_last", MultiValueMode.MIN, null, reverse); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? 
numDocs : randomIntBetween(10, numDocs), new Sort(sortField)); diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index a417cba13b9a4..362adf4a4c996 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -169,8 +170,10 @@ public void testIllegalArguments() { public void testEmptyBooleanQuery() throws Exception { XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); contentBuilder.startObject().startObject("bool").endObject().endObject(); - Query parsedQuery = parseQuery(createParser(contentBuilder)).toQuery(createShardContext()); - assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class)); + try (XContentParser xParser = createParser(contentBuilder)) { + Query parsedQuery = parseQuery(xParser).toQuery(createShardContext()); + assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class)); + } } public void testDefaultMinShouldMatch() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index a2068a666f44c..95a91e1668c3e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -124,11 +124,12 @@ public void 
testFromAndToXContent() throws Exception { innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS); //fields is printed out as an object but parsed into a List where order matters, we disable shuffling XContentBuilder shuffled = shuffleXContent(builder, "fields"); - XContentParser parser = createParser(shuffled); - InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(parser); - assertThat(innerHit, not(sameInstance(secondInnerHits))); - assertThat(innerHit, equalTo(secondInnerHits)); - assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode())); + try (XContentParser parser = createParser(shuffled)) { + InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(parser); + assertThat(innerHit, not(sameInstance(secondInnerHits))); + assertThat(innerHit, equalTo(secondInnerHits)); + assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode())); + } } } diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java index 4b580aa6a2467..b116c61d27c28 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java @@ -107,16 +107,17 @@ public void testInvalidPointEmbeddedObject() throws IOException { content.endObject(); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); - - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), 
is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testInvalidPointLatHashMix() throws IOException { @@ -125,16 +126,17 @@ public void testInvalidPointLatHashMix() throws IOException { content.field("lat", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); - - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + 
parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } } public void testInvalidPointLonHashMix() throws IOException { @@ -143,17 +145,18 @@ public void testInvalidPointLonHashMix() throws IOException { content.field("lon", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } } public void testInvalidField() throws IOException { @@ -162,17 +165,18 @@ public void testInvalidField() throws IOException { content.field("lon", 0).field("lat", 0).field("test", 0); content.endObject(); - XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); - + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testInvalidGeoHash() throws IOException { @@ -181,11 +185,12 @@ public void testInvalidGeoHash() throws IOException { content.field("geohash", "!!!!"); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("unsupported symbol [!] 
in geohash [!!!!]")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("unsupported symbol [!] in geohash [!!!!]")); + } } private XContentParser objectLatLon(double lat, double lon) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index d390490dd225c..9fec336e2a33f 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -384,29 +384,33 @@ public void testParseGeoPoint() throws IOException { double lat = randomDouble() * 180 - 90 + randomIntBetween(-1000, 1000) * 180; double lon = randomDouble() * 360 - 180 + randomIntBetween(-1000, 1000) * 360; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - GeoPoint point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); + } json = jsonBuilder().startObject().field("lat", String.valueOf(lat)).field("lon", String.valueOf(lon)).endObject(); - parser = createParser(json); - parser.nextToken(); - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); - json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { + try (XContentParser parser = createParser(json)) { parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); + } + json = 
jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); json = jsonBuilder().startObject().field("foo", lat + "," + lon).endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); } } @@ -415,12 +419,13 @@ public void testParseGeoPointStringZValueError() throws IOException { double lon = randomDouble() * 360 - 180 + randomIntBetween(-1000, 1000) * 360; double alt = randomDouble() * 1000; XContentBuilder json = jsonBuilder().startObject().field("foo", lat + "," + lon + "," + alt).endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser, new GeoPoint(), false)); + assertThat(e.getMessage(), containsString("but [ignore_z_value] parameter is [false]")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser, new GeoPoint(), false)); - assertThat(e.getMessage(), containsString("but [ignore_z_value] parameter is [false]")); } public void 
testParseGeoPointGeohash() throws IOException { @@ -431,74 +436,82 @@ public void testParseGeoPointGeohash() throws IOException { geohashBuilder.append(BASE_32[randomInt(BASE_32.length - 1)]); } XContentBuilder json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - GeoPoint point = GeoUtils.parseGeoPoint(parser); - assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); - assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); - json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { + try (XContentParser parser = createParser(json)) { parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); + assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); + } + json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); + assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); - assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); } } public void testParseGeoPointGeohashWrongType() throws IOException { XContentBuilder json = jsonBuilder().startObject().field("geohash", 1.0).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - 
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), containsString("geohash must be a string")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), containsString("geohash must be a string")); + } } public void testParseGeoPointLatNoLon() throws IOException { double lat = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field [lon] missing")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field [lon] missing")); + } } public void testParseGeoPointLonNoLat() throws IOException { double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field [lat] missing")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field [lat] missing")); + } } public void testParseGeoPointLonWrongType() throws IOException { double lat = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", false).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = 
expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("longitude must be a number")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("longitude must be a number")); + } } public void testParseGeoPointLatWrongType() throws IOException { double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", false).field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("latitude must be a number")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("latitude must be a number")); + } } public void testParseGeoPointExtraField() throws IOException { double lat = 0.0; double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("foo", true).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testParseGeoPointLonLatGeoHash() throws IOException { @@ -506,10 +519,11 @@ public void testParseGeoPointLonLatGeoHash() throws IOException { double lon = 0.0; String geohash = 
"abcd"; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("geohash", geohash).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash")); + } } public void testParseGeoPointArrayTooManyValues() throws IOException { @@ -517,12 +531,13 @@ public void testParseGeoPointArrayTooManyValues() throws IOException { double lon = 0.0; double elev = 0.0; XContentBuilder json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).value(elev).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("Exception parsing coordinates: found Z value [0.0] but [ignore_z_value] parameter is [false]")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("Exception parsing coordinates: found Z value [0.0] but [ignore_z_value] parameter is [false]")); } public void testParseGeoPointArray3D() throws IOException { @@ -530,35 +545,38 @@ public void testParseGeoPointArray3D() throws IOException { double lon = -180.0; double elev = 0.0; XContentBuilder json = 
jsonBuilder().startObject().startArray("foo").value(lon).value(lat).value(elev).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser, new GeoPoint(), true); + assertThat(point.lat(), equalTo(lat)); + assertThat(point.lon(), equalTo(lon)); } - GeoPoint point = GeoUtils.parseGeoPoint(parser, new GeoPoint(), true); - assertThat(point.lat(), equalTo(lat)); - assertThat(point.lon(), equalTo(lon)); } public void testParseGeoPointArrayWrongType() throws IOException { double lat = 0.0; boolean lon = false; XContentBuilder json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("numeric value expected")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("numeric value expected")); } public void testParseGeoPointInvalidType() throws IOException { XContentBuilder json = jsonBuilder().startObject().field("foo", 5).endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.VALUE_NUMBER) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_NUMBER) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + 
assertThat(e.getMessage(), is("geo_point expected")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("geo_point expected")); } public void testPrefixTreeCellSizes() { @@ -619,9 +637,10 @@ public void testParseGeoPointGeohashPositions() throws IOException { } private GeoPoint parseGeohash(String geohash, GeoUtils.EffectivePoint effectivePoint) throws IOException { - XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject()); - parser.nextToken(); - return GeoUtils.parseGeoPoint(parser, new GeoPoint(), randomBoolean(), effectivePoint); + try (XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject())) { + parser.nextToken(); + return GeoUtils.parseGeoPoint(parser, new GeoPoint(), randomBoolean(), effectivePoint); + } } private static void assertNormalizedPoint(GeoPoint input, GeoPoint expected) { diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index f5cac445b220d..2ba943ba0dc4b 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -149,9 +149,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { params.put("pretty", "true"); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint(); - responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - responseBuilder.endObject(); String responseStrings = Strings.toString(responseBuilder); @@ -163,9 +161,7 @@ public void 
testSimpleGetFieldMappingsWithPretty() throws Exception { response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd(); - responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - responseBuilder.endObject(); responseStrings = Strings.toString(responseBuilder); prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java index 518b775d7f802..6684544a74749 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -57,15 +57,16 @@ public void testFromXContent() throws IOException { ingestMetadata.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); XContentBuilder shuffled = shuffleXContent(builder); - final XContentParser parser = createParser(shuffled); - MetaData.Custom custom = IngestMetadata.fromXContent(parser); - assertTrue(custom instanceof IngestMetadata); - IngestMetadata m = (IngestMetadata) custom; - assertEquals(2, m.getPipelines().size()); - assertEquals("1", m.getPipelines().get("1").getId()); - assertEquals("2", m.getPipelines().get("2").getId()); - assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap()); - assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap()); + try (XContentParser parser = createParser(shuffled)) { + MetaData.Custom custom = IngestMetadata.fromXContent(parser); + assertTrue(custom instanceof IngestMetadata); + IngestMetadata m = (IngestMetadata) custom; + assertEquals(2, m.getPipelines().size()); + assertEquals("1", m.getPipelines().get("1").getId()); + assertEquals("2", m.getPipelines().get("2").getId()); + 
assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap()); + assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap()); + } } public void testDiff() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java index 5b1f74d6cdfa5..2a180cc12dd19 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java @@ -174,8 +174,10 @@ public void testSerializationContext() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(testInstance, xContentType, params, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - PersistentTasksCustomMetaData newInstance = doParseInstance(parser); + PersistentTasksCustomMetaData newInstance; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + newInstance = doParseInstance(parser); + } assertNotSame(newInstance, testInstance); assertEquals(testInstance.tasks().size(), newInstance.tasks().size()); diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index d0cf5d374897d..1d37490e2ff5f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -62,11 +62,12 @@ public void testXContent() throws IOException { RepositoryData repositoryData = generateRandomRepoData(); XContentBuilder builder = JsonXContent.contentBuilder(); repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - long gen = (long) randomIntBetween(0, 500); - RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen); - assertEquals(repositoryData, fromXContent); - assertEquals(gen, fromXContent.getGenId()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + long gen = (long) randomIntBetween(0, 500); + RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen); + assertEquals(repositoryData, fromXContent); + assertEquals(gen, fromXContent.getGenId()); + } } public void testAddSnapshots() { @@ -166,7 +167,10 @@ public void testIndexThatReferencesAnUnknownSnapshot() throws IOException { XContentBuilder builder = XContentBuilder.builder(xContent); repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS); - RepositoryData parsedRepositoryData = RepositoryData.snapshotsFromXContent(createParser(builder), repositoryData.getGenId()); + RepositoryData parsedRepositoryData; + try (XContentParser xParser = createParser(builder)) { + parsedRepositoryData = RepositoryData.snapshotsFromXContent(xParser, repositoryData.getGenId()); + } assertEquals(repositoryData, parsedRepositoryData); Map snapshotIds = new HashMap<>(); @@ -195,10 +199,12 @@ public void testIndexThatReferencesAnUnknownSnapshot() throws IOException { final XContentBuilder corruptedBuilder = XContentBuilder.builder(xContent); corruptedRepositoryData.snapshotsToXContent(corruptedBuilder, ToXContent.EMPTY_PARAMS); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> - RepositoryData.snapshotsFromXContent(createParser(corruptedBuilder), corruptedRepositoryData.getGenId())); - assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index " + corruptedIndexId + " references an unknown " + - "snapshot uuid [_does_not_exist]")); + try (XContentParser xParser = createParser(corruptedBuilder)) { + 
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> + RepositoryData.snapshotsFromXContent(xParser, corruptedRepositoryData.getGenId())); + assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index " + corruptedIndexId + " references an unknown " + + "snapshot uuid [_does_not_exist]")); + } } public void testIndexThatReferenceANullSnapshot() throws IOException { @@ -230,9 +236,12 @@ public void testIndexThatReferenceANullSnapshot() throws IOException { } builder.endObject(); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> - RepositoryData.snapshotsFromXContent(createParser(builder), randomNonNegativeLong())); - assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index [docs/_id] references an unknown snapshot uuid [null]")); + try (XContentParser xParser = createParser(builder)) { + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> + RepositoryData.snapshotsFromXContent(xParser, randomNonNegativeLong())); + assertThat(e.getMessage(), equalTo("Detected a corrupted repository, " + + "index [docs/_id] references an unknown snapshot uuid [null]")); + } } public static RepositoryData generateRandomRepoData() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 66f0bd796eaef..406e9b1d36c07 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -37,28 +37,29 @@ public class RestAnalyzeActionTests extends ESTestCase { public void testParseXContentForAnalyzeRequest() throws Exception { - XContentParser content = createParser(XContentFactory.jsonBuilder() + try (XContentParser content = createParser(XContentFactory.jsonBuilder() 
.startObject() .field("text", "THIS IS A TEST") .field("tokenizer", "keyword") .array("filter", "lowercase") - .endObject()); + .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + RestAnalyzeAction.buildFromContent(content, analyzeRequest); - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); - for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { - assertThat(filter.name, equalTo("lowercase")); + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); + for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { + assertThat(filter.name, equalTo("lowercase")); + } } } public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Exception { - XContentParser content = createParser(XContentFactory.jsonBuilder() + try (XContentParser content = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("tokenizer", "keyword") @@ -76,21 +77,22 @@ public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Excepti .endObject() .endArray() .field("normalizer", "normalizer") - .endObject()); + .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - RestAnalyzeAction.buildFromContent(content, analyzeRequest); - - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), 
equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters().size(), equalTo(2)); - assertThat(analyzeRequest.tokenFilters().get(0).name, equalTo("lowercase")); - assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue()); - assertThat(analyzeRequest.charFilters().size(), equalTo(1)); - assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue()); - assertThat(analyzeRequest.normalizer(), equalTo("normalizer")); + RestAnalyzeAction.buildFromContent(content, analyzeRequest); + + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(2)); + assertThat(analyzeRequest.tokenFilters().get(0).name, equalTo("lowercase")); + assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue()); + assertThat(analyzeRequest.charFilters().size(), equalTo(1)); + assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue()); + assertThat(analyzeRequest.normalizer(), equalTo("normalizer")); + } } public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { @@ -103,84 +105,83 @@ public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() t public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("unknown", "keyword") - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + .endObject())) { + IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + } } public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("explain", "fals") - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + .endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); + assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + } } public void testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("normalizer", true) - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name")); + .endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); + assertThat(e.getMessage(), 
startsWith("normalizer should be normalizer's name")); + } } public void testDeprecatedParamIn2xException() throws Exception { - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("char_filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("char_filters", "lowercase") + .endObject())) { IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filter", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filter", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); } } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTests.java index 6e578ed910d40..8b66bb32c486e 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTests.java @@ -89,9 +89,11 @@ public void testParse() throws IOException { Script expectedScript = createScript(); try (XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) { expectedScript.toXContent(builder, ToXContent.EMPTY_PARAMS); - Settings settings = Settings.fromXContent(createParser(builder)); - Script actualScript = Script.parse(settings); - assertThat(actualScript, equalTo(expectedScript)); + try (XContentParser xParser = createParser(builder)) { + Settings settings = Settings.fromXContent(xParser); + Script actualScript = Script.parse(settings); + assertThat(actualScript, equalTo(expectedScript)); + } } } } diff --git 
a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index b0eb9e907618f..b1c46f3bcedf4 100644 --- a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -58,10 +58,11 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } builder = nestedIdentity.innerToXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(builder); - NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); - assertEquals(nestedIdentity, parsedNestedIdentity); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(builder)) { + NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); + assertEquals(nestedIdentity, parsedNestedIdentity); + assertNull(parser.nextToken()); + } } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java index 9b5d64b46bc33..9919e9dcdbbd1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java @@ -56,13 +56,14 @@ public void testNeedsScores() throws Exception { } private boolean needsScores(IndexService index, String agg) throws IOException { - XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg); - aggParser.nextToken(); - SearchContext context = createSearchContext(index); - final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null); - final Aggregator[] aggregators = factories.createTopLevelAggregators(); - assertEquals(1, aggregators.length); - return 
aggregators[0].needsScores(); + try (XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg)) { + aggParser.nextToken(); + SearchContext context = createSearchContext(index); + final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null); + final Aggregator[] aggregators = factories.createTopLevelAggregators(); + assertEquals(1, aggregators.length); + return aggregators[0].needsScores(); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java index bc98dda41d661..38d9e62604c46 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import java.util.HashMap; import java.util.Map; @@ -29,8 +30,6 @@ import static java.util.Collections.singletonMap; -import org.elasticsearch.script.ScriptType; - /** * This class contains various mocked scripts that are used in aggregations integration tests. 
*/ @@ -68,32 +67,32 @@ protected Map, Object>> pluginScripts() { }); scripts.put("doc['value'].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("value"); }); scripts.put("doc['value'].value - dec", vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() - dec; }); scripts.put("doc['value'].value + inc", vars -> { int inc = (int) vars.get("inc"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() + inc; }); scripts.put("doc['values'].values", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("values"); }); scripts.put(DECREMENT_ALL_VALUES.getIdOrCode(), vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("values"); double[] res = new double[values.size()]; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 29d8e327d5cd7..79984f5894904 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -99,10 +99,10 @@ */ public class AggregationsTests extends ESTestCase { - private static final List aggsTests = getAggsTests(); + private static final List> aggsTests = getAggsTests(); - private static List getAggsTests() { - List aggsTests = new ArrayList<>(); + private static List> getAggsTests() { + List> aggsTests = new ArrayList<>(); aggsTests.add(new InternalCardinalityTests()); aggsTests.add(new InternalTDigestPercentilesTests()); 
aggsTests.add(new InternalTDigestPercentilesRanksTests()); @@ -156,11 +156,11 @@ protected NamedXContentRegistry xContentRegistry() { @Before public void init() throws Exception { - for (InternalAggregationTestCase aggsTest : aggsTests) { + for (InternalAggregationTestCase aggsTest : aggsTests) { if (aggsTest instanceof InternalMultiBucketAggregationTestCase) { // Lower down the number of buckets generated by multi bucket aggregation tests in // order to avoid too many aggregations to be created. - ((InternalMultiBucketAggregationTestCase) aggsTest).setMaxNumberOfBuckets(3); + ((InternalMultiBucketAggregationTestCase) aggsTest).setMaxNumberOfBuckets(3); } aggsTest.setUp(); } @@ -168,7 +168,7 @@ public void init() throws Exception { @After public void cleanUp() throws Exception { - for (InternalAggregationTestCase aggsTest : aggsTests) { + for (InternalAggregationTestCase aggsTest : aggsTests) { aggsTest.tearDown(); } } @@ -268,9 +268,9 @@ private static InternalAggregations createTestInstance(final int minNumAggs, fin int numAggs = randomIntBetween(minNumAggs, 4); List aggs = new ArrayList<>(numAggs); for (int i = 0; i < numAggs; i++) { - InternalAggregationTestCase testCase = randomFrom(aggsTests); + InternalAggregationTestCase testCase = randomFrom(aggsTests); if (testCase instanceof InternalMultiBucketAggregationTestCase) { - InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase; + InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase; if (currentDepth < maxDepth) { multiBucketAggTestCase.setSubAggregationsSupplier( () -> createTestInstance(0, currentDepth + 1, maxDepth) @@ -281,7 +281,7 @@ private static InternalAggregations createTestInstance(final int minNumAggs, fin ); } } else if (testCase instanceof InternalSingleBucketAggregationTestCase) { - InternalSingleBucketAggregationTestCase singleBucketAggTestCase = 
(InternalSingleBucketAggregationTestCase) testCase; + InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase; if (currentDepth < maxDepth) { singleBucketAggTestCase.subAggregationsSupplier = () -> createTestInstance(0, currentDepth + 1, maxDepth); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index 828b419909238..c7bbcfc147780 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -106,13 +106,14 @@ public void testFromXContent() throws IOException { } factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - String contentString = factoriesBuilder.toString(); - logger.info("Content string: {}", contentString); - PipelineAggregationBuilder newAgg = parse(parser); - assertNotSame(newAgg, testAgg); - assertEquals(testAgg, newAgg); - assertEquals(testAgg.hashCode(), newAgg.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + String contentString = factoriesBuilder.toString(); + logger.info("Content string: {}", contentString); + PipelineAggregationBuilder newAgg = parse(parser); + assertNotSame(newAgg, testAgg); + assertEquals(testAgg, newAgg); + assertEquals(testAgg.hashCode(), newAgg.hashCode()); + } } protected PipelineAggregationBuilder parse(XContentParser parser) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java index 4577986da270c..327a717f05c52 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java @@ -94,34 +94,37 @@ public void testOtherBucket() throws IOException { builder.startObject(); builder.startArray("filters").endArray(); builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); - // The other bucket is disabled by default - assertFalse(filters.otherBucket()); - - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.startObject(); - builder.startArray("filters").endArray(); - builder.field("other_bucket_key", "some_key"); - builder.endObject(); - parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - filters = FiltersAggregationBuilder.parse("agg_name", parser); - // but setting a key enables it automatically - assertTrue(filters.otherBucket()); - - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.startObject(); - builder.startArray("filters").endArray(); - builder.field("other_bucket", false); - builder.field("other_bucket_key", "some_key"); - builder.endObject(); - parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - filters = FiltersAggregationBuilder.parse("agg_name", parser); - // unless the other bucket is explicitly disabled - assertFalse(filters.otherBucket()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // The other bucket is disabled by default + assertFalse(filters.otherBucket()); + + builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startArray("filters").endArray(); + builder.field("other_bucket_key", "some_key"); + 
builder.endObject(); + } + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // but setting a key enables it automatically + assertTrue(filters.otherBucket()); + + builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startArray("filters").endArray(); + builder.field("other_bucket", false); + builder.field("other_bucket_key", "some_key"); + builder.endObject(); + } + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // unless the other bucket is explicitly disabled + assertFalse(filters.otherBucket()); + } } public void testRewrite() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 414954a2d905b..5009594160ef7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -264,9 +264,8 @@ public void testBuilderAndParser() throws Exception { protected void checkParseException(ParseFieldRegistry significanceHeuristicParserRegistry, String faultyHeuristicDefinition, String expectedError) throws IOException { - try { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); + try (XContentParser stParser = createParser(JsonXContent.jsonXContent, + "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}")) { stParser.nextToken(); 
SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", stParser); fail(); @@ -301,9 +300,10 @@ private static SignificanceHeuristic parseSignificanceHeuristic( protected SignificanceHeuristic parseFromString(ParseFieldRegistry significanceHeuristicParserRegistry, String heuristicString) throws IOException { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}"); - return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser); + try (XContentParser stParser = createParser(JsonXContent.jsonXContent, + "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}")) { + return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser); + } } void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic heuristicNotSuperset) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java index b8b33b97e4d00..c770bef7df613 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java @@ -69,23 +69,23 @@ protected Map, Object>> pluginScripts() { scripts.put("_value", vars -> vars.get("_value")); scripts.put("doc['str_value'].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("str_value"); }); scripts.put("doc['str_values'].values", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Strings strValue = (ScriptDocValues.Strings) doc.get("str_values"); return strValue.getValues(); }); scripts.put("doc[' + singleNumericField() + '].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc =(Map) vars.get("doc"); return 
doc.get(singleNumericField()); }); scripts.put("doc[' + multiNumericField(false) + '].values", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc =(Map) vars.get("doc"); return ((ScriptDocValues) doc.get(multiNumericField(false))).getValues(); }); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 816c0464d95d9..13e1489795996 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -193,14 +193,55 @@ protected Map, Object>> pluginScripts() { return newAggregation; }); + scripts.put("state.items = new ArrayList()", vars -> + aggContextScript(vars, state -> ((HashMap) state).put("items", new ArrayList()))); + + scripts.put("state.items.add(1)", vars -> + aggContextScript(vars, state -> { + HashMap stateMap = (HashMap) state; + List items = (List) stateMap.get("items"); + items.add(1); + })); + + scripts.put("sum context state values", vars -> { + int sum = 0; + HashMap state = (HashMap) vars.get("state"); + List items = (List) state.get("items"); + + for (Object x : items) { + sum += (Integer)x; + } + + return sum; + }); + + scripts.put("sum context states", vars -> { + Integer sum = 0; + + List states = (List) vars.get("states"); + for (Object state : states) { + sum += ((Number) state).intValue(); + } + + return sum; + }); + return scripts; } - @SuppressWarnings("unchecked") static Object aggScript(Map vars, Consumer fn) { - T agg = (T) vars.get("_agg"); - fn.accept(agg); - return agg; + return aggScript(vars, fn, "_agg"); + } + + static Object aggContextScript(Map vars, Consumer fn) { + return aggScript(vars, fn, "state"); + } + + @SuppressWarnings("unchecked") + private static Object aggScript(Map vars, Consumer fn, String stateVarName) { + T aggState = (T) 
vars.get(stateVarName); + fn.accept(aggState); + return aggState; } } @@ -1015,4 +1056,37 @@ public void testConflictingAggAndScriptParams() { SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get); assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters")); } + + public void testAggFromContext() { + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items = new ArrayList()", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items.add(1)", Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context state values", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context states", + Collections.emptyMap()); + + SearchResponse response = client() + .prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted") + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript)) + .get(); + + Aggregation aggregation = response.getAggregations().get("scripted"); + assertThat(aggregation, notNullValue()); + assertThat(aggregation, instanceOf(ScriptedMetric.class)); + + ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation; + assertThat(scriptedMetricAggregation.getName(), equalTo("scripted")); + assertThat(scriptedMetricAggregation.aggregation(), notNullValue()); + + assertThat(scriptedMetricAggregation.aggregation(), instanceOf(Integer.class)); + Integer aggResult = (Integer) scriptedMetricAggregation.aggregation(); + long totalAgg = aggResult.longValue(); + assertThat(totalAgg, equalTo(numDocs)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index 7a7c66d21aada..b2a949ceeee1a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.script.MockScriptEngine; -import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; @@ -107,7 +106,7 @@ public static void initMockScripts() { }); SCRIPTS.put("mapScriptScore", params -> { Map agg = (Map) params.get("_agg"); - ((List) agg.get("collector")).add(((ScoreAccessor) params.get("_score")).doubleValue()); + ((List) agg.get("collector")).add(((Number) params.get("_score")).doubleValue()); return agg; }); SCRIPTS.put("combineScriptScore", params -> { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index 6e477021a541f..dc2624dc39e40 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -214,21 +214,22 @@ private IncludeExclude serialize(IncludeExclude incExc, ParseField field) throws incExc.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - XContentParser.Token token = parser.nextToken(); - assertEquals(token, XContentParser.Token.START_OBJECT); - token = parser.nextToken(); - assertEquals(token, XContentParser.Token.FIELD_NAME); - 
assertEquals(field.getPreferredName(), parser.currentName()); - token = parser.nextToken(); - - if (field.getPreferredName().equalsIgnoreCase("include")) { - return IncludeExclude.parseInclude(parser); - } else if (field.getPreferredName().equalsIgnoreCase("exclude")) { - return IncludeExclude.parseExclude(parser); - } else { - throw new IllegalArgumentException( + try (XContentParser parser = createParser(builder)) { + XContentParser.Token token = parser.nextToken(); + assertEquals(token, XContentParser.Token.START_OBJECT); + token = parser.nextToken(); + assertEquals(token, XContentParser.Token.FIELD_NAME); + assertEquals(field.getPreferredName(), parser.currentName()); + token = parser.nextToken(); + + if (field.getPreferredName().equalsIgnoreCase("include")) { + return IncludeExclude.parseInclude(parser); + } else if (field.getPreferredName().equalsIgnoreCase("exclude")) { + return IncludeExclude.parseExclude(parser); + } else { + throw new IllegalArgumentException( "Unexpected field name serialized in test: " + field.getPreferredName()); + } } } @@ -260,28 +261,29 @@ private IncludeExclude serializeMixedRegex(IncludeExclude incExc) throws IOExcep incExc.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - XContentParser.Token token = parser.nextToken(); - assertEquals(token, XContentParser.Token.START_OBJECT); - - IncludeExclude inc = null; - IncludeExclude exc = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - assertEquals(XContentParser.Token.FIELD_NAME, token); - if (IncludeExclude.INCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { - token = parser.nextToken(); - inc = IncludeExclude.parseInclude(parser); - } else if (IncludeExclude.EXCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { - token = parser.nextToken(); - exc = IncludeExclude.parseExclude(parser); - } else { - throw new 
IllegalArgumentException("Unexpected field name serialized in test: " + parser.currentName()); + try (XContentParser parser = createParser(builder)) { + XContentParser.Token token = parser.nextToken(); + assertEquals(token, XContentParser.Token.START_OBJECT); + + IncludeExclude inc = null; + IncludeExclude exc = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + assertEquals(XContentParser.Token.FIELD_NAME, token); + if (IncludeExclude.INCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + token = parser.nextToken(); + inc = IncludeExclude.parseInclude(parser); + } else if (IncludeExclude.EXCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + token = parser.nextToken(); + exc = IncludeExclude.parseExclude(parser); + } else { + throw new IllegalArgumentException("Unexpected field name serialized in test: " + parser.currentName()); + } } + assertNotNull(inc); + assertNotNull(exc); + // Include and Exclude clauses are parsed independently and then merged + return IncludeExclude.merge(inc, exc); } - assertNotNull(inc); - assertNotNull(exc); - // Include and Exclude clauses are parsed independently and then merged - return IncludeExclude.merge(inc, exc); } } diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 2550c0a4a444c..12c3e487ff124 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -64,7 +64,9 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } testSearchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertParseSearchSource(testSearchSourceBuilder, createParser(builder)); + try (XContentParser xParser = createParser(builder)) { + 
assertParseSearchSource(testSearchSourceBuilder, xParser); + } } public void testFromXContentInvalid() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 95da15e838c31..37359d9f20d71 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -139,17 +139,18 @@ public void testFromXContent() throws IOException { shuffled = shuffleXContent(builder, "fields"); } - XContentParser parser = createParser(shuffled); - parser.nextToken(); - HighlightBuilder secondHighlightBuilder; - try { - secondHighlightBuilder = HighlightBuilder.fromXContent(parser); - } catch (RuntimeException e) { - throw new RuntimeException("Error parsing " + highlightBuilder, e); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + HighlightBuilder secondHighlightBuilder; + try { + secondHighlightBuilder = HighlightBuilder.fromXContent(parser); + } catch (RuntimeException e) { + throw new RuntimeException("Error parsing " + highlightBuilder, e); + } + assertNotSame(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); } - assertNotSame(highlightBuilder, secondHighlightBuilder); - assertEquals(highlightBuilder, secondHighlightBuilder); - assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); } } @@ -179,8 +180,9 @@ public void testUnknownArrayNameExpection() throws IOException { } private T expectParseThrows(Class exceptionClass, String highlightElement) throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - return 
expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(parser)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + return expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(parser)); + } } /** @@ -389,30 +391,32 @@ public void testParsingTagsSchema() throws IOException { String highlightElement = "{\n" + " \"tags_schema\" : \"styled\"\n" + "}\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { - HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); - assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, + assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, highlightBuilder.postTags()); - highlightElement = "{\n" + + highlightElement = "{\n" + " \"tags_schema\" : \"default\"\n" + "}\n"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); + } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'default' should alter 
post_tags", HighlightBuilder.DEFAULT_POST_TAGS, + assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, highlightBuilder.postTags()); - XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"tags_schema\" : \"somthing_else\"\n" + "}\n"); - assertThat(e.getMessage(), containsString("[highlight] failed to parse field [tags_schema]")); - assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [tags_schema]")); + assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); + } } /** @@ -420,22 +424,22 @@ public void testParsingTagsSchema() throws IOException { */ public void testParsingEmptyStructure() throws IOException { String highlightElement = "{ }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - - HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + } highlightElement = "{ \"fields\" : { } }"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); - - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("defining no field should return plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("defining no field should return plain 
HighlightBuilder", new HighlightBuilder(), highlightBuilder); + } highlightElement = "{ \"fields\" : { \"foo\" : { } } }"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); - - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); + } } public void testPreTagsWithoutPostTags() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java index 7b27cf78ec65a..7044a7b103098 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -62,16 +62,17 @@ public void testFromXContent() throws IOException { builder.startObject(); // we need to wrap xContent output in proper object to create a parser for it builder = highlightField.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name - parser.nextToken(); - HighlightField parsedField = HighlightField.fromXContent(parser); - assertEquals(highlightField, parsedField); - if (highlightField.fragments() != null) { - assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + try (XContentParser parser = createParser(builder)) { + parser.nextToken(); // skip to the opening object token, 
fromXContent advances from here and starts with the field name + parser.nextToken(); + HighlightField parsedField = HighlightField.fromXContent(parser); + assertEquals(highlightField, parsedField); + if (highlightField.fragments() != null) { + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + } + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); } - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index efd3e5ef2ca06..700b3949facf4 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -121,12 +121,13 @@ public void testFromXContent() throws IOException { XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - parser.nextToken(); - RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); - assertNotSame(rescoreBuilder, secondRescoreBuilder); - assertEquals(rescoreBuilder, secondRescoreBuilder); - assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); + assertNotSame(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + } } } @@ -214,67 +215,61 @@ public MappedFieldType fieldMapper(String name) { public void testUnknownFieldsExpection() throws IOException { String rescoreElement = "{\n" + - " \"window_size\" : 
20,\n" + - " \"bad_rescorer_name\" : { }\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"bad_rescorer_name\" : { }\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(NamedObjectNotFoundException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("[3:27] unable to parse RescorerBuilder with name [bad_rescorer_name]: parser not found", e.getMessage()); } - rescoreElement = "{\n" + - " \"bad_fieldName\" : 20\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"bad_fieldName\" : 20\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : [ ]\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : [ ]\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); } rescoreElement = "{ }"; - { - XContentParser parser = createParser(rescoreElement); + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("missing rescore type", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"bad_fieldname\" : 1.0 } \n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : { \"bad_fieldname\" : 1.0 } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { 
XContentParseException e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("[3:17] [query] unknown field [bad_fieldname], parser not found", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertThat(e.getMessage(), containsString("[query] failed to parse field [rescore_query]")); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" - + "}\n"; - XContentParser parser = createParser(rescoreElement); - RescorerBuilder.parseFromXContent(parser); + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { + RescorerBuilder.parseFromXContent(parser); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 53bd9da2ff1de..f7457d965744a 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -136,11 +136,12 @@ private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder)); - parser.nextToken(); - parser.nextToken(); - 
parser.nextToken(); - return SearchAfterBuilder.fromXContent(parser); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder))) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + return SearchAfterBuilder.fromXContent(parser); + } } private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException { @@ -174,14 +175,15 @@ public void testFromXContent() throws Exception { builder.startObject(); searchAfterBuilder.innerToXContent(builder); builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser); - assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); - assertEquals(searchAfterBuilder, secondSearchAfterBuilder); - assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser); + assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index b93ebc1adde72..30ed0cb5ab5b5 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -306,11 +306,12 @@ public void testFromXContent() throws Exception { builder.startObject(); sliceBuilder.innerToXContent(builder); builder.endObject(); - XContentParser parser = 
createParser(shuffleXContent(builder)); - SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(parser); - assertNotSame(sliceBuilder, secondSliceBuilder); - assertEquals(sliceBuilder, secondSliceBuilder); - assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(parser); + assertNotSame(sliceBuilder, secondSliceBuilder); + assertEquals(sliceBuilder, secondSliceBuilder); + assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode()); + } } public void testInvalidArguments() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index d05ddf4ee640e..2285af3ec46c0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -121,21 +121,22 @@ public void testFromXContent() throws IOException { } testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser itemParser = createParser(shuffled); - itemParser.nextToken(); - - /* - * filter out name of sort, or field name to sort on for element fieldSort - */ - itemParser.nextToken(); - String elementName = itemParser.currentName(); - itemParser.nextToken(); - - T parsedItem = fromXContent(itemParser, elementName); - assertNotSame(testItem, parsedItem); - assertEquals(testItem, parsedItem); - assertEquals(testItem.hashCode(), parsedItem.hashCode()); - assertWarnings(testItem); + try (XContentParser itemParser = createParser(shuffled)) { + itemParser.nextToken(); + + /* + * filter out name of sort, or field name to sort on for element fieldSort + */ + itemParser.nextToken(); + String elementName = itemParser.currentName(); + itemParser.nextToken(); + + T 
parsedItem = fromXContent(itemParser, elementName); + assertNotSame(testItem, parsedItem); + assertEquals(testItem, parsedItem); + assertEquals(testItem.hashCode(), parsedItem.hashCode()); + assertWarnings(testItem); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 6aceed996ccdc..268f4aeb26d65 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -304,14 +304,15 @@ public void testBuildNested() throws IOException { public void testUnknownOptionFails() throws IOException { String json = "{ \"post_date\" : {\"reverse\" : true} },\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, json); - // need to skip until parser is located on second START_OBJECT - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, "")); - assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) { + // need to skip until parser is located on second START_OBJECT + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, "")); + assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage()); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index b70a87ea9860f..7ffedbf43ec2c 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -232,12 +232,13 @@ public void testSortModeSumIsRejectedInJSON() throws IOException { " \"distance_type\" : \"arc\",\n" + " \"mode\" : \"SUM\"\n" + "}"; - XContentParser itemParser = createParser(JsonXContent.jsonXContent, json); - itemParser.nextToken(); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, json)) { + itemParser.nextToken(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(itemParser, "")); - assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage()); + assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage()); + } } public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException { @@ -258,16 +259,17 @@ public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException { " },\n" + " \"validation_method\" : \"STRICT\"\n" + " }"; - XContentParser itemParser = createParser(JsonXContent.jsonXContent, json); - itemParser.nextToken(); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, json)) { + itemParser.nextToken(); - GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(itemParser, json); - assertEquals("[-19.700583312660456, -2.8225036337971687, " + GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(itemParser, json); + assertEquals("[-19.700583312660456, -2.8225036337971687, " + "31.537466906011105, -74.63590376079082, " + "43.71844606474042, -5.548660643398762, " + "-37.20467280596495, 38.71751043945551, " + "-69.44606635719538, 84.25200328230858, " + "-39.03717711567879, 44.74099852144718]", Arrays.toString(result.points())); + } } public void testGeoDistanceSortParserManyPointsNoException() throws Exception { @@ -380,9 +382,10 @@ public void 
testGeoDistanceSortDeprecatedSortModeException() throws Exception { } private GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception { - XContentParser parser = createParser(sortBuilder); - parser.nextToken(); - return GeoDistanceSortBuilder.fromXContent(parser, null); + try (XContentParser parser = createParser(sortBuilder)) { + parser.nextToken(); + return GeoDistanceSortBuilder.fromXContent(parser, null); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java index 0908d83896f92..b0613b320b86a 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java @@ -73,12 +73,13 @@ public void testFromXContent() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - parser.nextToken(); - NestedSortBuilder parsedItem = NestedSortBuilder.fromXContent(parser); - assertNotSame(testItem, parsedItem); - assertEquals(testItem, parsedItem); - assertEquals(testItem.hashCode(), parsedItem.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + NestedSortBuilder parsedItem = NestedSortBuilder.fromXContent(parser); + assertNotSame(testItem, parsedItem); + assertEquals(testItem, parsedItem); + assertEquals(testItem.hashCode(), parsedItem.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 9a030cc3aabcb..0f19b709a4fed 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -177,20 +177,21 @@ public void testParseJson() throws IOException { "\"mode\" : \"max\",\n" + "\"order\" : \"asc\"\n" + "} }\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); - assertEquals("doc['field_name'].value * factor", builder.script().getIdOrCode()); - assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); - assertEquals(1.1, builder.script().getParams().get("factor")); - assertEquals(ScriptType.INLINE, builder.script().getType()); - assertEquals(ScriptSortType.NUMBER, builder.type()); - assertEquals(SortOrder.ASC, builder.order()); - assertEquals(SortMode.MAX, builder.sortMode()); - assertNull(builder.getNestedSort()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); + assertEquals("doc['field_name'].value * factor", builder.script().getIdOrCode()); + assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); + assertEquals(1.1, builder.script().getParams().get("factor")); + assertEquals(ScriptType.INLINE, builder.script().getType()); + assertEquals(ScriptSortType.NUMBER, builder.type()); + assertEquals(SortOrder.ASC, builder.order()); + assertEquals(SortMode.MAX, builder.sortMode()); + assertNull(builder.getNestedSort()); + } } public void testParseJson_simple() throws IOException { @@ -201,54 +202,58 @@ public void testParseJson_simple() throws IOException { "\"mode\" : \"max\",\n" + "\"order\" : \"asc\"\n" + "} }\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - ScriptSortBuilder builder = 
ScriptSortBuilder.fromXContent(parser, null); - assertEquals("doc['field_name'].value", builder.script().getIdOrCode()); - assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); - assertEquals(builder.script().getParams(), Collections.emptyMap()); - assertEquals(ScriptType.INLINE, builder.script().getType()); - assertEquals(ScriptSortType.NUMBER, builder.type()); - assertEquals(SortOrder.ASC, builder.order()); - assertEquals(SortMode.MAX, builder.sortMode()); - assertNull(builder.getNestedSort()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); + assertEquals("doc['field_name'].value", builder.script().getIdOrCode()); + assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); + assertEquals(builder.script().getParams(), Collections.emptyMap()); + assertEquals(ScriptType.INLINE, builder.script().getType()); + assertEquals(ScriptSortType.NUMBER, builder.type()); + assertEquals(SortOrder.ASC, builder.order()); + assertEquals(SortMode.MAX, builder.sortMode()); + assertNull(builder.getNestedSort()); + } } public void testParseBadFieldNameExceptions() throws IOException { String scriptSort = "{\"_script\" : {" + "\"bad_field\" : \"number\"" + "} }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> 
ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + } } public void testParseBadFieldNameExceptionsOnStartObject() throws IOException { String scriptSort = "{\"_script\" : {" + "\"bad_field\" : { \"order\" : \"asc\" } } }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + } } public void testParseUnexpectedToken() throws IOException { String scriptSort = "{\"_script\" : {" + "\"script\" : [ \"order\" : \"asc\" ] } }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - Exception e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertThat(e.getMessage(), containsString("[_script] script doesn't support values of type: START_ARRAY")); + Exception e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertThat(e.getMessage(), containsString("[_script] script doesn't support values of type: START_ARRAY")); + } } /** diff --git 
a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java index f267dec2a8623..5f5ea5e869450 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java @@ -252,12 +252,13 @@ protected NamedXContentRegistry xContentRegistry() { } private List> parseSort(String jsonString) throws IOException { - XContentParser itemParser = createParser(JsonXContent.jsonXContent, jsonString); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, jsonString)) { - assertEquals(XContentParser.Token.START_OBJECT, itemParser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, itemParser.nextToken()); - assertEquals("sort", itemParser.currentName()); - itemParser.nextToken(); - return SortBuilder.fromXContent(itemParser); + assertEquals(XContentParser.Token.START_OBJECT, itemParser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, itemParser.nextToken()); + assertEquals("sort", itemParser.currentName()); + itemParser.nextToken(); + return SortBuilder.fromXContent(itemParser); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index eb31f19ad4e83..00a287f02528c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -140,14 +140,15 @@ public void testFromXContent() throws IOException { xContentBuilder.endObject(); XContentBuilder shuffled = shuffleXContent(xContentBuilder, shuffleProtectedFields()); - XContentParser parser = createParser(shuffled); - // we need to skip the start object and the name, those will be parsed by outer SuggestBuilder - 
parser.nextToken(); + try (XContentParser parser = createParser(shuffled)) { + // we need to skip the start object and the name, those will be parsed by outer SuggestBuilder + parser.nextToken(); - SuggestionBuilder secondSuggestionBuilder = SuggestionBuilder.fromXContent(parser); - assertNotSame(suggestionBuilder, secondSuggestionBuilder); - assertEquals(suggestionBuilder, secondSuggestionBuilder); - assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); + SuggestionBuilder secondSuggestionBuilder = SuggestionBuilder.fromXContent(parser); + assertNotSame(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 0717e1be2121e..a3fff7f9d5bcc 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -890,7 +890,7 @@ public void testSkipDuplicates() throws Exception { assertSuggestions(searchResponse, true, "suggestions", expected); } - public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) { + public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... 
suggestions) { SearchResponse searchResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)).execute().actionGet(); assertSuggestions(searchResponse, suggestionName, suggestions); } @@ -971,7 +971,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi if (completionMappingBuilder.contextMappings != null) { mapping = mapping.startArray("contexts"); - for (Map.Entry contextMapping : completionMappingBuilder.contextMappings.entrySet()) { + for (Map.Entry> contextMapping : completionMappingBuilder.contextMappings.entrySet()) { mapping = mapping.startObject() .field("name", contextMapping.getValue().name()) .field("type", contextMapping.getValue().type().name()); @@ -1189,7 +1189,7 @@ static class CompletionMappingBuilder { String indexAnalyzer = "simple"; Boolean preserveSeparators = random().nextBoolean(); Boolean preservePositionIncrements = random().nextBoolean(); - LinkedHashMap contextMappings = null; + LinkedHashMap> contextMappings = null; public CompletionMappingBuilder searchAnalyzer(String searchAnalyzer) { this.searchAnalyzer = searchAnalyzer; @@ -1208,7 +1208,7 @@ public CompletionMappingBuilder preservePositionIncrements(Boolean preservePosit return this; } - public CompletionMappingBuilder context(LinkedHashMap contextMappings) { + public CompletionMappingBuilder context(LinkedHashMap> contextMappings) { this.contextMappings = contextMappings; return this; } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 13f7e55277cc4..00defee8daaf4 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest; import 
com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; @@ -68,7 +69,7 @@ protected int numberOfReplicas() { } public void testContextPrefix() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); boolean addAnotherContext = randomBoolean(); if (addAnotherContext) { @@ -99,7 +100,7 @@ public void testContextPrefix() throws Exception { } public void testContextRegex() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); boolean addAnotherContext = randomBoolean(); if (addAnotherContext) { @@ -130,7 +131,7 @@ public void testContextRegex() throws Exception { } public void testContextFuzzy() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); boolean addAnotherContext = randomBoolean(); if (addAnotherContext) { @@ -162,7 +163,7 @@ public void testContextFuzzy() throws Exception { public void testContextFilteringWorksWithUTF8Categories() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); IndexResponse indexResponse = client().prepareIndex(INDEX, TYPE, "1") @@ -183,7 +184,7 @@ public void testContextFilteringWorksWithUTF8Categories() throws Exception { public void 
testSingleContextFiltering() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); int numDocs = 10; @@ -209,7 +210,7 @@ public void testSingleContextFiltering() throws Exception { public void testSingleContextBoosting() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); int numDocs = 10; @@ -237,7 +238,7 @@ public void testSingleContextBoosting() throws Exception { public void testSingleContextMultipleContexts() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); int numDocs = 10; @@ -262,7 +263,7 @@ public void testSingleContextMultipleContexts() throws Exception { } public void testMultiContextFiltering() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); map.put("type", ContextBuilder.category("type").field("type").build()); final CompletionMappingBuilder mapping = new 
CompletionMappingBuilder().context(map); @@ -306,7 +307,7 @@ public void testMultiContextFiltering() throws Exception { @AwaitsFix(bugUrl = "multiple context boosting is broken, as a suggestion, contexts pair is treated as (num(context) entries)") public void testMultiContextBoosting() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); map.put("type", ContextBuilder.category("type").field("type").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); @@ -361,7 +362,7 @@ public void testMultiContextBoosting() throws Exception { } public void testMissingContextValue() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); map.put("type", ContextBuilder.category("type").field("type").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); @@ -391,7 +392,7 @@ public void testMissingContextValue() throws Exception { } public void testSeveralContexts() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); final int numContexts = randomIntBetween(2, 5); for (int i = 0; i < numContexts; i++) { map.put("type" + i, ContextBuilder.category("type" + i).field("type" + i).build()); @@ -421,7 +422,7 @@ public void testSeveralContexts() throws Exception { } public void testSimpleGeoPrefix() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -446,7 +447,7 @@ public void testSimpleGeoPrefix() throws Exception { } public void testGeoFiltering() throws Exception { - 
LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -478,7 +479,7 @@ public void testGeoFiltering() throws Exception { } public void testGeoBoosting() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -511,7 +512,7 @@ public void testGeoBoosting() throws Exception { } public void testGeoPointContext() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -551,7 +552,7 @@ public void testGeoNeighbours() throws Exception { neighbours.add("gcpu"); neighbours.add("u10h"); - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").precision(4).build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -640,7 +641,7 @@ public void testGeoField() throws Exception { } public void testSkipDuplicatesWithContexts() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("type", ContextBuilder.category("type").field("type").build()); map.put("cat", ContextBuilder.category("cat").field("cat").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); @@ -706,7 +707,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi List categoryContextFields = new 
ArrayList<>(); if (completionMappingBuilder.contextMappings != null) { mapping.startArray("contexts"); - for (Map.Entry contextMapping : completionMappingBuilder.contextMappings.entrySet()) { + for (Map.Entry> contextMapping : completionMappingBuilder.contextMappings.entrySet()) { mapping.startObject() .field("name", contextMapping.getValue().name()) .field("type", contextMapping.getValue().type().name()); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 16de2a3506740..2b99c62185b7c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -74,11 +74,12 @@ public void testFromXContent() throws IOException { xContentBuilder.prettyPrint(); } suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(xContentBuilder); - SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(parser); - assertNotSame(suggestBuilder, secondSuggestBuilder); - assertEquals(suggestBuilder, secondSuggestBuilder); - assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); + try (XContentParser parser = createParser(xContentBuilder)) { + SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(parser); + assertNotSame(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 6ebced51e1ea1..3a7451e78fb4f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -368,44 +368,48 @@ public void testIndexingWithMultipleContexts() throws Exception { public void testQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value("context1"); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testBooleanQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(true); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("true")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("true")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testNumberQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(10); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("10")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("10")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testNULLQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().nullValue(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + 
CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testQueryContextParsingArray() throws Exception { @@ -413,16 +417,17 @@ public void testQueryContextParsingArray() throws Exception { .value("context1") .value("context2") .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(2)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + 
assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeValuesArray() throws Exception { @@ -432,22 +437,23 @@ public void testQueryContextParsingMixedTypeValuesArray() throws Exception { .value(true) .value(10) .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("true")); - assertThat(internalQueryContexts.get(2).boost, equalTo(1)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("10")); - assertThat(internalQueryContexts.get(3).boost, equalTo(1)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + 
assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("true")); + assertThat(internalQueryContexts.get(2).boost, equalTo(1)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("10")); + assertThat(internalQueryContexts.get(3).boost, equalTo(1)); + assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Exception { @@ -458,11 +464,12 @@ public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Excep .value(10) .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testQueryContextParsingObject() throws Exception { @@ -471,13 +478,14 @@ public void testQueryContextParsingObject() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingBoolean() throws Exception { @@ -486,13 +494,14 @@ public void testQueryContextParsingObjectHavingBoolean() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("false")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + 
assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("false")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingNumber() throws Exception { @@ -501,13 +510,14 @@ public void testQueryContextParsingObjectHavingNumber() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("333")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("333")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingNULL() throws Exception { @@ -516,11 +526,12 @@ public void testQueryContextParsingObjectHavingNULL() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + 
CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - Exception e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(e.getMessage(), containsString("category context must be a string, number or boolean")); + Exception e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(e.getMessage(), containsString("category context must be a string, number or boolean")); + } } public void testQueryContextParsingObjectArray() throws Exception { @@ -536,16 +547,17 @@ public void testQueryContextParsingObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(2)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(3)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + 
assertThat(internalQueryContexts.get(1).boost, equalTo(3)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeObjectArray() throws Exception { @@ -571,22 +583,23 @@ public void testQueryContextParsingMixedTypeObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(3)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("true")); - assertThat(internalQueryContexts.get(2).boost, equalTo(3)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("333")); - assertThat(internalQueryContexts.get(3).boost, equalTo(3)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + 
assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(3)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("true")); + assertThat(internalQueryContexts.get(2).boost, equalTo(3)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("333")); + assertThat(internalQueryContexts.get(3).boost, equalTo(3)); + assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Exception { @@ -617,11 +630,12 @@ public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Excep .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean")); + } } @@ -640,22 +654,23 @@ public void testQueryContextParsingMixed() throws Exception { .field("prefix", true) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List 
internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("false")); - assertThat(internalQueryContexts.get(2).boost, equalTo(1)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("333")); - assertThat(internalQueryContexts.get(3).boost, equalTo(2)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("false")); + assertThat(internalQueryContexts.get(2).boost, equalTo(1)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("333")); + assertThat(internalQueryContexts.get(3).boost, equalTo(2)); + 
assertThat(internalQueryContexts.get(3).isPrefix, equalTo(true)); + } } public void testQueryContextParsingMixedHavingNULL() throws Exception { @@ -674,11 +689,12 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception { .endObject() .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testUnknownQueryContextParsing() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 862916890e1bb..37fdb7e0aa08b 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -48,7 +48,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe private static final Map> contextMap = new HashMap<>(); private static String categoryContextName; private static String geoQueryContextName; - private static List contextMappings = new ArrayList<>(); + private static List> 
contextMappings = new ArrayList<>(); @Override protected CompletionSuggestionBuilder randomSuggestionBuilder() { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index ebfac5f58ef77..925526323a540 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -124,12 +124,13 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } generator.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PARSER.apply(parser, null); - assertNotSame(generator, secondGenerator); - assertEquals(generator, secondGenerator); - assertEquals(generator.hashCode(), secondGenerator.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PARSER.apply(parser, null); + assertNotSame(generator, secondGenerator); + assertEquals(generator, secondGenerator); + assertEquals(generator.hashCode(), secondGenerator.hashCode()); + } } } @@ -187,9 +188,10 @@ public void testIllegalXContent() throws IOException { private void assertIllegalXContent(String directGenerator, Class exceptionClass, String exceptionMsg) throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator); - Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null)); - assertThat(e.getMessage(), containsString(exceptionMsg)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator)) { + 
Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString(exceptionMsg)); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index e75d01739ccb8..5923cd3332e5e 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -95,12 +95,13 @@ public void testFromXContent() throws IOException { contentBuilder.startObject(); testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - XContentParser parser = createParser(shuffleXContent(contentBuilder)); - parser.nextToken(); // go to start token, real parsing would do that in the outer element parser - SmoothingModel parsedModel = fromXContent(parser); - assertNotSame(testModel, parsedModel); - assertEquals(testModel, parsedModel); - assertEquals(testModel.hashCode(), parsedModel.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(contentBuilder))) { + parser.nextToken(); // go to start token, real parsing would do that in the outer element parser + SmoothingModel parsedModel = fromXContent(parser); + assertNotSame(testModel, parsedModel); + assertEquals(testModel, parsedModel); + assertEquals(testModel.hashCode(), parsedModel.hashCode()); + } } /** diff --git a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java index 7213d7bf9802f..5842b179078d0 100644 --- a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +++ b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java @@ -19,13 +19,13 @@ package 
org.elasticsearch.test.hamcrest; +import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.locationtech.spatial4j.shape.Shape; -import org.locationtech.spatial4j.shape.ShapeCollection; -import org.locationtech.spatial4j.shape.impl.GeoCircle; -import org.locationtech.spatial4j.shape.impl.RectangleImpl; -import org.locationtech.spatial4j.shape.jts.JtsGeometry; -import org.locationtech.spatial4j.shape.jts.JtsPoint; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.xcontent.XContentParser; +import org.hamcrest.Matcher; +import org.junit.Assert; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.LineString; @@ -33,12 +33,12 @@ import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Polygon; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentParser; -import org.hamcrest.Matcher; -import org.junit.Assert; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.impl.GeoCircle; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.shape.jts.JtsPoint; import java.util.Arrays; import java.util.Collections; @@ -208,9 +208,9 @@ public static void assertEquals(Shape s1, Shape s2) { } else if (s1 instanceof ShapeCollection && s2 instanceof ShapeCollection) { assertEquals((ShapeCollection)s1, (ShapeCollection)s2); } else if (s1 instanceof GeoCircle && s2 instanceof GeoCircle) { - Assert.assertEquals((GeoCircle)s1, (GeoCircle)s2); + Assert.assertEquals(s1, s2); } else if 
(s1 instanceof RectangleImpl && s2 instanceof RectangleImpl) { - Assert.assertEquals((RectangleImpl)s1, (RectangleImpl)s2); + Assert.assertEquals(s1, s2); } else { //We want to know the type of the shape because we test shape equality in a special way... //... in particular we test that one ring is equivalent to another ring even if the points are rotated or reversed. @@ -254,7 +254,7 @@ private static double distance(double lat1, double lon1, double lat2, double lon return GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.DEFAULT); } - public static void assertValidException(XContentParser parser, Class expectedException) { + public static void assertValidException(XContentParser parser, Class expectedException) { try { ShapeParser.parse(parser).build(); Assert.fail("process completed successfully when " + expectedException.getName() + " expected"); diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index c4bf2518a9f8f..35dac2e99e00d 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -20,6 +20,7 @@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; + import org.apache.logging.log4j.Logger; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.common.Booleans; @@ -175,7 +176,7 @@ public boolean implies(ProtectionDomain domain, Permission permission) { /** Add the codebase url of the given classname to the codebases map, if the class exists. 
*/ private static void addClassCodebase(Map codebases, String name, String classname) { try { - Class clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname); + Class clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname); if (codebases.put(name, clazz.getProtectionDomain().getCodeSource().getLocation()) != null) { throw new IllegalStateException("Already added " + name + " codebase for testing"); } diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index b86cb9ff29352..e608bd13d2559 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.Function; @@ -115,6 +116,18 @@ public String execute() { } else if (context.instanceClazz.equals(ScoreScript.class)) { ScoreScript.Factory factory = new MockScoreScript(script); return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.InitScript.class)) { + ScriptedMetricAggContexts.InitScript.Factory factory = mockCompiled::createMetricAggInitScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.MapScript.class)) { + ScriptedMetricAggContexts.MapScript.Factory factory = mockCompiled::createMetricAggMapScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.CombineScript.class)) { + ScriptedMetricAggContexts.CombineScript.Factory factory = mockCompiled::createMetricAggCombineScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.ReduceScript.class)) { + 
ScriptedMetricAggContexts.ReduceScript.Factory factory = mockCompiled::createMetricAggReduceScript; + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ -179,6 +192,23 @@ public SimilarityWeightScript createSimilarityWeightScript() { public MovingFunctionScript createMovingFunctionScript() { return new MockMovingFunctionScript(); } + + public ScriptedMetricAggContexts.InitScript createMetricAggInitScript(Map params, Object state) { + return new MockMetricAggInitScript(params, state, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.MapScript.LeafFactory createMetricAggMapScript(Map params, Object state, + SearchLookup lookup) { + return new MockMetricAggMapScript(params, state, lookup, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map params, Object state) { + return new MockMetricAggCombineScript(params, state, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.ReduceScript createMetricAggReduceScript(Map params, List states) { + return new MockMetricAggReduceScript(params, states, script != null ? 
script : ctx -> 42d); + } } public class MockExecutableScript implements ExecutableScript { @@ -333,6 +363,108 @@ public double execute(Query query, Field field, Term term) throws IOException { } } + public static class MockMetricAggInitScript extends ScriptedMetricAggContexts.InitScript { + private final Function, Object> script; + + MockMetricAggInitScript(Map params, Object state, + Function, Object> script) { + super(params, state); + this.script = script; + } + + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + script.apply(map); + } + } + + public static class MockMetricAggMapScript implements ScriptedMetricAggContexts.MapScript.LeafFactory { + private final Map params; + private final Object state; + private final SearchLookup lookup; + private final Function, Object> script; + + MockMetricAggMapScript(Map params, Object state, SearchLookup lookup, + Function, Object> script) { + this.params = params; + this.state = state; + this.lookup = lookup; + this.script = script; + } + + @Override + public ScriptedMetricAggContexts.MapScript newInstance(LeafReaderContext context) { + return new ScriptedMetricAggContexts.MapScript(params, state, lookup, context) { + @Override + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + map.put("doc", getDoc()); + map.put("_score", get_score()); + + script.apply(map); + } + }; + } + } + + public static class MockMetricAggCombineScript extends ScriptedMetricAggContexts.CombineScript { + private final Function, Object> script; + + MockMetricAggCombineScript(Map params, Object state, + Function, Object> script) { + 
super(params, state); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + return script.apply(map); + } + } + + public static class MockMetricAggReduceScript extends ScriptedMetricAggContexts.ReduceScript { + private final Function, Object> script; + + MockMetricAggReduceScript(Map params, List states, + Function, Object> script) { + super(params, states); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("states", getStates()); + return script.apply(map); + } + } + public static Script mockInlineScript(final String script) { return new Script(ScriptType.INLINE, "mock", script, emptyMap()); } @@ -343,15 +475,15 @@ public double execute(Map params, double[] values) { return MovingFunctions.unweightedAvg(values); } } - + public class MockScoreScript implements ScoreScript.Factory { - + private final Function, Object> scripts; - + MockScoreScript(Function, Object> scripts) { this.scripts = scripts; } - + @Override public ScoreScript.LeafFactory newFactory(Map params, SearchLookup lookup) { return new ScoreScript.LeafFactory() { @@ -359,7 +491,7 @@ public ScoreScript.LeafFactory newFactory(Map params, SearchLook public boolean needs_score() { return true; } - + @Override public ScoreScript newInstance(LeafReaderContext ctx) throws IOException { Scorer[] scorerHolder = new Scorer[1]; @@ -373,7 +505,7 @@ public double execute() { } return ((Number) scripts.apply(vars)).doubleValue(); } - + @Override public void setScorer(Scorer scorer) { scorerHolder[0] = scorer; diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index a509645495858..06eefb7ccba14 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -187,9 +187,9 @@ public static BytesReference randomSource(Random random, XContentType xContentTy * Randomly adds fields, objects, or arrays to the provided builder. The maximum depth is 5. */ private static void addFields(Random random, XContentBuilder builder, int minNumFields, int currentDepth) throws IOException { - int numFields = randomIntBetween(random, minNumFields, 10); + int numFields = randomIntBetween(random, minNumFields, 5); for (int i = 0; i < numFields; i++) { - if (currentDepth < 5 && random.nextBoolean()) { + if (currentDepth < 5 && random.nextInt(100) >= 70) { if (random.nextBoolean()) { builder.startObject(RandomStrings.randomAsciiOfLengthBetween(random, 6, 10)); addFields(random, builder, minNumFields, currentDepth + 1); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 672d19d5dc2a6..495df4aa461a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -31,6 +31,7 @@ import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; @@ -40,6 +41,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -91,13 +94,38 @@ public abstract class ESRestTestCase extends ESTestCase { /** * Convert the entity from a {@link Response} into a map of maps. */ - public Map entityAsMap(Response response) throws IOException { + public static Map entityAsMap(Response response) throws IOException { XContentType xContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue()); - try (XContentParser parser = createParser(xContentType.xContent(), response.getEntity().getContent())) { + // EMPTY and THROW are fine here because `.map` doesn't use named x content or deprecation + try (XContentParser parser = xContentType.xContent().createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.getEntity().getContent())) { return parser.map(); } } + /** + * Does the cluster being tested have xpack installed? 
+ */ + public static boolean hasXPack() throws IOException { + RestClient client = adminClient(); + if (client == null) { + throw new IllegalStateException("must be called inside of a rest test case test"); + } + Map response = entityAsMap(client.performRequest(new Request("GET", "_nodes/plugins"))); + Map nodes = (Map) response.get("nodes"); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + for (Object module: (List) nodeInfo.get("modules")) { + Map moduleInfo = (Map) module; + if (moduleInfo.get("name").toString().startsWith("x-pack-")) { + return true; + } + } + } + return false; + } + private static List clusterHosts; /** * A client for the running Elasticsearch cluster diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index fdc10a1a246e7..2d6bcc8cf5665 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -40,6 +40,7 @@ import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; +import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; import java.net.URI; @@ -56,18 +57,18 @@ * {@link RestClient} instance used to send the REST requests. Holds the {@link ClientYamlSuiteRestSpec} used to translate api calls into * REST calls. 
*/ -public class ClientYamlTestClient { +public class ClientYamlTestClient implements Closeable { private static final Logger logger = Loggers.getLogger(ClientYamlTestClient.class); private static final ContentType YAML_CONTENT_TYPE = ContentType.create("application/yaml"); private final ClientYamlSuiteRestSpec restSpec; - protected final Map restClients = new HashMap<>(); + private final Map restClients = new HashMap<>(); private final Version esVersion; private final Version masterVersion; private final CheckedConsumer clientBuilderConsumer; - public ClientYamlTestClient( + ClientYamlTestClient( final ClientYamlSuiteRestSpec restSpec, final RestClient restClient, final List hosts, @@ -202,10 +203,10 @@ protected RestClient getRestClient(NodeSelector nodeSelector) { RestClientBuilder builder = RestClient.builder(anyClient.getNodes().toArray(new Node[0])); try { clientBuilderConsumer.accept(builder); - } catch(IOException e) { + } catch (IOException e) { throw new UncheckedIOException(e); } - builder.setNodeSelector(nodeSelector); + builder.setNodeSelector(selector); return builder.build(); }); } @@ -247,4 +248,11 @@ private ClientYamlSuiteRestApi restApi(String apiName) { } return restApi; } + + @Override + public void close() throws IOException { + for (RestClient restClient : restClients.values()) { + restClient.close(); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java index 757fc2218d51c..cfce0653d31c2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java @@ -19,9 +19,12 @@ package org.elasticsearch.test.rest.yaml; +import java.io.IOException; import java.util.Arrays; import java.util.List; +import org.elasticsearch.test.rest.ESRestTestCase; + import static java.util.Collections.unmodifiableList; /** @@ -53,11 +56,23 @@ 
private Features() { * Tells whether all the features provided as argument are supported */ public static boolean areAllSupported(List features) { - for (String feature : features) { - if (!SUPPORTED.contains(feature)) { - return false; + try { + for (String feature : features) { + if (feature.equals("xpack")) { + if (false == ESRestTestCase.hasXPack()) { + return false; + } + } else if (feature.equals("no_xpack")) { + if (ESRestTestCase.hasXPack()) { + return false; + } + } else if (false == SUPPORTED.contains(feature)) { + return false; + } } + return true; + } catch (IOException e) { + throw new RuntimeException("error checking if xpack is available", e); } - return true; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java new file mode 100644 index 0000000000000..9d2d91790c7c2 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.test.rest.yaml.section; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +public class ContainsAssertion extends Assertion { + public static ContainsAssertion parse(XContentParser parser) throws IOException { + XContentLocation location = parser.getTokenLocation(); + Tuple stringObjectTuple = ParserUtils.parseTuple(parser); + return new ContainsAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); + } + + private static final Logger logger = Loggers.getLogger(ContainsAssertion.class); + + public ContainsAssertion(XContentLocation location, String field, Object expectedValue) { + super(location, field, expectedValue); + } + + @Override + protected void doAssert(Object actualValue, Object expectedValue) { + // add support for matching objects ({a:b}) against list of objects ([ {a:b, c:d} ]) + if(expectedValue instanceof Map && actualValue instanceof List) { + logger.trace("assert that [{}] contains [{}]", actualValue, expectedValue); + Map expectedMap = (Map) expectedValue; + List actualList = (List) actualValue; + List> actualValues = actualList.stream() + .filter(each -> each instanceof Map) + .map((each -> (Map) each)) + .filter(each -> each.keySet().containsAll(expectedMap.keySet())) + .collect(Collectors.toList()); + assertThat( + getField() + " expected to be a list with at least one object that has keys: " + + expectedMap.keySet() + " but it 
was " + actualList, + actualValues, + is(not(empty())) + ); + assertTrue( + getField() + " expected to be a list with at least on object that matches " + expectedMap + + " but was " + actualValues, + actualValues.stream() + .anyMatch(each -> each.entrySet().containsAll(expectedMap.entrySet())) + ); + } else { + fail("'contains' only supports checking an object against a list of objects"); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 8697b0bedcdf5..4e46a9ec89fd1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -449,6 +449,24 @@ public void select(Iterable nodes) { lhs.select(nodes); } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ComposeNodeSelector that = (ComposeNodeSelector) o; + return Objects.equals(lhs, that.lhs) && + Objects.equals(rhs, that.rhs); + } + + @Override + public int hashCode() { + return Objects.hash(lhs, rhs); + } + @Override public String toString() { // . 
as in haskell's "compose" operator diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java index ce5ea1c1cde06..ff02d6d16aa4a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java @@ -47,6 +47,7 @@ public interface ExecutableSection { new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("gte"), GreaterThanEqualToAssertion::parse), new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("lt"), LessThanAssertion::parse), new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("lte"), LessThanOrEqualToAssertion::parse), + new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("contains"), ContainsAssertion::parse), new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("length"), LengthAssertion::parse))); /** diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 0b676e1403481..dd8dd5f81ffc9 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -823,7 +823,7 @@ public void handleException(TransportException exp) { }); try { - StringMessageResponse message = res.txGet(); + res.txGet(); fail("exception should be thrown"); } catch (Exception e) { assertThat(e, instanceOf(ReceiveTimeoutTransportException.class)); @@ -939,8 +939,8 @@ public void handleException(TransportException exp) { } public void testTracerLog() throws InterruptedException { - TransportRequestHandler handler = (request, 
channel, task) -> channel.sendResponse(new StringMessageResponse("")); - TransportRequestHandler handlerWithError = new TransportRequestHandler() { + TransportRequestHandler handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse("")); + TransportRequestHandler handlerWithError = new TransportRequestHandler() { @Override public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception { if (request.timeout() > 0) { @@ -952,7 +952,7 @@ public void messageReceived(StringMessageRequest request, TransportChannel chann }; final Semaphore requestCompleted = new Semaphore(0); - TransportResponseHandler noopResponseHandler = new TransportResponseHandler() { + TransportResponseHandler noopResponseHandler = new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java index fcef74678359e..ddf153ff44f5c 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java @@ -134,6 +134,22 @@ public void testParseMatchArray() throws Exception { assertThat(strings.get(1).toString(), equalTo("test_percolator_2")); } + @SuppressWarnings("unchecked") + public void testParseContains() throws Exception { + parser = createParser(YamlXContent.yamlXContent, + "{testKey: { someKey: someValue } }" + ); + + ContainsAssertion containsAssertion = ContainsAssertion.parse(parser); + assertThat(containsAssertion, notNullValue()); + assertThat(containsAssertion.getField(), equalTo("testKey")); + assertThat(containsAssertion.getExpectedValue(), instanceOf(Map.class)); + assertThat( + ((Map) containsAssertion.getExpectedValue()).get("someKey"), + equalTo("someValue") + ); + } + 
@SuppressWarnings("unchecked") public void testParseMatchSourceValues() throws Exception { parser = createParser(YamlXContent.yamlXContent, diff --git a/x-pack/docs/en/watcher/encrypting-data.asciidoc b/x-pack/docs/en/watcher/encrypting-data.asciidoc index 166ef6f14d760..9319c9f793870 100644 --- a/x-pack/docs/en/watcher/encrypting-data.asciidoc +++ b/x-pack/docs/en/watcher/encrypting-data.asciidoc @@ -6,6 +6,12 @@ information or details about your SMTP email service. You can encrypt this data by generating a key and adding some secure settings on each node in your cluster. +Every `password` field that is used in your watch within a HTTP basic +authentication block - for example within a webhook, a HTTP input or when using +the reporting email attachment - will not be stored as plain text anymore. Also +be aware, that there is no way to configure your own fields in a watch to be +encrypted. + To encrypt sensitive data in {watcher}: . Use the {ref}/syskeygen.html[elasticsearch-syskeygen] command to create a system key file. 
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index ac423c4281138..3822ef1d4d584 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -192,3 +192,7 @@ integTestCluster { return tmpFile.exists() } } +if (integTestCluster.distribution.startsWith("oss-") == false) { + integTest.enabled = false +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 0bf6601593dee..d3ddac3289999 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; @@ -157,7 +158,6 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPlugin { - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") static Optional X_PACK_FEATURE = Optional.of("x-pack"); @Override @@ -205,7 +205,7 @@ static Settings additionalSettings(final Settings settings, final boolean enable } @Override - public List getClientActions() { + public List> getClientActions() { return Arrays.asList( // deprecation DeprecationInfoAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 920081572cfc7..d14c72383d6a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -9,9 +9,9 @@ import org.apache.lucene.util.SetOnce; import 
org.elasticsearch.SpecialPermission; import org.elasticsearch.Version; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; @@ -261,8 +261,8 @@ public Collection createComponents(Client client, ClusterService cluster } @Override - public List getClientActions() { - List actions = new ArrayList<>(); + public List> getClientActions() { + List> actions = new ArrayList<>(); actions.addAll(licensing.getClientActions()); actions.addAll(super.getClientActions()); return actions; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java index 3618b2de4080b..8c6d82f718735 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java @@ -24,5 +24,6 @@ protected ActionFactory(Logger actionLogger) { /** * Parses the given xcontent and creates a concrete action */ - public abstract ExecutableAction parseExecutable(String watchId, String actionId, XContentParser parser) throws IOException; + public abstract ExecutableAction parseExecutable(String watchId, String actionId, XContentParser parser) + throws IOException; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java index 47d3500f2e920..f2cdc63c6e94c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java @@ -40,14 +40,14 @@ public class ActionWrapper implements ToXContentObject { @Nullable private final ExecutableCondition condition; @Nullable - private final ExecutableTransform transform; + private final ExecutableTransform transform; private final ActionThrottler throttler; - private final ExecutableAction action; + private final ExecutableAction action; public ActionWrapper(String id, ActionThrottler throttler, @Nullable ExecutableCondition condition, - @Nullable ExecutableTransform transform, - ExecutableAction action) { + @Nullable ExecutableTransform transform, + ExecutableAction action) { this.id = id; this.condition = condition; this.throttler = throttler; @@ -63,7 +63,7 @@ public ExecutableCondition condition() { return condition; } - public ExecutableTransform transform() { + public ExecutableTransform transform() { return transform; } @@ -71,7 +71,7 @@ public Throttler throttler() { return throttler; } - public ExecutableAction action() { + public ExecutableAction action() { return action; } @@ -196,9 +196,9 @@ static ActionWrapper parse(String watchId, String actionId, XContentParser parse assert parser.currentToken() == XContentParser.Token.START_OBJECT; ExecutableCondition condition = null; - ExecutableTransform transform = null; + ExecutableTransform transform = null; TimeValue throttlePeriod = null; - ExecutableAction action = null; + ExecutableAction action = null; String currentFieldName = null; XContentParser.Token token; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java index 23f357eb1885e..4be0cefe525e6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java @@ -20,18 +20,16 @@ public class MlRestTestStateCleaner { private final Logger logger; private final RestClient adminClient; - private final ESRestTestCase testCase; - public MlRestTestStateCleaner(Logger logger, RestClient adminClient, ESRestTestCase testCase) { + public MlRestTestStateCleaner(Logger logger, RestClient adminClient) { this.logger = logger; this.adminClient = adminClient; - this.testCase = testCase; } public void clearMlMetadata() throws IOException { deleteAllDatafeeds(); deleteAllJobs(); - // indices will be deleted by the ESIntegTestCase class + // indices will be deleted by the ESRestTestCase class } @SuppressWarnings("unchecked") @@ -41,7 +39,7 @@ private void deleteAllDatafeeds() throws IOException { final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest); @SuppressWarnings("unchecked") final List> datafeeds = - (List>) XContentMapValues.extractValue("datafeeds", testCase.entityAsMap(datafeedsResponse)); + (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); if (datafeeds == null) { return; } @@ -83,7 +81,7 @@ private void deleteAllJobs() throws IOException { final Response response = adminClient.performRequest(jobsRequest); @SuppressWarnings("unchecked") final List> jobConfigs = - (List>) XContentMapValues.extractValue("jobs", testCase.entityAsMap(response)); + (List>) XContentMapValues.extractValue("jobs", ESRestTestCase.entityAsMap(response)); if (jobConfigs == null) { return; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java index a9a8223863d72..9938f3a41962b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java @@ -29,18 +29,16 @@ public class RollupRestTestStateCleaner { private final Logger logger; private final RestClient adminClient; - private final ESRestTestCase testCase; - public RollupRestTestStateCleaner(Logger logger, RestClient adminClient, ESRestTestCase testCase) { + public RollupRestTestStateCleaner(Logger logger, RestClient adminClient) { this.logger = logger; this.adminClient = adminClient; - this.testCase = testCase; } public void clearRollupMetadata() throws Exception { deleteAllJobs(); waitForPendingTasks(); - // indices will be deleted by the ESIntegTestCase class + // indices will be deleted by the ESRestTestCase class } private void waitForPendingTasks() throws Exception { @@ -75,7 +73,7 @@ private void waitForPendingTasks() throws Exception { @SuppressWarnings("unchecked") private void deleteAllJobs() throws Exception { Response response = adminClient.performRequest("GET", "/_xpack/rollup/job/_all"); - Map jobs = testCase.entityAsMap(response); + Map jobs = ESRestTestCase.entityAsMap(response); @SuppressWarnings("unchecked") List> jobConfigs = (List>) XContentMapValues.extractValue("jobs", jobs); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 40ea8419765ec..16b62cc23de19 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -46,7 +46,7 @@ static Aggregations createAggs(List aggsList) { } @SuppressWarnings("unchecked") - static Histogram createHistogramAggregation(String name, List histogramBuckets) { + static Histogram createHistogramAggregation(String 
name, List histogramBuckets) { Histogram histogram = mock(Histogram.class); when((List)histogram.getBuckets()).thenReturn(histogramBuckets); when(histogram.getName()).thenReturn(name); @@ -72,7 +72,7 @@ static NumericMetricsAggregation.SingleValue createSingleValue(String name, doub static Terms createTerms(String name, Term... terms) { Terms termsAgg = mock(Terms.class); when(termsAgg.getName()).thenReturn(name); - List buckets = new ArrayList<>(); + List buckets = new ArrayList<>(); for (Term term: terms) { StringTerms.Bucket bucket = mock(StringTerms.Bucket.class); when(bucket.getKey()).thenReturn(term.key); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java index e3d67bb0bdb71..9e8d17e84b44a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java @@ -206,7 +206,7 @@ public void testDedicatedMlNode() throws Exception { assertBusy(() -> { ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId)); + PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId)); DiscoveryNode node = clusterState.nodes().resolveNode(task.getExecutorNode()); assertThat(node.getAttributes(), hasEntry(MachineLearning.ML_ENABLED_NODE_ATTR, "true")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java index 47a168aefad6b..f5a4e34bc67ec 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java @@ -172,7 +172,7 @@ void finishMock() { if (responses.size() > 0) { ActionFuture first = wrapResponse(responses.get(0)); if (responses.size() > 1) { - List rest = new ArrayList<>(); + List> rest = new ArrayList<>(); for (int i = 1; i < responses.size(); ++i) { rest.add(wrapResponse(responses.get(i))); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java index cedc65c2ee225..57e5f6cfdb3ff 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java @@ -229,6 +229,7 @@ private AutodetectProcess mockAutodetectProcessWithOutputStream() throws IOExcep return process; } + @SuppressWarnings("unchecked") private AutodetectCommunicator createAutodetectCommunicator(ExecutorService executorService, AutodetectProcess autodetectProcess, AutoDetectResultProcessor autoDetectResultProcessor, Consumer finishHandler) throws IOException { @@ -242,12 +243,13 @@ private AutodetectCommunicator createAutodetectCommunicator(ExecutorService exec new NamedXContentRegistry(Collections.emptyList()), executorService); } + @SuppressWarnings("unchecked") private AutodetectCommunicator createAutodetectCommunicator(AutodetectProcess autodetectProcess, AutoDetectResultProcessor autoDetectResultProcessor) throws IOException { ExecutorService executorService = mock(ExecutorService.class); when(executorService.submit(any(Callable.class))).thenReturn(mock(Future.class)); doAnswer(invocationOnMock -> { - 
Callable runnable = (Callable) invocationOnMock.getArguments()[0]; + Callable runnable = (Callable) invocationOnMock.getArguments()[0]; runnable.call(); return mock(Future.class); }).when(executorService).submit(any(Callable.class)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index fa41cf0918f71..a1b9aad452b9e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; @@ -42,6 +41,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; @@ -202,6 +202,7 @@ public void testOpenJob() { verify(jobTask).updatePersistentTaskState(eq(new JobTaskState(JobState.OPENED, 1L)), any()); } + 
@SuppressWarnings("unchecked") public void testOpenJob_exceedMaxNumJobs() { when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo")); when(jobManager.getJobOrThrowIfUnknown("bar")).thenReturn(createJobDetails("bar")); @@ -214,7 +215,7 @@ public void testOpenJob_exceedMaxNumJobs() { ThreadPool.Cancellable cancellable = mock(ThreadPool.Cancellable.class); when(threadPool.scheduleWithFixedDelay(any(), any(), any())).thenReturn(cancellable); ExecutorService executorService = mock(ExecutorService.class); - Future future = mock(Future.class); + Future future = mock(Future.class); when(executorService.submit(any(Callable.class))).thenReturn(future); when(threadPool.executor(anyString())).thenReturn(EsExecutors.newDirectExecutorService()); AutodetectProcess autodetectProcess = mock(AutodetectProcess.class); @@ -230,7 +231,6 @@ public void testOpenJob_exceedMaxNumJobs() { doReturn(executorService).when(manager).createAutodetectExecutorService(any()); doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") CheckedConsumer consumer = (CheckedConsumer) invocationOnMock.getArguments()[2]; consumer.accept(null); return null; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index 3ba3611293fdc..5abd701ce4b2e 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -415,7 +415,11 @@ private QueryBuilder createBoundaryQuery(Map position) { DateHistoGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHisto(); String fieldName = dateHisto.getField(); String rollupFieldName = fieldName + "." + DateHistogramAggregationBuilder.NAME; - long lowerBound = position != null ? 
(long) position.get(rollupFieldName) : 0; + long lowerBound = 0L; + if (position != null) { + Number value = (Number) position.get(rollupFieldName); + lowerBound = value.longValue(); + } assert lowerBound <= maxBoundary; final RangeQueryBuilder query = new RangeQueryBuilder(fieldName) .gte(lowerBound) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 19760ccab0202..09de32643ed93 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -403,7 +403,7 @@ private IllegalArgumentException illegalArgument(String message) { } private static String getAction(BulkItemRequest item) { - final DocWriteRequest docWriteRequest = item.request(); + final DocWriteRequest docWriteRequest = item.request(); switch (docWriteRequest.opType()) { case INDEX: case CREATE: diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 67e21aadcbceb..5d9176b18976e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -129,14 +129,14 @@ private static boolean isInternalAction(String action) { */ public static class AsyncAuthorizer { - private final ActionListener listener; + private final ActionListener listener; private final BiConsumer consumer; private final Authentication authentication; private volatile Role userRoles; private volatile Role runAsRoles; private CountDown countDown = new CountDown(2); // we expect only two 
responses!! - public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) { + public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) { this.consumer = consumer; this.listener = listener; this.authentication = authentication; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index 77bf8e6a4008e..7d4469133687e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -37,9 +37,9 @@ import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; +import static org.hamcrest.Matchers.nullValue; public class AuditTrailTests extends SecurityIntegTestCase { @@ -163,7 +163,7 @@ private Collection> getAuditEvents() throws Exception { .request(); request.indicesOptions().ignoreUnavailable(); - final PlainActionFuture>> listener = new PlainActionFuture(); + final PlainActionFuture>> listener = new PlainActionFuture<>(); ScrollHelper.fetchAllByEntity(client, request, listener, SearchHit::getSourceAsMap); return listener.get(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index cd685b8f34c28..bb32ed699950c 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -199,7 +199,6 @@ public void shutdownThreadpool() throws InterruptedException { } } - @SuppressWarnings("unchecked") public void testTokenFirstMissingSecondFound() throws Exception { when(firstRealm.token(threadContext)).thenReturn(null); when(secondRealm.token(threadContext)).thenReturn(token); @@ -227,7 +226,6 @@ public void testTokenMissing() throws Exception { verifyNoMoreInteractions(auditTrail); } - @SuppressWarnings("unchecked") public void testAuthenticateBothSupportSecondSucceeds() throws Exception { User user = new User("_username", "r1"); when(firstRealm.supports(token)).thenReturn(true); @@ -698,7 +696,7 @@ public void testRunAsLookupSameRealm() throws Exception { mockAuthenticate(secondRealm, token, user); mockRealmLookupReturnsNull(firstRealm, "run_as"); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"})); return null; }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -735,6 +733,7 @@ public void testRunAsLookupSameRealm() throws Exception { assertTrue(completed.get()); } + @SuppressWarnings("unchecked") public void testRunAsLookupDifferentRealm() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); @@ -742,7 +741,7 @@ public void testRunAsLookupDifferentRealm() throws Exception { when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = 
(ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"})); return null; }).when(firstRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -805,6 +804,7 @@ public void testRunAsWithEmptyRunAsUsername() throws Exception { } } + @SuppressWarnings("unchecked") public void testAuthenticateTransportDisabledRunAsUser() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); @@ -813,7 +813,7 @@ public void testAuthenticateTransportDisabledRunAsUser() throws Exception { mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); mockRealmLookupReturnsNull(firstRealm, "run_as"); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false)); return null; }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -833,7 +833,8 @@ public void testAuthenticateRestDisabledRunAsUser() throws Exception { mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); mockRealmLookupReturnsNull(firstRealm, "run_as"); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false)); return null; }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -984,9 +985,10 @@ void assertThreadContextContainsAuthentication(Authentication authentication) th assertThat(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY), equalTo((Object) authentication.encode())); } + @SuppressWarnings("unchecked") private void mockAuthenticate(Realm 
realm, AuthenticationToken token, User user) { doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; if (user == null) { listener.onResponse(AuthenticationResult.notHandled()); } else { @@ -1008,9 +1010,10 @@ private Authentication authenticateBlocking(String action, TransportMessage mess return future.actionGet(); } + @SuppressWarnings("unchecked") private static void mockRealmLookupReturnsNull(Realm realm, String username) { doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(null); return null; }).when(realm).lookupUser(eq(username), any(ActionListener.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index bcd31c32f7f78..11ee0a6a0012e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -168,6 +168,7 @@ public class AuthorizationServiceTests extends ESTestCase { private Map roleMap = new HashMap<>(); private CompositeRolesStore rolesStore; + @SuppressWarnings("unchecked") @Before public void setup() { rolesStore = mock(CompositeRolesStore.class); @@ -208,7 +209,7 @@ public void setup() { } private void authorize(Authentication authentication, String action, TransportRequest request) { - PlainActionFuture future = new PlainActionFuture(); + PlainActionFuture future = new PlainActionFuture<>(); AuthorizationUtils.AsyncAuthorizer authorizer = new AuthorizationUtils.AsyncAuthorizer(authentication, future, (userRoles, runAsRoles) -> { authorizationService.authorize(authentication, action, 
request, userRoles, runAsRoles); @@ -598,7 +599,6 @@ public void testAuditTrailIsRecordedWhenIndexWildcardThrowsError() { public void testRunAsRequestWithNoRolesUser() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(new User("run as me", null, new User("test user", "admin"))); - final User user = new User("run as me", null, new User("test user", "admin")); assertNotEquals(authentication.getUser().authenticatedUser(), authentication); assertThrowsAuthorizationExceptionRunAs( () -> authorize(authentication, "indices:a", request), diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 99a6e29e334f6..412c75f0e639c 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -252,7 +252,7 @@ public void cleanup() throws Exception { */ private void clearMlState() throws Exception { if (isMachineLearningTest()) { - new MlRestTestStateCleaner(logger, adminClient(), this).clearMlMetadata(); + new MlRestTestStateCleaner(logger, adminClient()).clearMlMetadata(); } } @@ -263,7 +263,7 @@ private void clearMlState() throws Exception { */ private void clearRollupState() throws Exception { if (isRollupTest()) { - new RollupRestTestStateCleaner(logger, adminClient(), this).clearRollupMetadata(); + new RollupRestTestStateCleaner(logger, adminClient()).clearRollupMetadata(); } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/90_ensure_watch_gets_overwritten_without_version.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/90_ensure_watch_gets_overwritten_without_version.yml new file mode 100644 index 0000000000000..4bea2f655e624 --- /dev/null +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/90_ensure_watch_gets_overwritten_without_version.yml @@ -0,0 +1,73 @@ +--- +"Test put watch api without version overwrites watch": + - do: + cluster.health: + wait_for_status: yellow + + - do: + xpack.watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "foo": "bar" + } + }, + "actions": { + "logging": { + "logging": { + "text": "yaml test" + } + } + } + } + - match: { _id: "my_watch" } + + - do: + xpack.watcher.get_watch: + id: "my_watch" + - match: { watch.input.simple.foo: "bar" } + + # change the simple input fields, then ensure the old + # field does not exist on get + - do: + xpack.watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "spam": "eggs" + } + }, + "actions": { + "logging": { + "logging": { + "text": "yaml test" + } + } + } + } + - match: { _id: "my_watch" } + + - do: + xpack.watcher.get_watch: + id: "my_watch" + - match: { watch.input.simple.spam: "eggs" } + - is_false: watch.input.simple.foo + diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml index b93f0b717233d..2aea0126e9e47 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml @@ -10,15 +10,15 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.13.name: x-pack-ccr } - - match: { nodes.$master.modules.14.name: x-pack-core } - - match: { nodes.$master.modules.15.name: x-pack-deprecation } - - match: { nodes.$master.modules.16.name: x-pack-graph } - - match: { nodes.$master.modules.17.name: x-pack-logstash } - - match: { nodes.$master.modules.18.name: x-pack-ml } - - match: { 
nodes.$master.modules.19.name: x-pack-monitoring } - - match: { nodes.$master.modules.20.name: x-pack-rollup } - - match: { nodes.$master.modules.21.name: x-pack-security } - - match: { nodes.$master.modules.22.name: x-pack-sql } - - match: { nodes.$master.modules.23.name: x-pack-upgrade } - - match: { nodes.$master.modules.24.name: x-pack-watcher } + - contains: { nodes.$master.modules: { name: x-pack-ccr } } + - contains: { nodes.$master.modules: { name: x-pack-core } } + - contains: { nodes.$master.modules: { name: x-pack-deprecation } } + - contains: { nodes.$master.modules: { name: x-pack-graph } } + - contains: { nodes.$master.modules: { name: x-pack-logstash } } + - contains: { nodes.$master.modules: { name: x-pack-ml } } + - contains: { nodes.$master.modules: { name: x-pack-monitoring } } + - contains: { nodes.$master.modules: { name: x-pack-rollup } } + - contains: { nodes.$master.modules: { name: x-pack-security } } + - contains: { nodes.$master.modules: { name: x-pack-sql } } + - contains: { nodes.$master.modules: { name: x-pack-upgrade } } + - contains: { nodes.$master.modules: { name: x-pack-watcher } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index 8e0fbcb7cb4fc..f3b77b922aa89 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -110,16 +110,6 @@ public Engine.Index preIndex(ShardId shardId, Engine.Index operation) { return operation; } - // the watch status is -1, in case a watch has been freshly stored and this save - // watch operation does not stem from an execution - // we dont need to update the trigger service, when the watch has been updated as - // part of an execution, so we can exit early - boolean 
isWatchExecutionOperation = watch.status().version() != -1; - if (isWatchExecutionOperation) { - logger.debug("not updating trigger for watch [{}], watch has been updated as part of an execution", watch.id()); - return operation; - } - boolean shouldBeTriggered = shardAllocationConfiguration.shouldBeTriggered(watch.id()); if (shouldBeTriggered) { if (watch.status().state().isActive()) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java index 460725c3dda98..732653d829307 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java @@ -33,7 +33,7 @@ public InputRegistry(Settings settings, Map factories) { * @param parser The parser containing the input definition * @return A new input instance from the parser */ - public ExecutableInput parse(String watchId, XContentParser parser) throws IOException { + public ExecutableInput parse(String watchId, XContentParser parser) throws IOException { String type = null; if (parser.currentToken() != XContentParser.Token.START_OBJECT) { @@ -42,7 +42,7 @@ public ExecutableInput parse(String watchId, XContentParser parser) throws IOExc } XContentParser.Token token; - ExecutableInput input = null; + ExecutableInput input = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { type = parser.currentName(); @@ -50,7 +50,7 @@ public ExecutableInput parse(String watchId, XContentParser parser) throws IOExc throw new ElasticsearchParseException("could not parse input for watch [{}]. 
expected field indicating the input type, " + "but found [{}] instead", watchId, token); } else if (token == XContentParser.Token.START_OBJECT) { - InputFactory factory = factories.get(type); + InputFactory factory = factories.get(type); if (factory == null) { throw new ElasticsearchParseException("could not parse input for watch [{}]. unknown input type [{}]", watchId, type); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java index 84efc21594365..5be1236574f37 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java @@ -7,6 +7,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -89,18 +91,29 @@ protected void doExecute(PutWatchRequest request, ActionListenerwrap(response -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, updateRequest, + ActionListener.wrap(response -> { + boolean created = response.getResult() == DocWriteResponse.Result.CREATED; + listener.onResponse(new PutWatchResponse(response.getId(), response.getVersion(), created)); + }, listener::onFailure), + client::update); + } else { + IndexRequest indexRequest = new IndexRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId()); + indexRequest.source(builder); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + 
executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, indexRequest, + ActionListener.wrap(response -> { boolean created = response.getResult() == DocWriteResponse.Result.CREATED; listener.onResponse(new PutWatchResponse(response.getId(), response.getVersion(), created)); }, listener::onFailure), - client::update); + client::index); + } } } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java index 7754e622d5a6b..a81868f05edfc 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.actions.Action; import org.elasticsearch.xpack.core.watcher.actions.ActionStatus; +import org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult; import org.elasticsearch.xpack.core.watcher.actions.ExecutableAction; @@ -22,7 +23,6 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; @@ -32,7 +32,8 @@ public class ActionWrapperTests extends ESTestCase { private DateTime now = DateTime.now(DateTimeZone.UTC); private Watch watch = mock(Watch.class); - private ExecutableAction executableAction = mock(ExecutableAction.class); + @SuppressWarnings("unchecked") + private ExecutableAction executableAction = mock(ExecutableAction.class); private 
ActionWrapper actionWrapper = new ActionWrapper("_action", null, NeverCondition.INSTANCE, null, executableAction); public void testThatUnmetActionConditionResetsAckStatus() throws Exception { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java index bc22d58917931..05256ba5fc476 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java @@ -57,7 +57,7 @@ public void testSingleActionAckThrottle() throws Exception { .trigger(schedule(interval("60m"))); AvailableAction availableAction = randomFrom(AvailableAction.values()); - Action.Builder action = availableAction.action(); + Action.Builder action = availableAction.action(); watchSourceBuilder.addAction("test_id", action); watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder.buildAsBytes(XContentType.JSON), @@ -98,7 +98,7 @@ public void testRandomMultiActionAckThrottle() throws Exception { Set ackingActions = new HashSet<>(); for (int i = 0; i < scaledRandomIntBetween(5,10); ++i) { AvailableAction availableAction = randomFrom(AvailableAction.values()); - Action.Builder action = availableAction.action(); + Action.Builder action = availableAction.action(); watchSourceBuilder.addAction("test_id" + i, action); if (randomBoolean()) { ackingActions.add("test_id" + i); @@ -352,7 +352,7 @@ public void testFailingActionDoesGetThrottled() throws Exception { enum AvailableAction { EMAIL { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { EmailTemplate.Builder emailBuilder = EmailTemplate.builder(); emailBuilder.from("test@test.com"); emailBuilder.to("test@test.com"); @@ -367,7 +367,7 @@ 
public String type() { }, WEBHOOK { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder("localhost", 1234) .path("/") .method(HttpMethod.GET); @@ -381,7 +381,7 @@ public String type() { }, LOGGING { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { return LoggingAction.builder(new TextTemplate("_logging")); } @@ -392,7 +392,7 @@ public String type() { }, INDEX { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { return IndexAction.builder("test_index", "test_type"); } @@ -402,7 +402,7 @@ public String type() { } }; - public abstract Action.Builder action() throws Exception; + public abstract Action.Builder action() throws Exception; public abstract String type(); } diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 1daae6dc9f50a..7f2706a773aa9 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -16,9 +16,6 @@ integTestRunner { 'index/10_with_id/Index with ID', 'indices.get_alias/10_basic/Get alias against closed indices', 'indices.get_alias/20_empty/Check empty aliases when getting all aliases via /_alias', - 'cat.templates/10_basic/No templates', - 'cat.templates/10_basic/Sort templates', - 'cat.templates/10_basic/Multiple template', ].join(',') systemProperty 'tests.rest.cluster.username', System.getProperty('tests.rest.cluster.username', 'test_user') diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 2502944a99691..5276abdbfb1d8 100644 --- 
a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -9,6 +9,7 @@ import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Booleans; @@ -20,7 +21,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; +import org.hamcrest.Matcher; import org.junit.Before; import java.io.IOException; @@ -38,6 +39,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -254,6 +256,71 @@ public void testWatcher() throws Exception { } } + /** + * Tests that a RollUp job created on a old cluster is correctly restarted after the upgrade. 
+ */ + public void testRollupAfterRestart() throws Exception { + assumeTrue("Rollup can be tested with 6.3.0 and onwards", oldClusterVersion.onOrAfter(Version.V_6_3_0)); + if (runningAgainstOldCluster) { + final int numDocs = 59; + final int year = randomIntBetween(1970, 2018); + + // index documents for the rollup job + final StringBuilder bulk = new StringBuilder(); + for (int i = 0; i < numDocs; i++) { + bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"doc\"}}\n"); + String date = String.format(Locale.ROOT, "%04d-01-01T00:%02d:00Z", year, i); + bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n"); + } + bulk.append("\r\n"); + + final Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + + // create the rollup job + final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test"); + createRollupJobRequest.setJsonEntity("{" + + "\"index_pattern\":\"rollup-*\"," + + "\"rollup_index\":\"results-rollup\"," + + "\"cron\":\"*/30 * * * * ?\"," + + "\"page_size\":100," + + "\"groups\":{" + + " \"date_histogram\":{" + + " \"field\":\"timestamp\"," + + " \"interval\":\"5m\"" + + " }" + + "}," + + "\"metrics\":[" + + " {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + + "]" + + "}"); + + Map createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest)); + assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + // start the rollup job + final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start"); + Map startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest)); + assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); + + assertRollUpJob("rollup-job-test"); + + } else { + + final Request clusterHealthRequest = new Request("GET", "/_cluster/health"); + 
clusterHealthRequest.addParameter("wait_for_status", "yellow"); + clusterHealthRequest.addParameter("wait_for_no_relocating_shards", "true"); + if (oldClusterVersion.onOrAfter(Version.V_6_2_0)) { + clusterHealthRequest.addParameter("wait_for_no_initializing_shards", "true"); + } + Map clusterHealthResponse = toMap(client().performRequest(clusterHealthRequest)); + assertThat(clusterHealthResponse.get("timed_out"), equalTo(Boolean.FALSE)); + + assertRollUpJob("rollup-job-test"); + } + } + public void testSqlFailsOnIndexWithTwoTypes() throws IOException { // TODO this isn't going to trigger until we backport to 6.1 assumeTrue("It is only possible to build an index that sql doesn't like before 6.0.0", @@ -393,43 +460,6 @@ private void waitForHits(String indexName, int expectedHits) throws Exception { }, 30, TimeUnit.SECONDS); } - @SuppressWarnings("unchecked") - private void waitForMonitoringTemplates() throws Exception { - assertBusy(() -> { - final Map templates = toMap(client().performRequest("GET", "/_template/.monitoring-*")); - - // in earlier versions, we published legacy templates in addition to the current ones to support transitioning - assertThat(templates.size(), greaterThanOrEqualTo(MonitoringTemplateUtils.TEMPLATE_IDS.length)); - - // every template should be updated to whatever the current version is - for (final String templateId : MonitoringTemplateUtils.TEMPLATE_IDS) { - final String templateName = MonitoringTemplateUtils.templateName(templateId); - final Map template = (Map) templates.get(templateName); - - assertThat(template.get("version"), is(MonitoringTemplateUtils.LAST_UPDATED_VERSION)); - } - }, 30, TimeUnit.SECONDS); - } - - @SuppressWarnings("unchecked") - private void waitForClusterStats(final String expectedVersion) throws Exception { - assertBusy(() -> { - final Map params = new HashMap<>(3); - params.put("q", "type:cluster_stats"); - params.put("size", "1"); - params.put("sort", "timestamp:desc"); - - final Map response = 
toMap(client().performRequest("GET", "/.monitoring-es-*/_search", params)); - final Map hits = (Map) response.get("hits"); - - assertThat("No cluster_stats documents found.", (int)hits.get("total"), greaterThanOrEqualTo(1)); - - final Map hit = (Map) ((List) hits.get("hits")).get(0); - final Map source = (Map) hit.get("_source"); - assertThat(source.get("version"), is(expectedVersion)); - }, 30, TimeUnit.SECONDS); - } - static Map toMap(Response response) throws IOException { return toMap(EntityUtils.toString(response.getEntity())); } @@ -492,4 +522,48 @@ private void assertRoleInfo(final String role) throws Exception { assertNotNull(response.get("cluster")); assertNotNull(response.get("indices")); } + + @SuppressWarnings("unchecked") + private void assertRollUpJob(final String rollupJob) throws Exception { + final Matcher expectedStates = anyOf(equalTo("indexing"), equalTo("started")); + waitForRollUpJob(rollupJob, expectedStates); + + // check that the rollup job is started using the RollUp API + final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); + Map getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest)); + assertThat(ObjectPath.eval("jobs.0.status.job_state", getRollupJobResponse), expectedStates); + + // check that the rollup job is started using the Tasks API + final Request taskRequest = new Request("GET", "_tasks"); + taskRequest.addParameter("detailed", "true"); + taskRequest.addParameter("actions", "xpack/rollup/*"); + Map taskResponse = toMap(client().performRequest(taskRequest)); + Map taskResponseNodes = (Map) taskResponse.get("nodes"); + Map taskResponseNode = (Map) taskResponseNodes.values().iterator().next(); + Map taskResponseTasks = (Map) taskResponseNode.get("tasks"); + Map taskResponseStatus = (Map) taskResponseTasks.values().iterator().next(); + assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), expectedStates); + + // check that the rollup job is started using the 
Cluster State API + final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata"); + Map clusterStateResponse = toMap(client().performRequest(clusterStateRequest)); + Map rollupJobTask = ObjectPath.eval("metadata.persistent_tasks.tasks.0", clusterStateResponse); + assertThat(ObjectPath.eval("id", rollupJobTask), equalTo("rollup-job-test")); + + // Persistent task state field has been renamed in 6.4.0 from "status" to "state" + final String stateFieldName = (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_4_0)) ? "status" : "state"; + + final String jobStateField = "task.xpack/rollup/job." + stateFieldName + ".job_state"; + assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + rollupJobTask, + ObjectPath.eval(jobStateField, rollupJobTask), expectedStates); + } + + private void waitForRollUpJob(final String rollupJob, final Matcher expectedStates) throws Exception { + assertBusy(() -> { + final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); + Response getRollupJobResponse = client().performRequest(getRollupJobRequest); + assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); + assertThat(ObjectPath.eval("jobs.0.status.job_state", toMap(getRollupJobResponse)), expectedStates); + }, 30L, TimeUnit.SECONDS); + } } diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 6731e27aaac19..54d8090a7a421 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -802,7 +802,7 @@ public static void openJob(RestClient client, String jobId) throws IOException { @After public void clearMlState() 
throws Exception { - new MlRestTestStateCleaner(logger, adminClient(), this).clearMlMetadata(); + new MlRestTestStateCleaner(logger, adminClient()).clearMlMetadata(); XPackRestTestHelper.waitForPendingTasks(adminClient()); } diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 114fbdd4e5dd3..6713e66692ded 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -676,7 +676,7 @@ private static String responseEntityToString(Response response) throws IOExcepti @After public void clearMlState() throws Exception { - new MlRestTestStateCleaner(logger, adminClient(), this).clearMlMetadata(); + new MlRestTestStateCleaner(logger, adminClient()).clearMlMetadata(); XPackRestTestHelper.waitForPendingTasks(adminClient()); } }