diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index de2ac130f79e1..da21f6d5ecd77 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -105,6 +105,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -1092,6 +1093,13 @@ static Request xPackInfo(XPackInfoRequest infoRequest) { return request; } + static Request xpackUsage(XPackUsageRequest usageRequest) { + Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); + Params parameters = new Params(request); + parameters.withMasterTimeout(usageRequest.masterNodeTimeout()); + return request; + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index fa147a338de0a..785469673747c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -174,7 +174,7 @@ public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryReques * See Snapshot and Restore * API on elastic.co */ - public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapshotRequest, RequestOptions options) + public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, CreateSnapshotResponse::fromXContent, emptySet()); @@ -186,7 +186,7 @@ public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapsho * See Snapshot and Restore * API on elastic.co */ - public void createSnapshotAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options, + public void createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, CreateSnapshotResponse::fromXContent, listener, emptySet()); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index 5942bfa35a477..a497619b987bd 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -22,6 +22,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import 
org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; @@ -70,4 +72,25 @@ public void infoAsync(XPackInfoRequest request, RequestOptions options, restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options, XPackInfoResponse::fromXContent, listener, emptySet()); } + + /** + * Fetch usage information about X-Pack features from the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xpackUsage, options, + XPackUsageResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously fetch usage information about X-Pack features from the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options, + XPackUsageResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 7ec2ee80f04ac..5dd288e4398d7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -57,8 +57,8 @@ private PutRepositoryResponse createTestRepository(String repository, String typ private CreateSnapshotResponse createTestSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException { // assumes the repository already exists - return execute(createSnapshotRequest, highLevelClient().snapshot()::createSnapshot, - highLevelClient().snapshot()::createSnapshotAsync); + return execute(createSnapshotRequest, highLevelClient().snapshot()::create, + highLevelClient().snapshot()::createAsync); } public void testCreateRepository() throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index 639a5ce7ceef6..f27949e05f909 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -35,12 +35,17 @@ import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; import java.util.EnumSet; +import java.util.Map; import 
java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.is; + /** * Documentation for miscellaneous APIs in the high level java client. * Code wrapped in {@code tag} and {@code end} tags is included in the docs. @@ -130,6 +135,50 @@ public void onFailure(Exception e) { } } + public void testXPackUsage() throws Exception { + RestHighLevelClient client = highLevelClient(); + { + //tag::x-pack-usage-execute + XPackUsageRequest request = new XPackUsageRequest(); + XPackUsageResponse response = client.xpack().usage(request, RequestOptions.DEFAULT); + //end::x-pack-usage-execute + + //tag::x-pack-usage-response + Map> usages = response.getUsages(); + Map monitoringUsage = usages.get("monitoring"); + assertThat(monitoringUsage.get("available"), is(true)); + assertThat(monitoringUsage.get("enabled"), is(true)); + assertThat(monitoringUsage.get("collection_enabled"), is(false)); + //end::x-pack-usage-response + } + { + XPackUsageRequest request = new XPackUsageRequest(); + // tag::x-pack-usage-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(XPackUsageResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-usage-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-usage-execute-async + client.xpack().usageAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-usage-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testInitializationFromClientBuilder() throws IOException { //tag::rest-high-level-client-init RestHighLevelClient client = new RestHighLevelClient( diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 6c78c88eeec66..780f315876996 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -337,8 +337,9 @@ public void testSearchRequestAggregations() throws IOException { Range range = aggregations.get("by_company"); // <1> // end::search-request-aggregations-get-wrongCast } catch (ClassCastException ex) { - assertEquals("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms" - + " cannot be cast to org.elasticsearch.search.aggregations.bucket.range.Range", ex.getMessage()); + String message = ex.getMessage(); + assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms")); + assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.range.Range")); } assertEquals(3, elasticBucket.getDocCount()); assertEquals(30, avg, 0.0); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 48d01963e236d..d454b04718382 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -420,7 +420,7 @@ public void testSnapshotCreate() throws IOException { // end::create-snapshot-request-waitForCompletion // tag::create-snapshot-execute - CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT); + CreateSnapshotResponse response = client.snapshot().create(request, RequestOptions.DEFAULT); // end::create-snapshot-execute // tag::create-snapshot-response @@ -428,6 +428,12 @@ public void testSnapshotCreate() throws IOException { // end::create-snapshot-response assertEquals(RestStatus.OK, status); + + // tag::create-snapshot-response-snapshot-info + SnapshotInfo snapshotInfo = response.getSnapshotInfo(); // <1> + // end::create-snapshot-response-snapshot-info + + assertNotNull(snapshotInfo); } public void testSnapshotCreateAsync() throws InterruptedException { @@ -455,7 +461,7 @@ public void onFailure(Exception exception) { listener = new LatchedActionListener<>(listener, latch); // tag::create-snapshot-execute-async - client.snapshot().createSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.snapshot().createAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::create-snapshot-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc new file mode 100644 index 0000000000000..0927ae71c0bf5 --- /dev/null +++ b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc @@ -0,0 +1,54 @@ +[[java-rest-high-x-pack-usage]] +=== X-Pack Usage API + +[[java-rest-high-x-pack-usage-execution]] +==== Execution + +Detailed information about the usage of features from {xpack} can be +retrieved using the `usage()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-info-response]] +==== Response + +The returned `XPackUsageResponse` contains a `Map` keyed by feature name. +Every feature map has an `available` key, indicating whether that +feature is available given the current license, and an `enabled` key, +indicating whether that feature is currently enabled. Other keys +are specific to each feature. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-response] +-------------------------------------------------- + +[[java-rest-high-x-pack-usage-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute-async] +-------------------------------------------------- +<1> The call to execute the usage api and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. 
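
Putting the pieces together, a minimal sketch of the asynchronous flow could look like the following. This inline snippet is illustrative only (it is not pulled from the test sources like the tagged snippets in this page) and assumes nothing beyond the `usageAsync` method and the `getUsages()` map described above:

["source","java"]
--------------------------------------------------
XPackUsageRequest request = new XPackUsageRequest();
client.xpack().usageAsync(request, RequestOptions.DEFAULT, new ActionListener<XPackUsageResponse>() {
    @Override
    public void onResponse(XPackUsageResponse response) {
        // each entry maps a feature name (for example "monitoring") to its usage details
        for (Map.Entry<String, Map<String, Object>> usage : response.getUsages().entrySet()) {
            Object available = usage.getValue().get("available"); // feature allowed by the current license
            Object enabled = usage.getValue().get("enabled");     // feature currently switched on
            // react to the feature's state here
        }
    }

    @Override
    public void onFailure(Exception e) {
        // handle the failure, for example by logging the exception
    }
});
--------------------------------------------------
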
+ +A typical listener for `XPackUsageResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc index dbd31380a9b4b..971a6ee486711 100644 --- a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc +++ b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc @@ -73,11 +73,22 @@ include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-r [[java-rest-high-snapshot-create-snapshot-sync]] ==== Synchronous Execution +Execute a `CreateSnapshotRequest` synchronously to receive a `CreateSnapshotResponse`. + ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute] -------------------------------------------------- +Retrieve the `SnapshotInfo` from a `CreateSnapshotResponse` when the snapshot is fully created. +(The `waitForCompletion` parameter is `true`). + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-response-snapshot-info] +-------------------------------------------------- +<1> The `SnapshotInfo` object. + [[java-rest-high-snapshot-create-snapshot-async]] ==== Asynchronous Execution diff --git a/docs/reference/how-to/recipes.asciidoc b/docs/reference/how-to/recipes.asciidoc index e798f8819d000..451e192ad6ad2 100644 --- a/docs/reference/how-to/recipes.asciidoc +++ b/docs/reference/how-to/recipes.asciidoc @@ -3,8 +3,8 @@ This section includes a few recipes to help with common problems: -* mixing-exact-search-with-stemming -* consistent-scoring +* <> +* <> include::recipes/stemming.asciidoc[] include::recipes/scoring.asciidoc[] diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index ad574115208da..13ec666381529 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -26,7 +26,7 @@ import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.IngestScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptService; @@ -71,10 +71,8 @@ public final class ScriptProcessor extends AbstractProcessor { */ @Override public void execute(IngestDocument document) { - ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.INGEST_CONTEXT); - ExecutableScript executableScript = factory.newInstance(script.getParams()); - executableScript.setNextVar("ctx", document.getSourceAndMetadata()); - executableScript.run(); + IngestScript.Factory factory = 
scriptService.compile(script, IngestScript.CONTEXT); + factory.newInstance(script.getParams()).execute(document.getSourceAndMetadata()); } @Override @@ -144,7 +142,7 @@ public ScriptProcessor create(Map registry, String pr // verify script is able to be compiled before successfully creating processor. try { - scriptService.compile(script, ExecutableScript.INGEST_CONTEXT); + scriptService.compile(script, IngestScript.CONTEXT); } catch (ScriptException e) { throw newConfigurationException(TYPE, processorTag, scriptPropertyUsed, e); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java index 9658637f16444..8c3976d2b175c 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java @@ -58,9 +58,7 @@ protected boolean ignoreExternalCluster() { public static class CustomScriptPlugin extends MockScriptPlugin { @Override protected Map, Object>> pluginScripts() { - return Collections.singletonMap("my_script", script -> { - @SuppressWarnings("unchecked") - Map ctx = (Map) script.get("ctx"); + return Collections.singletonMap("my_script", ctx -> { ctx.put("z", 0); return null; }); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index 1004a41bcc592..72bc337e9c9f7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -19,22 +19,22 @@ package org.elasticsearch.ingest.common; +import java.util.Collections; import java.util.HashMap; import java.util.Map; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.core.Is.is; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class ScriptProcessorTests extends ESTestCase { @@ -42,24 +42,28 @@ public void testScripting() throws Exception { int randomBytesIn = randomInt(); int randomBytesOut = randomInt(); int randomBytesTotal = randomBytesIn + randomBytesOut; - - ScriptService scriptService = mock(ScriptService.class); - Script script = mockScript("_script"); - ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class); - ExecutableScript executableScript = mock(ExecutableScript.class); - when(scriptService.compile(script, ExecutableScript.INGEST_CONTEXT)).thenReturn(factory); - when(factory.newInstance(any())).thenReturn(executableScript); + String scriptName = "script"; + ScriptService scriptService = new ScriptService(Settings.builder().build(), + Collections.singletonMap( + Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine( + Script.DEFAULT_SCRIPT_LANG, + 
Collections.singletonMap( + scriptName, ctx -> { + ctx.put("bytes_total", randomBytesTotal); + return null; + } + ) + ) + ), + new HashMap<>(ScriptModule.CORE_CONTEXTS) + ); + Script script = new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap()); Map document = new HashMap<>(); document.put("bytes_in", randomInt()); document.put("bytes_out", randomInt()); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - doAnswer(invocationOnMock -> { - ingestDocument.setFieldValue("bytes_total", randomBytesTotal); - return null; - }).when(executableScript).run(); - ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), script, scriptService); processor.execute(ingestDocument); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index ae1944c9bd3a9..4560fd85a6589 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -366,44 +366,7 @@ private void writeNeedsMethods(Class clazz, ClassWriter writer, MainMethodRes } Object compile(Compiler compiler, String scriptName, String source, Map params, Object... args) { - final CompilerSettings compilerSettings; - - if (params.isEmpty()) { - // Use the default settings. - compilerSettings = defaultCompilerSettings; - } else { - // Use custom settings specified by params. - compilerSettings = new CompilerSettings(); - - // Except regexes enabled - this is a node level setting and can't be changed in the request. - compilerSettings.setRegexesEnabled(defaultCompilerSettings.areRegexesEnabled()); - - Map copy = new HashMap<>(params); - - String value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER); - if (value != null) { - compilerSettings.setMaxLoopCounter(Integer.parseInt(value)); - } - - value = copy.remove(CompilerSettings.PICKY); - if (value != null) { - compilerSettings.setPicky(Boolean.parseBoolean(value)); - } - - value = copy.remove(CompilerSettings.INITIAL_CALL_SITE_DEPTH); - if (value != null) { - compilerSettings.setInitialCallSiteDepth(Integer.parseInt(value)); - } - - value = copy.remove(CompilerSettings.REGEX_ENABLED.getKey()); - if (value != null) { - throw new IllegalArgumentException("[painless.regex.enabled] can only be set on node startup."); - } - - if (!copy.isEmpty()) { - throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy); - } - } + final CompilerSettings compilerSettings = buildCompilerSettings(params); // Check we ourselves are not being called by unprivileged code. SpecialPermission.check(); @@ -434,14 +397,33 @@ public Object run() { }, COMPILATION_CONTEXT); // Note that it is safe to catch any of the following errors since Painless is stateless. } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) { - throw convertToScriptException(scriptName == null ? source : scriptName, source, e); + throw convertToScriptException(source, e); } } void compile(Compiler compiler, Loader loader, MainMethodReserved reserved, String scriptName, String source, Map params) { - final CompilerSettings compilerSettings; + final CompilerSettings compilerSettings = buildCompilerSettings(params); + + try { + // Drop all permissions to actually compile the code itself. 
+ AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Void run() { + String name = scriptName == null ? source : scriptName; + compiler.compile(loader, reserved, name, source, compilerSettings); + + return null; + } + }, COMPILATION_CONTEXT); + // Note that it is safe to catch any of the following errors since Painless is stateless. + } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) { + throw convertToScriptException(source, e); + } + } + private CompilerSettings buildCompilerSettings(Map params) { + CompilerSettings compilerSettings; if (params.isEmpty()) { // Use the default settings. compilerSettings = defaultCompilerSettings; @@ -478,25 +460,10 @@ void compile(Compiler compiler, Loader loader, MainMethodReserved reserved, throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy); } } - - try { - // Drop all permissions to actually compile the code itself. - AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Void run() { - String name = scriptName == null ? source : scriptName; - compiler.compile(loader, reserved, name, source, compilerSettings); - - return null; - } - }, COMPILATION_CONTEXT); - // Note that it is safe to catch any of the following errors since Painless is stateless. - } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) { - throw convertToScriptException(scriptName == null ? source : scriptName, source, e); - } + return compilerSettings; } - private ScriptException convertToScriptException(String scriptName, String scriptSource, Throwable t) { + private ScriptException convertToScriptException(String scriptSource, Throwable t) { // create a script stack: this is just the script portion List scriptStack = new ArrayList<>(); for (StackTraceElement element : t.getStackTrace()) { @@ -507,7 +474,7 @@ private ScriptException convertToScriptException(String scriptName, String scrip scriptStack.add("<<< unknown portion of script >>>"); } else { offset--; // offset is 1 based, line numbers must be! - int startOffset = getPreviousStatement(scriptSource, offset); + int startOffset = getPreviousStatement(offset); int endOffset = getNextStatement(scriptSource, offset); StringBuilder snippet = new StringBuilder(); if (startOffset > 0) { @@ -535,7 +502,7 @@ private ScriptException convertToScriptException(String scriptName, String scrip } // very simple heuristic: +/- 25 chars. can be improved later. 
- private int getPreviousStatement(String scriptSource, int offset) { + private int getPreviousStatement(int offset) { return Math.max(0, offset - 25); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index cd4923704ff35..fda45eed2f310 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -42,9 +42,9 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.Strings.EMPTY_ARRAY; +import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; -import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; /** @@ -408,8 +408,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (indicesOptions != null) { indicesOptions.toXContent(builder, params); } - builder.field("wait_for_completion", waitForCompletion); - builder.field("master_node_timeout", masterNodeTimeout.toString()); builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index a2dc02c5c8299..d0a82e36a97da 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -21,14 +21,16 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotInfo.SnapshotInfoBuilder; import java.io.IOException; import java.util.Objects; @@ -38,6 +40,14 @@ */ public class CreateSnapshotResponse extends ActionResponse implements ToXContentObject { + private static final ObjectParser PARSER = + new ObjectParser<>(CreateSnapshotResponse.class.getName(), true, CreateSnapshotResponse::new); + + static { + PARSER.declareObject(CreateSnapshotResponse::setSnapshotInfoFromBuilder, + SnapshotInfo.SNAPSHOT_INFO_PARSER, new ParseField("snapshot")); + } + @Nullable private SnapshotInfo snapshotInfo; @@ -48,8 +58,8 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent CreateSnapshotResponse() { } - void setSnapshotInfo(SnapshotInfo snapshotInfo) { - this.snapshotInfo = snapshotInfo; + private void 
setSnapshotInfoFromBuilder(SnapshotInfoBuilder snapshotInfoBuilder) { + this.snapshotInfo = snapshotInfoBuilder.build(); } /** @@ -101,38 +111,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static CreateSnapshotResponse fromXContent(XContentParser parser) throws IOException { - CreateSnapshotResponse createSnapshotResponse = new CreateSnapshotResponse(); - - parser.nextToken(); // move to '{' - - if (parser.currentToken() != Token.START_OBJECT) { - throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['{']"); - } - - parser.nextToken(); // move to 'snapshot' || 'accepted' - - if ("snapshot".equals(parser.currentName())) { - createSnapshotResponse.snapshotInfo = SnapshotInfo.fromXContent(parser); - } else if ("accepted".equals(parser.currentName())) { - parser.nextToken(); // move to 'accepted' field value - - if (parser.booleanValue()) { - // ensure accepted is a boolean value - } - - parser.nextToken(); // move past 'true'/'false' - } else { - throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] expected ['snapshot', 'accepted']"); - } - - if (parser.currentToken() != Token.END_OBJECT) { - throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['}']"); - } - - parser.nextToken(); // move past '}' - - return createSnapshotResponse; + public static CreateSnapshotResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 7f4becc2d9afc..c707fed6ddf7b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestRequest; @@ -322,21 +323,6 @@ public static IndicesOptions fromMap(Map map, IndicesOptions def defaultSettings); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray("expand_wildcards"); - for (WildcardStates expandWildcard : expandWildcards) { - builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT)); - } - builder.endArray(); - builder.field("ignore_unavailable", ignoreUnavailable()); - builder.field("allow_no_indices", allowNoIndices()); - builder.field("forbid_aliases_to_multiple_indices", allowAliasesToMultipleIndices() == false); - builder.field("forbid_closed_indices", forbidClosedIndices()); - builder.field("ignore_aliases", ignoreAliases()); - return builder; - } - /** * Returns true if the name represents a valid name for one of the indices option * false otherwise @@ -366,6 +352,18 @@ public static IndicesOptions fromParameters(Object wildcardsString, Object ignor ); } + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startArray("expand_wildcards"); + for (WildcardStates expandWildcard : expandWildcards) { + 
builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT)); + } + builder.endArray(); + builder.field("ignore_unavailable", ignoreUnavailable()); + builder.field("allow_no_indices", allowNoIndices()); + return builder; + } + /** * @return indices options that requires every specified index to exist, expands wildcards only to open indices and * allows that no indices are resolved from wildcard expressions (not returning an error). diff --git a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java index e87b7cdf3890a..2f7a01c37980d 100644 --- a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java +++ b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java @@ -50,5 +50,4 @@ interface Factory { // TODO: remove these once each has its own script interface ScriptContext AGGS_CONTEXT = new ScriptContext<>("aggs_executable", Factory.class); ScriptContext UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class); - ScriptContext INGEST_CONTEXT = new ScriptContext<>("ingest", Factory.class); } diff --git a/server/src/main/java/org/elasticsearch/script/IngestScript.java b/server/src/main/java/org/elasticsearch/script/IngestScript.java new file mode 100644 index 0000000000000..f357394ed31f0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/IngestScript.java @@ -0,0 +1,52 @@ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import java.util.Map; + +/** + * A script used by the Ingest Script Processor. + */ +public abstract class IngestScript { + + public static final String[] PARAMETERS = { "ctx" }; + + /** The context used to compile {@link IngestScript} factories. */ + public static final ScriptContext CONTEXT = new ScriptContext<>("ingest", Factory.class); + + /** The generic runtime parameters for the script. */ + private final Map params; + + public IngestScript(Map params) { + this.params = params; + } + + /** Return the parameters for this script. 
*/ + public Map getParams() { + return params; + } + + public abstract void execute(Map ctx); + + public interface Factory { + IngestScript newInstance(Map params); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index f0e075eac7d93..695b19d88b2ff 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -48,7 +48,7 @@ public class ScriptModule { ExecutableScript.CONTEXT, ExecutableScript.AGGS_CONTEXT, ExecutableScript.UPDATE_CONTEXT, - ExecutableScript.INGEST_CONTEXT, + IngestScript.CONTEXT, FilterScript.CONTEXT, SimilarityScript.CONTEXT, SimilarityWeightScript.CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index a1f56a1e47376..bf3d337c49ec5 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -138,22 +138,6 @@ private void setShardFailures(List shardFailures) { this.shardFailures = shardFailures; } - private void ignoreVersion(String version) { - // ignore extra field - } - - private void ignoreStartTime(String startTime) { - // ignore extra field - } - - private void ignoreEndTime(String endTime) { - // ignore extra field - } - - private void ignoreDurationInMillis(long durationInMillis) { - // ignore extra field - } - public SnapshotInfo build() { SnapshotId snapshotId = new SnapshotId(snapshotName, snapshotUUID); @@ -195,10 +179,6 @@ private void setSuccessfulShards(int successfulShards) { int getSuccessfulShards() { return successfulShards; } - - private void ignoreFailedShards(int failedShards) { - // ignore extra field - } } public static final ObjectParser SNAPSHOT_INFO_PARSER = @@ -220,14 +200,9 @@ private void ignoreFailedShards(int failedShards) { SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); SNAPSHOT_INFO_PARSER.declareObjectArray(SnapshotInfoBuilder::setShardFailures, SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER, new ParseField(FAILURES)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreVersion, new ParseField(VERSION)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreStartTime, new ParseField(START_TIME)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreEndTime, new ParseField(END_TIME)); - SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::ignoreDurationInMillis, new ParseField(DURATION_IN_MILLIS)); SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL)); SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new ParseField(SUCCESSFUL)); - SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::ignoreFailedShards, new ParseField(FAILED)); } private final SnapshotId snapshotId; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java index 1bde8ab572b72..0b598be6849cb 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java @@ -102,8 +102,8 @@ public void testToXContent() throws 
IOException { NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); Map map = parser.mapOrdered(); CreateSnapshotRequest processed = new CreateSnapshotRequest((String)map.get("repository"), (String)map.get("snapshot")); - processed.waitForCompletion((boolean)map.getOrDefault("wait_for_completion", false)); - processed.masterNodeTimeout((String)map.getOrDefault("master_node_timeout", "30s")); + processed.waitForCompletion(original.waitForCompletion()); + processed.masterNodeTimeout(original.masterNodeTimeout()); processed.source(map); assertEquals(original, processed); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java index bbfc9755bf215..bbb11fc6feef0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java @@ -40,7 +40,7 @@ protected CreateSnapshotResponse doParseInstance(XContentParser parser) throws I @Override protected boolean supportsUnknownFields() { - return false; + return true; } @Override @@ -63,9 +63,7 @@ protected CreateSnapshotResponse createTestInstance() { boolean globalState = randomBoolean(); - CreateSnapshotResponse response = new CreateSnapshotResponse(); - response.setSnapshotInfo( + return new CreateSnapshotResponse( new SnapshotInfo(snapshotId, indices, startTime, reason, endTime, totalShards, shardFailures, globalState)); - return response; } } diff --git a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java index 84904107c2316..e36fe90ea92bb 100644 --- a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java @@ -324,8 +324,5 @@ public void testToXContent() throws IOException { } assertEquals(map.get("ignore_unavailable"), options.contains(Option.IGNORE_UNAVAILABLE)); assertEquals(map.get("allow_no_indices"), options.contains(Option.ALLOW_NO_INDICES)); - assertEquals(map.get("forbid_aliases_to_multiple_indices"), options.contains(Option.FORBID_ALIASES_TO_MULTIPLE_INDICES)); - assertEquals(map.get("forbid_closed_indices"), options.contains(Option.FORBID_CLOSED_INDICES)); - assertEquals(map.get("ignore_aliases"), options.contains(Option.IGNORE_ALIASES)); } } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index b35fcbcc03c17..585f860165160 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -168,7 +168,7 @@ public void testAllowAllScriptContextSettings() throws IOException { assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT); assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT); assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT); - assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.INGEST_CONTEXT); + assertCompileAccepted("painless", "script", ScriptType.INLINE, IngestScript.CONTEXT); } public void 
testAllowSomeScriptTypeSettings() throws IOException { @@ -209,13 +209,13 @@ public void testAllowNoScriptContextSettings() throws IOException { } public void testCompileNonRegisteredContext() throws IOException { - contexts.remove(ExecutableScript.INGEST_CONTEXT.name); + contexts.remove(IngestScript.CONTEXT.name); buildScriptService(Settings.EMPTY); String type = scriptEngine.getType(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - scriptService.compile(new Script(ScriptType.INLINE, type, "test", Collections.emptyMap()), ExecutableScript.INGEST_CONTEXT)); - assertThat(e.getMessage(), containsString("script context [" + ExecutableScript.INGEST_CONTEXT.name + "] not supported")); + scriptService.compile(new Script(ScriptType.INLINE, type, "test", Collections.emptyMap()), IngestScript.CONTEXT)); + assertThat(e.getMessage(), containsString("script context [" + IngestScript.CONTEXT.name + "] not supported")); } public void testCompileCountedInCompilationStats() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index e608bd13d2559..8e40e4bcf1468 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -88,6 +88,14 @@ public T compile(String name, String source, ScriptContext context, Map new IngestScript(parameters) { + @Override + public void execute(Map ctx) { + script.apply(ctx); + } + }; + return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(TemplateScript.class)) { TemplateScript.Factory factory = vars -> { // TODO: need a better way to implement all these new contexts diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java index f3abad5e68bb3..6b7d5b96d2024 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java index 252283a1dfc7d..87033eac21aee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; public class XPackUsageAction extends Action { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java deleted file mode 100644 index 
d578249c147c3..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.action; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeRequest; - -public class XPackUsageRequest extends MasterNodeRequest { - - @Override - public ActionRequestValidationException validate() { - return null; - } - -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java index 789460f133969..92c2ba75ec170 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; public class XPackUsageRequestBuilder extends MasterNodeOperationRequestBuilder { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java index 3d82ac118f503..3e4e4a84d2f8e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java @@ -94,7 +94,7 @@ public static DateHistoGroupConfig.Builder getDateHisto() { if (ESTestCase.randomBoolean()) { dateHistoBuilder.setDelay(new DateHistogramInterval(randomPositiveTimeValue())); } - dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(1, 10 )); + dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(5, 10)); return dateHistoBuilder; } @@ -112,8 +112,8 @@ public static TermsGroupConfig.Builder getTerms() { } public static List getFields() { - return IntStream.range(0, ESTestCase.randomIntBetween(1,10)) - .mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(1,10)) + return IntStream.range(0, ESTestCase.randomIntBetween(1, 10)) + .mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(5, 10)) .collect(Collectors.toList()); } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java index 6784b00361bc1..49355d51495ec 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.action.XPackUsageRequest; +import 
org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle index 18ef7abee5c64..ff9c30ed9a934 100644 --- a/x-pack/plugin/rollup/build.gradle +++ b/x-pack/plugin/rollup/build.gradle @@ -1,6 +1,3 @@ -import com.carrotsearch.gradle.junit4.RandomizedTestingTask -import org.elasticsearch.gradle.BuildPlugin - evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' @@ -23,33 +20,8 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } -dependencyLicenses { - ignoreSha 'x-pack-core' -} - run { plugin xpackModule('core') } integTest.enabled = false - - -// Instead we create a separate task to run the -// tests based on ESIntegTestCase -task internalClusterTest(type: RandomizedTestingTask, - group: JavaBasePlugin.VERIFICATION_GROUP, - description: 'Multi-node tests', - dependsOn: test.dependsOn) { - configure(BuildPlugin.commonTestConfig(project)) - classpath = project.test.classpath - testClassesDirs = project.test.testClassesDirs - include '**/*IT.class' - systemProperty 'es.set.netty.runtime.available.processors', 'false' -} -check.dependsOn internalClusterTest -internalClusterTest.mustRunAfter test - -// also add an "alias" task to make typing on the command line easier task icTest { -task icTest { - dependsOn internalClusterTest -} diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java deleted file mode 100644 index 3f930cb42981d..0000000000000 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java +++ /dev/null @@ -1,497 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.rollup; - -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.analysis.common.CommonAnalysisPlugin; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.license.LicenseService; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.transport.Netty4Plugin; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction; -import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction; -import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction; -import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction; -import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; -import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction; -import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig; -import org.elasticsearch.xpack.core.rollup.job.GroupConfig; -import org.elasticsearch.xpack.core.rollup.job.IndexerState; -import org.elasticsearch.xpack.core.rollup.job.MetricConfig; -import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; -import org.hamcrest.Matchers; -import org.joda.time.DateTime; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; -import static org.hamcrest.core.IsEqual.equalTo; - -@ThreadLeakScope(ThreadLeakScope.Scope.NONE) -public class RollupIT extends ESIntegTestCase { - - private String taskId = "test-bigID"; - - @Override - protected boolean ignoreExternalCluster() { - return true; - } - - @Override - protected Collection> nodePlugins() { - return Arrays.asList(LocalStateRollup.class, CommonAnalysisPlugin.class, Netty4Plugin.class); - } - - @Override - protected Collection> transportClientPlugins() { - return nodePlugins(); - } - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - Settings.Builder builder = Settings.builder(); - builder.put(XPackSettings.ROLLUP_ENABLED.getKey(), true); - builder.put(XPackSettings.SECURITY_ENABLED.getKey(), false); - builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial"); - return 
builder.build(); - } - - @Override - protected Settings externalClusterClientSettings() { - return nodeSettings(0); - } - - @Override - protected Settings transportClientSettings() { - return Settings.builder().put(super.transportClientSettings()) - .put(XPackSettings.ROLLUP_ENABLED.getKey(), true) - .put(XPackSettings.SECURITY_ENABLED.getKey(), false) - .build(); - } - - @Before - public void createIndex() { - client().admin().indices().prepareCreate("test-1").addMapping("doc", "{\"doc\": {\"properties\": {" + - "\"date_histo\": {\"type\": \"date\"}, " + - "\"histo\": {\"type\": \"integer\"}, " + - "\"terms\": {\"type\": \"keyword\"}}}}", XContentType.JSON).get(); - client().admin().cluster().prepareHealth("test-1").setWaitForYellowStatus().get(); - - BulkRequestBuilder bulk = client().prepareBulk(); - Map source = new HashMap<>(3); - for (int i = 0; i < 20; i++) { - for (int j = 0; j < 20; j++) { - for (int k = 0; k < 20; k++) { - source.put("date_histo", new DateTime().minusDays(i).toString()); - source.put("histo", Integer.toString(j * 100)); - source.put("terms", Integer.toString(k * 100)); - source.put("foo", k); - bulk.add(new IndexRequest("test-1", "doc").source(source)); - source.clear(); - } - } - } - bulk.get(); - client().admin().indices().prepareRefresh("test-1").get(); - } - - public void testGetJob() throws ExecutionException, InterruptedException { - MetricConfig metricConfig = new MetricConfig.Builder() - .setField("foo") - .setMetrics(Arrays.asList("sum", "min", "max", "avg")) - .build(); - - DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder(); - datehistoGroupConfig.setField("date_histo"); - datehistoGroupConfig.setInterval(new DateHistogramInterval("1d")); - - GroupConfig.Builder groupConfig = new GroupConfig.Builder(); - groupConfig.setDateHisto(datehistoGroupConfig.build()); - - - RollupJobConfig.Builder config = new RollupJobConfig.Builder(); - config.setIndexPattern("test-1"); - config.setRollupIndex("rolled"); - config.setId("testGet"); - config.setGroupConfig(groupConfig.build()); - config.setMetricsConfig(Collections.singletonList(metricConfig)); - config.setCron("* * * * * ? 
*"); - config.setPageSize(10); - - PutRollupJobAction.Request request = new PutRollupJobAction.Request(); - request.setConfig(config.build()); - client().execute(PutRollupJobAction.INSTANCE, request).get(); - - GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testGet"); - GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get(); - assertThat(response.getJobs().size(), equalTo(1)); - assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testGet")); - } - - public void testIndexPattern() throws Exception { - MetricConfig metricConfig = new MetricConfig.Builder() - .setField("foo") - .setMetrics(Arrays.asList("sum", "min", "max", "avg")) - .build(); - - DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder(); - datehistoGroupConfig.setField("date_histo"); - datehistoGroupConfig.setInterval(new DateHistogramInterval("1d")); - - GroupConfig.Builder groupConfig = new GroupConfig.Builder(); - groupConfig.setDateHisto(datehistoGroupConfig.build()); - - - RollupJobConfig.Builder config = new RollupJobConfig.Builder(); - config.setIndexPattern("test-*"); - config.setId("testIndexPattern"); - config.setRollupIndex("rolled"); - config.setGroupConfig(groupConfig.build()); - config.setMetricsConfig(Collections.singletonList(metricConfig)); - config.setCron("* * * * * ? *"); - config.setPageSize(10); - - PutRollupJobAction.Request request = new PutRollupJobAction.Request(); - request.setConfig(config.build()); - client().execute(PutRollupJobAction.INSTANCE, request).get(); - - StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("testIndexPattern"); - StartRollupJobAction.Response startResponse = client().execute(StartRollupJobAction.INSTANCE, startRequest).get(); - Assert.assertThat(startResponse.isStarted(), equalTo(true)); - - // Make sure it started - ESTestCase.assertBusy(() -> { - RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern"); - if (rollupJobStatus == null) { - fail("null"); - } - - IndexerState state = rollupJobStatus.getIndexerState(); - assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING)); - }, 60, TimeUnit.SECONDS); - - // And wait for it to finish - ESTestCase.assertBusy(() -> { - RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern"); - if (rollupJobStatus == null) { - fail("null"); - } - - IndexerState state = rollupJobStatus.getIndexerState(); - assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null); - }, 60, TimeUnit.SECONDS); - - GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testIndexPattern"); - GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get(); - Assert.assertThat(response.getJobs().size(), equalTo(1)); - Assert.assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testIndexPattern")); - - GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices("rolled").get(); - Assert.assertThat(getIndexResponse.indices().length, Matchers.greaterThan(0)); - } - - public void testTwoJobsStartStopDeleteOne() throws Exception { - MetricConfig metricConfig = new MetricConfig.Builder() - .setField("foo") - .setMetrics(Arrays.asList("sum", "min", "max", "avg")) - .build(); - - DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder(); - datehistoGroupConfig.setField("date_histo"); - 
datehistoGroupConfig.setInterval(new DateHistogramInterval("1d")); - - GroupConfig.Builder groupConfig = new GroupConfig.Builder(); - groupConfig.setDateHisto(datehistoGroupConfig.build()); - - - RollupJobConfig.Builder config = new RollupJobConfig.Builder(); - config.setIndexPattern("test-1"); - config.setRollupIndex("rolled"); - config.setId("job1"); - config.setGroupConfig(groupConfig.build()); - config.setMetricsConfig(Collections.singletonList(metricConfig)); - config.setCron("* * * * * ? *"); - config.setPageSize(10); - - PutRollupJobAction.Request request = new PutRollupJobAction.Request(); - request.setConfig(config.build()); - client().execute(PutRollupJobAction.INSTANCE, request).get(); - - RollupJobConfig.Builder config2 = new RollupJobConfig.Builder(); - config2.setIndexPattern("test-1"); - config2.setRollupIndex("rolled"); - config2.setId("job2"); - config2.setGroupConfig(groupConfig.build()); - config2.setMetricsConfig(Collections.singletonList(metricConfig)); - config2.setCron("* * * * * ? *"); - config2.setPageSize(10); - - PutRollupJobAction.Request request2 = new PutRollupJobAction.Request(); - request2.setConfig(config2.build()); - client().execute(PutRollupJobAction.INSTANCE, request2).get(); - - StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("job1"); - StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get(); - Assert.assertThat(response.isStarted(), equalTo(true)); - - // Make sure it started - ESTestCase.assertBusy(() -> { - RollupJobStatus rollupJobStatus = getRollupJobStatus("job1"); - if (rollupJobStatus == null) { - fail("null"); - } - - IndexerState state = rollupJobStatus.getIndexerState(); - assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING)); - }, 60, TimeUnit.SECONDS); - - //but not the other task - ESTestCase.assertBusy(() -> { - RollupJobStatus rollupJobStatus = getRollupJobStatus("job2"); - - IndexerState state = rollupJobStatus.getIndexerState(); - assertTrue(state.equals(IndexerState.STOPPED)); - }, 60, TimeUnit.SECONDS); - - // Delete the task - DeleteRollupJobAction.Request deleteRequest = new DeleteRollupJobAction.Request("job1"); - DeleteRollupJobAction.Response deleteResponse = client().execute(DeleteRollupJobAction.INSTANCE, deleteRequest).get(); - Assert.assertTrue(deleteResponse.isAcknowledged()); - - // Make sure the first job's task is gone - ESTestCase.assertBusy(() -> { - RollupJobStatus rollupJobStatus = getRollupJobStatus("job1"); - assertTrue(rollupJobStatus == null); - }, 60, TimeUnit.SECONDS); - - // And that we don't see it in the GetJobs API - GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("job1"); - GetRollupJobsAction.Response getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get(); - Assert.assertThat(getResponse.getJobs().size(), equalTo(0)); - - // But make sure the other job is still there - getRequest = new GetRollupJobsAction.Request("job2"); - getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get(); - Assert.assertThat(getResponse.getJobs().size(), equalTo(1)); - Assert.assertThat(getResponse.getJobs().get(0).getJob().getId(), equalTo("job2")); - - // and still STOPPED - ESTestCase.assertBusy(() -> { - RollupJobStatus rollupJobStatus = getRollupJobStatus("job2"); - - IndexerState state = rollupJobStatus.getIndexerState(); - assertTrue(state.equals(IndexerState.STOPPED)); - }, 60, TimeUnit.SECONDS); - } - - public void testBig() throws 
Exception { - - client().admin().indices().prepareCreate("test-big") - .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " + - "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON) - .setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get(); - client().admin().cluster().prepareHealth("test-big").setWaitForYellowStatus().get(); - - client().admin().indices().prepareCreate("test-verify") - .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " + - "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON) - .setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get(); - client().admin().cluster().prepareHealth("test-verify").setWaitForYellowStatus().get(); - - BulkRequestBuilder bulk = client().prepareBulk(); - Map source = new HashMap<>(3); - - int numDays = 90; - int numDocsPerDay = 100; - - for (int i = 0; i < numDays; i++) { - DateTime ts = new DateTime().minusDays(i); - for (int j = 0; j < numDocsPerDay; j++) { - - int value = ESTestCase.randomIntBetween(0,100); - source.put("timestamp", ts.toString()); - source.put("thefield", value); - bulk.add(new IndexRequest("test-big", "test-big").source(source)); - bulk.add(new IndexRequest("test-verify", "test-big").source(source)); - source.clear(); - } - - bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - bulk.get(); - bulk = client().prepareBulk(); - logger.info("Day: [" + i + "]: " + ts.toString() + " [" + ts.getMillis() + "]" ); - } - - - client().admin().indices().prepareRefresh("test-big").get(); - client().admin().indices().prepareRefresh("test-verify").get(); - - MetricConfig metricConfig = new MetricConfig.Builder() - .setField("thefield") - .setMetrics(Arrays.asList("sum", "min", "max", "avg")) - .build(); - - DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder(); - datehistoGroupConfig.setField("timestamp"); - datehistoGroupConfig.setInterval(new DateHistogramInterval("1d")); - - GroupConfig.Builder groupConfig = new GroupConfig.Builder(); - groupConfig.setDateHisto(datehistoGroupConfig.build()); - - RollupJobConfig.Builder config = new RollupJobConfig.Builder(); - config.setIndexPattern("test-big"); - config.setRollupIndex("rolled"); - config.setId(taskId); - config.setGroupConfig(groupConfig.build()); - config.setMetricsConfig(Collections.singletonList(metricConfig)); - config.setCron("* * * * * ? 
*"); - config.setPageSize(1000); - - PutRollupJobAction.Request request = new PutRollupJobAction.Request(); - request.setConfig(config.build()); - client().execute(PutRollupJobAction.INSTANCE, request).get(); - - StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request(taskId); - StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get(); - Assert.assertThat(response.isStarted(), equalTo(true)); - - ESTestCase.assertBusy(() -> { - RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId); - if (rollupJobStatus == null) { - fail("null"); - } - - IndexerState state = rollupJobStatus.getIndexerState(); - logger.error("state: [" + state + "]"); - assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null); - }, 60, TimeUnit.SECONDS); - - RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId); - if (rollupJobStatus == null) { - Assert.fail("rollup job status should not be null"); - } - - client().admin().indices().prepareRefresh("rolled").get(); - - SearchResponse count = client().prepareSearch("rolled").setSize(10).get(); - // total document is numDays minus 1 because we don't build rollup for - // buckets that are not full (bucket for the current day). - Assert.assertThat(count.getHits().totalHits, equalTo(Long.valueOf(numDays-1))); - - if (ESTestCase.randomBoolean()) { - client().admin().indices().prepareDelete("test-big").get(); - client().admin().indices().prepareRefresh().get(); - } - - // Execute the rollup search - SearchRequest rollupRequest = new SearchRequest("rolled") - .source(new SearchSourceBuilder() - .aggregation(dateHistogram("timestamp") - .interval(1000*86400) - .field("timestamp")) - .size(0)); - SearchResponse searchResponse = client().execute(RollupSearchAction.INSTANCE, rollupRequest).get(); - Assert.assertNotNull(searchResponse); - - // And a regular search against the verification index - SearchRequest verifyRequest = new SearchRequest("test-verify") - .source(new SearchSourceBuilder() - .aggregation(dateHistogram("timestamp") - .interval(1000*86400) - .field("timestamp")) - .size(0)); - SearchResponse verifyResponse = client().execute(SearchAction.INSTANCE, verifyRequest).get(); - - Map rollupAggs = searchResponse.getAggregations().asMap(); - - for (Aggregation agg : verifyResponse.getAggregations().asList()) { - Aggregation rollupAgg = rollupAggs.get(agg.getName()); - - Assert.assertNotNull(rollupAgg); - Assert.assertThat(rollupAgg.getType(), equalTo(agg.getType())); - verifyAgg((InternalDateHistogram)agg, (InternalDateHistogram)rollupAgg); - } - - // And a quick sanity check for doc type - SearchRequest rollupRawRequest = new SearchRequest("rolled") - .source(new SearchSourceBuilder().query(new MatchAllQueryBuilder()) - .size(1)); - SearchResponse searchRawResponse = client().execute(SearchAction.INSTANCE, rollupRawRequest).get(); - Assert.assertNotNull(searchRawResponse); - assertThat(searchRawResponse.getHits().getAt(0).getType(), equalTo("_doc")); - } - - private void verifyAgg(InternalDateHistogram verify, InternalDateHistogram rollup) { - for (int i = 0; i < rollup.getBuckets().size(); i++) { - InternalDateHistogram.Bucket verifyBucket = verify.getBuckets().get(i); - InternalDateHistogram.Bucket rollupBucket = rollup.getBuckets().get(i); - Assert.assertThat(rollupBucket.getDocCount(), equalTo(verifyBucket.getDocCount())); - Assert.assertThat(((DateTime)rollupBucket.getKey()).getMillis(), equalTo(((DateTime)verifyBucket.getKey()).getMillis())); - 
Assert.assertTrue(rollupBucket.getAggregations().equals(verifyBucket.getAggregations())); - } - } - - private RollupJobStatus getRollupJobStatus(final String taskId) { - final GetRollupJobsAction.Request request = new GetRollupJobsAction.Request(taskId); - final GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, request).actionGet(); - - if (response.getJobs() != null && response.getJobs().isEmpty() == false) { - assertThat("Expect 1 rollup job with id " + taskId, response.getJobs().size(), equalTo(1)); - return response.getJobs().iterator().next().getStatus(); - } - return null; - } - - @After - public void cleanup() throws ExecutionException, InterruptedException { - GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("_all"); - GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get(); - - for (GetRollupJobsAction.JobWrapper job : response.getJobs()) { - StopRollupJobAction.Request stopRequest = new StopRollupJobAction.Request(job.getJob().getId()); - try { - client().execute(StopRollupJobAction.INSTANCE, stopRequest).get(); - } catch (ElasticsearchException e) { - // - } - - DeleteRollupJobAction.Request deleteRequest = new DeleteRollupJobAction.Request(job.getJob().getId()); - client().execute(DeleteRollupJobAction.INSTANCE, deleteRequest).get(); - } - } -} diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java index 3a314640d742a..7c07c98eb4725 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.action.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.watcher.WatcherFeatureSetUsage; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java new file mode 100644 index 0000000000000..f5f6d9d949b7f --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeRequest; + +public class XPackUsageRequest extends MasterNodeRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } + +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java new file mode 100644 index 0000000000000..3459403bd6124 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Response object from calling the xpack usage api. + * + * Usage information for each feature is accessible through {@link #getUsages()}. + */ +public class XPackUsageResponse { + + private final Map> usages; + + private XPackUsageResponse(Map> usages) throws IOException { + this.usages = usages; + } + + @SuppressWarnings("unchecked") + private static Map castMap(Object value) { + return (Map)value; + } + + /** Return a map from feature name to usage information for that feature. 
*/ + public Map> getUsages() { + return usages; + } + + public static XPackUsageResponse fromXContent(XContentParser parser) throws IOException { + Map rawMap = parser.map(); + Map> usages = rawMap.entrySet().stream().collect( + Collectors.toMap(Map.Entry::getKey, e -> castMap(e.getValue()))); + return new XPackUsageResponse(usages); + } +} diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 4c0019fb3193a..ee838ec5bc565 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -438,6 +438,7 @@ public void testDeleteJobAfterMissingIndex() throws Exception { client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32034") public void testDeleteJobAfterMissingAliases() throws Exception { String jobId = "delete-job-after-missing-alias-job"; String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java new file mode 100644 index 0000000000000..b0142ae141853 --- /dev/null +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -0,0 +1,326 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.multi_node; + +import org.apache.http.HttpStatus; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.rollup.job.RollupJob; +import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; +import org.junit.After; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.isOneOf; + +public class RollupIT extends ESRestTestCase { + + @Override + protected Settings restClientSettings() { + return getClientSettings("super-user", "x-pack-super-password"); + } + + @Override + protected Settings restAdminSettings() { + return getClientSettings("super-user", "x-pack-super-password"); + } + + private Settings getClientSettings(final String username, final String password) { + final String token = basicAuthHeaderValue(username, new SecureString(password.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + static Map toMap(Response response) throws IOException { + return toMap(EntityUtils.toString(response.getEntity())); + } + + static Map toMap(String response) throws IOException { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); + } + + @After + public void clearRollupMetadata() throws Exception { + deleteAllJobs(); + waitForPendingTasks(); + // indices will be deleted by the ESRestTestCase class + } + + public void testBigRollup() throws Exception { + final int numDocs = 200; + + // index documents for the rollup job + final StringBuilder bulk = new StringBuilder(); + for (int i = 0; i < numDocs; i++) { + bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n"); + ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60*i)), ZoneId.of("UTC")); + String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME); + bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n"); + } + bulk.append("\r\n"); + + final Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.addParameter("refresh", "true"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + // create the rollup job + final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test"); + createRollupJobRequest.setJsonEntity("{" + + "\"index_pattern\":\"rollup-*\"," + + "\"rollup_index\":\"results-rollup\"," + + "\"cron\":\"*/1 * * * * 
?\"," // fast cron and big page size so test runs quickly + + "\"page_size\":20," + + "\"groups\":{" + + " \"date_histogram\":{" + + " \"field\":\"timestamp\"," + + " \"interval\":\"5m\"" + + " }" + + "}," + + "\"metrics\":[" + + " {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + + "]" + + "}"); + + Map createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest)); + assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + // start the rollup job + final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start"); + Map startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest)); + assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); + + assertRollUpJob("rollup-job-test"); + + // Wait for the job to finish, by watching how many rollup docs we've indexed + assertBusy(() -> { + final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/rollup-job-test"); + Response getRollupJobResponse = client().performRequest(getRollupJobRequest); + assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); + + Map job = getJob(getRollupJobResponse, "rollup-job-test"); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), equalTo("started")); + assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41)); + } + }, 30L, TimeUnit.SECONDS); + + // Refresh the rollup index to make sure all newly indexed docs are searchable + final Request refreshRollupIndex = new Request("POST", "results-rollup/_refresh"); + toMap(client().performRequest(refreshRollupIndex)); + + String jsonRequestBody = "{\n" + + " \"size\": 0,\n" + + " \"query\": {\n" + + " \"match_all\": {}\n" + + " },\n" + + " \"aggs\": {\n" + + " \"date_histo\": {\n" + + " \"date_histogram\": {\n" + + " \"field\": \"timestamp\",\n" + + " \"interval\": \"1h\"\n" + + " },\n" + + " \"aggs\": {\n" + + " \"the_max\": {\n" + + " \"max\": {\n" + + " \"field\": \"value\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + + Request request = new Request("GET", "rollup-docs/_search"); + request.setJsonEntity(jsonRequestBody); + Response liveResponse = client().performRequest(request); + Map liveBody = toMap(liveResponse); + + request = new Request("GET", "results-rollup/_rollup_search"); + request.setJsonEntity(jsonRequestBody); + Response rollupResponse = client().performRequest(request); + Map rollupBody = toMap(rollupResponse); + + // Do the live agg results match the rollup agg results? + assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody), + equalTo(ObjectPath.eval("aggregations.date_histo.buckets", rollupBody))); + + request = new Request("GET", "rollup-docs/_rollup_search"); + request.setJsonEntity(jsonRequestBody); + Response liveRollupResponse = client().performRequest(request); + Map liveRollupBody = toMap(liveRollupResponse); + + // Does searching the live index via rollup_search work match the live search? 
+ assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody), + equalTo(ObjectPath.eval("aggregations.date_histo.buckets", liveRollupBody))); + + } + + @SuppressWarnings("unchecked") + private void assertRollUpJob(final String rollupJob) throws Exception { + String[] states = new String[]{"indexing", "started"}; + waitForRollUpJob(rollupJob, states); + + // check that the rollup job is started using the RollUp API + final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); + Map getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest)); + Map job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), isOneOf(states)); + } + + // check that the rollup job is started using the Tasks API + final Request taskRequest = new Request("GET", "_tasks"); + taskRequest.addParameter("detailed", "true"); + taskRequest.addParameter("actions", "xpack/rollup/*"); + Map taskResponse = toMap(client().performRequest(taskRequest)); + Map taskResponseNodes = (Map) taskResponse.get("nodes"); + Map taskResponseNode = (Map) taskResponseNodes.values().iterator().next(); + Map taskResponseTasks = (Map) taskResponseNode.get("tasks"); + Map taskResponseStatus = (Map) taskResponseTasks.values().iterator().next(); + assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), isOneOf(states)); + + // check that the rollup job is started using the Cluster State API + final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata"); + Map clusterStateResponse = toMap(client().performRequest(clusterStateRequest)); + List> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse); + + boolean hasRollupTask = false; + for (Map task : rollupJobTasks) { + if (ObjectPath.eval("id", task).equals(rollupJob)) { + hasRollupTask = true; + + final String jobStateField = "task.xpack/rollup/job.state.job_state"; + assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"), + ObjectPath.eval(jobStateField, task), isOneOf(states)); + break; + } + } + if (hasRollupTask == false) { + fail("Expected persistent task for [" + rollupJob + "] but none found."); + } + + } + + private void waitForRollUpJob(final String rollupJob,String[] expectedStates) throws Exception { + assertBusy(() -> { + final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); + Response getRollupJobResponse = client().performRequest(getRollupJobRequest); + assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); + + Map job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), isOneOf(expectedStates)); + } + }, 30L, TimeUnit.SECONDS); + } + + private Map getJob(Response response, String targetJobId) throws IOException { + return getJob(ESRestTestCase.entityAsMap(response), targetJobId); + } + + @SuppressWarnings("unchecked") + private Map getJob(Map jobsMap, String targetJobId) throws IOException { + + List> jobs = + (List>) XContentMapValues.extractValue("jobs", jobsMap); + + if (jobs == null) { + return null; + } + + for (Map job : jobs) { + String jobId = (String) ((Map) job.get("config")).get("id"); + if (jobId.equals(targetJobId)) { + return job; + } + } + return null; + } + + private void waitForPendingTasks() throws Exception { + ESTestCase.assertBusy(() -> { + try { + Request request = new Request("GET", 
"/_cat/tasks"); + request.addParameter("detailed", "true"); + Response response = adminClient().performRequest(request); + if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { + try (BufferedReader responseReader = new BufferedReader( + new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { + int activeTasks = 0; + String line; + StringBuilder tasksListString = new StringBuilder(); + while ((line = responseReader.readLine()) != null) { + + // We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks + if (line.startsWith(RollupJob.NAME) == true) { + activeTasks++; + tasksListString.append(line); + tasksListString.append('\n'); + } + } + assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks); + } + } + } catch (IOException e) { + throw new AssertionError("Error getting active tasks list", e); + } + }); + } + + @SuppressWarnings("unchecked") + private void deleteAllJobs() throws Exception { + Request request = new Request("GET", "/_xpack/rollup/job/_all"); + Response response = adminClient().performRequest(request); + Map jobs = ESRestTestCase.entityAsMap(response); + @SuppressWarnings("unchecked") + List> jobConfigs = + (List>) XContentMapValues.extractValue("jobs", jobs); + + if (jobConfigs == null) { + return; + } + + for (Map jobConfig : jobConfigs) { + logger.debug(jobConfig); + String jobId = (String) ((Map) jobConfig.get("config")).get("id"); + logger.debug("Deleting job " + jobId); + try { + request = new Request("DELETE", "/_xpack/rollup/job/" + jobId); + adminClient().performRequest(request); + } catch (Exception e) { + // ok + } + } + } + + private static String responseEntityToString(Response response) throws Exception { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { + return reader.lines().collect(Collectors.joining("\n")); + } + } +}