From 279f2929328b7ad9a8c7a840455dfb7f5a7ebcdb Mon Sep 17 00:00:00 2001
From: Tanguy Leroux
Date: Thu, 17 May 2018 10:58:25 +0200
Subject: [PATCH 01/34] Fix _cluster/state to always return cluster_uuid (#30656)

Since #30143, the Cluster State API should always return the current
cluster_uuid in the response body, regardless of the metrics filters.
In practice, however, it was only returned when the metadata metric was
requested and no specific indices were asked for. This commit fixes the
behavior to always return the cluster_uuid and adds a new test.
---
 .../test/cluster.state/20_filtering.yml       | 18 +++++++++++++++---
 .../state/TransportClusterStateAction.java    | 19 +++++++++----------
 2 files changed, 24 insertions(+), 13 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
index 880efaff19aa6..861e1200991b1 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
@@ -163,12 +163,24 @@ setup:
       version: " - 6.3.99"
       reason: "cluster state including cluster_uuid at the top level is new in v6.4.0 and higher"

+  # Get the current cluster_uuid
+  - do:
+      cluster.state: {}
+  - set: { metadata.cluster_uuid : cluster_uuid }
+
   - do:
       cluster.state:
-        metric: [ master_node, version, metadata ]
+        metric: [ master_node, version ]

-  - is_true: cluster_uuid
+  - match: { cluster_uuid: $cluster_uuid }
   - is_true: master_node
   - is_true: version
   - is_true: state_uuid
-  - is_true: metadata
+
+  - do:
+      cluster.state:
+        metric: [ routing_table ]
+        index: testidx
+
+  - match: { cluster_uuid: $cluster_uuid }
+  - is_true: routing_table

diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java
index 9efdbfa47148b..299e97a96408d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java
@@ -98,14 +98,11 @@ protected void masterOperation(final ClusterStateRequest request, final ClusterS
         if (request.blocks()) {
             builder.blocks(currentState.blocks());
         }
-        if (request.metaData()) {
-            MetaData.Builder mdBuilder;
-            if (request.indices().length == 0) {
-                mdBuilder = MetaData.builder(currentState.metaData());
-            } else {
-                mdBuilder = MetaData.builder();
-            }
+        MetaData.Builder mdBuilder = MetaData.builder();
+        mdBuilder.clusterUUID(currentState.metaData().clusterUUID());
+
+        if (request.metaData()) {
             if (request.indices().length > 0) {
                 String[] indices = indexNameExpressionResolver.concreteIndexNames(currentState, request);
                 for (String filteredIndex : indices) {
@@ -114,17 +111,19 @@ protected void masterOperation(final ClusterStateRequest request, final ClusterS
                         mdBuilder.put(indexMetaData, false);
                     }
                 }
+            } else {
+                mdBuilder = MetaData.builder(currentState.metaData());
             }

             // Filter our metadata that shouldn't be returned by API
-            for(ObjectObjectCursor<String, MetaData.Custom> custom : currentState.metaData().customs()) {
+            for(ObjectObjectCursor<String, MetaData.Custom> custom : currentState.metaData().customs()) {
                 if(!custom.value.context().contains(MetaData.XContentContext.API)) {
                     mdBuilder.removeCustom(custom.key);
                 }
             }
-
-            builder.metaData(mdBuilder);
         }

+        builder.metaData(mdBuilder);
+
         if (request.customs()) {
             for
(ObjectObjectCursor custom : currentState.customs()) { if (custom.value.isPrivate() == false) { From e7debce3e007b01047fa990cf209fe78c4f38aa7 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Thu, 17 May 2018 11:57:54 +0200 Subject: [PATCH 02/34] Watcher: Fix watch history template for dynamic slack attachments (#30172) The part of the history template responsible for slack attachments had a dynamic mapping configured which could lead to problems, when a string value looking like a date was configured in the value field of an attachment. This commit fixes the template by setting this field always to text. This also requires a change in the template numbering to be sure this will be applied properly when starting watcher. --- .../WatcherIndexTemplateRegistryField.java | 3 ++- .../src/main/resources/watch-history.json | 7 ++++++ .../rest-api-spec/test/slack/10_slack.yml | 23 +++++++++++++++++-- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java index 4cf0898bae2ff..25e2c928d9a57 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java @@ -12,8 +12,9 @@ public final class WatcherIndexTemplateRegistryField { // version 3: include watch status in history // version 6: upgrade to ES 6, removal of _status field // version 7: add full exception stack traces for better debugging + // version 8: fix slack attachment property not to be dynamic, causing field type issues // Note: if you change this, also inform the kibana team around the watcher-ui - public static final String INDEX_TEMPLATE_VERSION = "7"; + public static final String INDEX_TEMPLATE_VERSION = "8"; public static final String HISTORY_TEMPLATE_NAME = ".watch-history-" + INDEX_TEMPLATE_VERSION; public static final String TRIGGERED_TEMPLATE_NAME = ".triggered_watches"; public static final String WATCHES_TEMPLATE_NAME = ".watches"; diff --git a/x-pack/plugin/core/src/main/resources/watch-history.json b/x-pack/plugin/core/src/main/resources/watch-history.json index a26305b35542a..d158281c264d2 100644 --- a/x-pack/plugin/core/src/main/resources/watch-history.json +++ b/x-pack/plugin/core/src/main/resources/watch-history.json @@ -507,6 +507,13 @@ "properties" : { "color" : { "type" : "keyword" + }, + "fields" : { + "properties" : { + "value" : { + "type" : "text" + } + } } } } diff --git a/x-pack/qa/third-party/slack/src/test/resources/rest-api-spec/test/slack/10_slack.yml b/x-pack/qa/third-party/slack/src/test/resources/rest-api-spec/test/slack/10_slack.yml index 3b04ba716759a..259bc9e1d25af 100644 --- a/x-pack/qa/third-party/slack/src/test/resources/rest-api-spec/test/slack/10_slack.yml +++ b/x-pack/qa/third-party/slack/src/test/resources/rest-api-spec/test/slack/10_slack.yml @@ -16,7 +16,13 @@ }, "input": { "simple": { - "foo" : "something from input" + "foo" : "something from input", + "hits" : { + "hits" : [ + { "_source" : { "name" : "first", "value" : "2018-04-26T11:45:12.518Z" } }, + { "_source" : { "name" : "second", "value" : "anything" } } + ] + } } }, "actions": { @@ -49,7 +55,20 @@ } ] } - ] + ], + "dynamic_attachments" : { + "list_path" : "ctx.payload.hits.hits", + "attachment_template" : 
{ + "title": "Title", + "fields" : [ + { + "title" : "Field title {{_source.name}}", + "value" : "{{_source.value}}", + "short" : true + } + ] + } + } } } } From 1d329d8f5e92d8e2a698de60e0056f2ba32c1ce8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 17 May 2018 12:52:22 +0200 Subject: [PATCH 03/34] Deprecate `nGram` and `edgeNGram` names for ngram filters (#30209) The camel case name `nGram` should be removed in favour of `ngram` and similar for `edgeNGram` and `edge_ngram`. Before removal, we need to deprecate the camel case names first. This change adds deprecation warnings for indices with versions 6.4.0 and higher and logs deprecation warnings. --- .../analysis/common/CommonAnalysisPlugin.java | 21 +++- .../common/CommonAnalysisPluginTests.java | 119 ++++++++++++++++++ .../common/NGramTokenizerFactoryTests.java | 21 +--- .../analysis/PreConfiguredTokenFilter.java | 9 ++ 4 files changed, 146 insertions(+), 24 deletions(-) create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index c9b48f0c8650d..624194092a02e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -237,9 +237,14 @@ public List getPreConfiguredTokenFilters() { filters.add(PreConfiguredTokenFilter.singleton("dutch_stem", false, input -> new SnowballFilter(input, new DutchStemmer()))); filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, input -> new EdgeNGramTokenFilter(input, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE))); - // TODO deprecate edgeNGram - filters.add(PreConfiguredTokenFilter.singleton("edgeNGram", false, input -> - new EdgeNGramTokenFilter(input, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE))); + filters.add(PreConfiguredTokenFilter.singletonWithVersion("edgeNGram", false, (reader, version) -> { + if (version.onOrAfter(org.elasticsearch.Version.V_6_4_0)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("edgeNGram_deprecation", + "The [edgeNGram] token filter name is deprecated and will be removed in a future version. 
" + + "Please change the filter name to [edge_ngram] instead."); + } + return new EdgeNGramTokenFilter(reader, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE); + })); filters.add(PreConfiguredTokenFilter.singleton("elision", true, input -> new ElisionFilter(input, FrenchAnalyzer.DEFAULT_ARTICLES))); filters.add(PreConfiguredTokenFilter.singleton("french_stem", false, input -> new SnowballFilter(input, new FrenchStemmer()))); @@ -256,8 +261,14 @@ public List getPreConfiguredTokenFilters() { LimitTokenCountFilterFactory.DEFAULT_MAX_TOKEN_COUNT, LimitTokenCountFilterFactory.DEFAULT_CONSUME_ALL_TOKENS))); filters.add(PreConfiguredTokenFilter.singleton("ngram", false, NGramTokenFilter::new)); - // TODO deprecate nGram - filters.add(PreConfiguredTokenFilter.singleton("nGram", false, NGramTokenFilter::new)); + filters.add(PreConfiguredTokenFilter.singletonWithVersion("nGram", false, (reader, version) -> { + if (version.onOrAfter(org.elasticsearch.Version.V_6_4_0)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("nGram_deprecation", + "The [nGram] token filter name is deprecated and will be removed in a future version. " + + "Please change the filter name to [ngram] instead."); + } + return new NGramTokenFilter(reader); + })); filters.add(PreConfiguredTokenFilter.singleton("persian_normalization", true, PersianNormalizationFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("porter_stem", false, PorterStemFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("reverse", false, ReverseStringFilter::new)); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java new file mode 100644 index 0000000000000..1d2b8a36810eb --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -0,0 +1,119 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.analysis.common;
+
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.Tokenizer;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
+import org.elasticsearch.test.VersionUtils;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Map;
+
+public class CommonAnalysisPluginTests extends ESTestCase {
+
+    /**
+     * Check that the deprecated name "nGram" issues a deprecation warning for indices created since 6.4.0
+     */
+    public void testNGramDeprecationWarning() throws IOException {
+        Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+            .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_4_0, Version.CURRENT))
+            .build();
+
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
+        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
+            Map<String, TokenFilterFactory> tokenFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).tokenFilter;
+            TokenFilterFactory tokenFilterFactory = tokenFilters.get("nGram");
+            Tokenizer tokenizer = new MockTokenizer();
+            tokenizer.setReader(new StringReader("foo bar"));
+            assertNotNull(tokenFilterFactory.create(tokenizer));
+            assertWarnings(
+                "The [nGram] token filter name is deprecated and will be removed in a future version. "
+                    + "Please change the filter name to [ngram] instead.");
+        }
+    }
+
+    /**
+     * Check that the deprecated name "nGram" does NOT issue a deprecation warning for indices created before 6.4.0
+     */
+    public void testNGramNoDeprecationWarningPre6_4() throws IOException {
+        Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+            .put(IndexMetaData.SETTING_VERSION_CREATED,
+                VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_3_0))
+            .build();
+
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
+        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
+            Map<String, TokenFilterFactory> tokenFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).tokenFilter;
+            TokenFilterFactory tokenFilterFactory = tokenFilters.get("nGram");
+            Tokenizer tokenizer = new MockTokenizer();
+            tokenizer.setReader(new StringReader("foo bar"));
+            assertNotNull(tokenFilterFactory.create(tokenizer));
+        }
+    }
+
+    /**
+     * Check that the deprecated name "edgeNGram" issues a deprecation warning for indices created since 6.4.0
+     */
+    public void testEdgeNGramDeprecationWarning() throws IOException {
+        Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+            .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_4_0, Version.CURRENT))
+            .build();
+
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
+        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
+            Map<String, TokenFilterFactory> tokenFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).tokenFilter;
+            TokenFilterFactory tokenFilterFactory = tokenFilters.get("edgeNGram");
+            Tokenizer tokenizer = new MockTokenizer();
+            tokenizer.setReader(new StringReader("foo bar"));
+            assertNotNull(tokenFilterFactory.create(tokenizer));
+            assertWarnings(
+                "The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+                    + "Please change the filter name to [edge_ngram] instead.");
+        }
+    }
+
+    /**
+     * Check that the deprecated name "edgeNGram" does NOT issue a deprecation warning for indices created before 6.4.0
+     */
+    public void testEdgeNGramNoDeprecationWarningPre6_4() throws IOException {
+        Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+            .put(IndexMetaData.SETTING_VERSION_CREATED,
+                VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_3_0))
+            .build();
+
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
+        try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
+            Map<String, TokenFilterFactory> tokenFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).tokenFilter;
+            TokenFilterFactory tokenFilterFactory = tokenFilters.get("edgeNGram");
+            Tokenizer tokenizer = new MockTokenizer();
+            tokenizer.setReader(new StringReader("foo bar"));
+            assertNotNull(tokenFilterFactory.create(tokenizer));
+        }
+    }
+}

diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
index 65c0940784bf1..79ca244ce9e94 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
@@ -32,15 +32,11 @@
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.test.ESTokenStreamTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
+import org.elasticsearch.test.VersionUtils;

 import java.io.IOException;
 import java.io.StringReader;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.List;
-import java.util.Random;

 import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween;
 import static org.hamcrest.Matchers.instanceOf;
@@ -128,7 +124,7 @@ public void testBackwardsCompatibilityEdgeNgramTokenFilter() throws Exception {
         for (int i = 0; i < iters; i++) {
             final Index index = new Index("test", "_na_");
             final String name = "ngr";
-            Version v = randomVersion(random());
+            Version v = VersionUtils.randomVersion(random());
             Builder builder = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3);
             boolean reverse = random().nextBoolean();
             if (reverse) {
@@ -148,17 +144,4 @@ public void testBackwardsCompatibilityEdgeNgramTokenFilter() throws Exception {
             }
         }
     }
-
-
-    private Version randomVersion(Random random) throws IllegalArgumentException, IllegalAccessException {
-        Field[] declaredFields = Version.class.getFields();
-        List<Field> versionFields = new ArrayList<>();
-        for (Field field : declaredFields) {
-            if ((field.getModifiers() & Modifier.STATIC) != 0 && field.getName().startsWith("V_") && field.getType() == Version.class) {
-                versionFields.add(field);
-            }
-        }
-        return (Version) versionFields.get(random.nextInt(versionFields.size())).get(Version.class);
-    }
-
 }
diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java
index 777fb589c9db0..12130e856f32a 100644
--- a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java
+++ b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java
@@ -41,6 +41,15 @@ public static PreConfiguredTokenFilter singleton(String name, boolean useFilterF
                 (tokenStream, version) -> create.apply(tokenStream));
     }

+    /**
+     * Create a pre-configured token filter that may not vary at all, but takes the Elasticsearch version into account.
+     */
+    public static PreConfiguredTokenFilter singletonWithVersion(String name, boolean useFilterForMultitermQueries,
+            BiFunction<TokenStream, Version, TokenStream> create) {
+        return new PreConfiguredTokenFilter(name, useFilterForMultitermQueries, CachingStrategy.ONE,
+                (tokenStream, version) -> create.apply(tokenStream, version));
+    }
+
     /**
      * Create a pre-configured token filter that may vary based on the Lucene version.
      */

From 75312a7e637300b19b4bd9678f0859bc91597a0a Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Thu, 17 May 2018 13:58:10 +0200
Subject: [PATCH 04/34] Use proper write-once semantics for GCS repository (#30438)

There's no need for an extra blobExists() call when writing a blob to
the GCS service. GCS provides an option (with stronger consistency
guarantees) on the insert method that guarantees that the blob that's
uploaded does not already exist.

Relates to #19749
---
 .../gcs/GoogleCloudStorageBlobContainer.java |  3 -
 .../gcs/GoogleCloudStorageBlobStore.java     | 58 +++++++++++++------
 .../repositories/gcs/MockStorage.java        | 21 ++++++-
 3 files changed, 58 insertions(+), 24 deletions(-)

diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java
index 331e2dadca2da..833539905103a 100644
--- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java
@@ -66,9 +66,6 @@ public InputStream readBlob(String blobName) throws IOException {

     @Override
     public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
-        if (blobExists(blobName)) {
-            throw new FileAlreadyExistsException("blob [" + blobName + "] already exists, cannot overwrite");
-        }
         blobStore.writeBlob(buildKey(blobName), inputStream, blobSize);
     }

diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java
index 5dc03ea45de03..83aafdde2b1ab 100644
--- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java
@@ -28,6 +28,7 @@
 import com.google.cloud.storage.Storage;
 import com.google.cloud.storage.Storage.BlobListOption;
 import com.google.cloud.storage.Storage.CopyRequest;
+import com.google.cloud.storage.StorageException;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.blobstore.BlobContainer;
 import org.elasticsearch.common.blobstore.BlobMetaData;
@@ -47,12 +48,15 @@
 import java.nio.channels.Channels;
 import java.nio.channels.ReadableByteChannel;
 import java.nio.channels.WritableByteChannel;
+import
java.nio.file.FileAlreadyExistsException; import java.nio.file.NoSuchFileException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import static java.net.HttpURLConnection.HTTP_PRECON_FAILED; + class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore { // The recommended maximum size of a blob that should be uploaded in a single @@ -204,24 +208,32 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I * @param inputStream the stream containing the blob data */ private void writeBlobResumable(BlobInfo blobInfo, InputStream inputStream) throws IOException { - final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); - Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { - @Override - public boolean isOpen() { - return writeChannel.isOpen(); - } + try { + final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException( + () -> storage.writer(blobInfo, Storage.BlobWriteOption.doesNotExist())); + Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { + @Override + public boolean isOpen() { + return writeChannel.isOpen(); + } - @Override - public void close() throws IOException { - SocketAccess.doPrivilegedVoidIOException(writeChannel::close); - } + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(writeChannel::close); + } - @SuppressForbidden(reason = "Channel is based of a socket not a file") - @Override - public int write(ByteBuffer src) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); + @SuppressForbidden(reason = "Channel is based of a socket not a file") + @Override + public int write(ByteBuffer src) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); + } + })); + } catch (StorageException se) { + if (se.getCode() == HTTP_PRECON_FAILED) { + throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); } - })); + throw se; + } } /** @@ -238,7 +250,17 @@ private void writeBlobMultipart(BlobInfo blobInfo, InputStream inputStream, long assert blobSize <= LARGE_BLOB_THRESHOLD_BYTE_SIZE : "large blob uploads should use the resumable upload method"; final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); Streams.copy(inputStream, baos); - SocketAccess.doPrivilegedVoidIOException(() -> storage.create(blobInfo, baos.toByteArray())); + SocketAccess.doPrivilegedVoidIOException( + () -> { + try { + storage.create(blobInfo, baos.toByteArray(), Storage.BlobTargetOption.doesNotExist()); + } catch (StorageException se) { + if (se.getCode() == HTTP_PRECON_FAILED) { + throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); + } + throw se; + } + }); } /** @@ -295,8 +317,8 @@ void deleteBlobs(Collection blobNames) throws IOException { /** * Moves a blob within the same bucket * - * @param sourceBlob name of the blob to move - * @param targetBlob new name of the blob in the same bucket + * @param sourceBlobName name of the blob to move + * @param targetBlobName new name of the blob in the same bucket */ void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { final BlobId sourceBlobId = BlobId.of(bucket, sourceBlobName); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java 
b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 2b52b7a32a9cc..1b31b3018e48a 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -56,6 +56,7 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import java.util.stream.Stream; /** * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs @@ -113,7 +114,14 @@ public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... option if (bucketName.equals(blobInfo.getBucket()) == false) { throw new StorageException(404, "Bucket not found"); } - blobs.put(blobInfo.getName(), content); + if (Stream.of(options).anyMatch(option -> option.equals(BlobTargetOption.doesNotExist()))) { + byte[] existingBytes = blobs.putIfAbsent(blobInfo.getName(), content); + if (existingBytes != null) { + throw new StorageException(412, "Blob already exists"); + } + } else { + blobs.put(blobInfo.getName(), content); + } return get(BlobId.of(blobInfo.getBucket(), blobInfo.getName())); } @@ -243,9 +251,16 @@ public boolean isOpen() { } @Override - public void close() throws IOException { + public void close() { IOUtils.closeWhileHandlingException(writableByteChannel); - blobs.put(blobInfo.getName(), output.toByteArray()); + if (Stream.of(options).anyMatch(option -> option.equals(BlobWriteOption.doesNotExist()))) { + byte[] existingBytes = blobs.putIfAbsent(blobInfo.getName(), output.toByteArray()); + if (existingBytes != null) { + throw new StorageException(412, "Blob already exists"); + } + } else { + blobs.put(blobInfo.getName(), output.toByteArray()); + } } }; } From 42226e02789222a89eeca6b14e5921de87740ae9 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 17 May 2018 12:59:20 +0100 Subject: [PATCH 05/34] [TEST] Account for increase in ML C++ memory usage (#30675) Recent changes to the ML C++ have resulted in higher memory usage, so fewer "by" fields can be analyzed in a given amount of model memory. 
 .../xpack/ml/integration/AutodetectMemoryLimitIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java
index c46b1d1c8689b..f54f1bf54e932 100644
--- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java
+++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java
@@ -134,7 +134,7 @@ public void testTooManyByFields() throws Exception {
         assertThat(modelSizeStats.getModelBytes(), lessThan(36000000L));
         assertThat(modelSizeStats.getModelBytes(), greaterThan(30000000L));
         assertThat(modelSizeStats.getTotalByFieldCount(), lessThan(1900L));
-        assertThat(modelSizeStats.getTotalByFieldCount(), greaterThan(1600L));
+        assertThat(modelSizeStats.getTotalByFieldCount(), greaterThan(1500L));
         assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
     }

From d1b3781cb1e1161d23479dbf8d8b711f214bc12e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?=
Date: Thu, 17 May 2018 14:23:08 +0200
Subject: [PATCH 06/34] [Docs] Replace InetSocketTransportAddress with TransportAddress (#30673)

The former class has been removed in 6.0, so the documentation code
snippets should be updated accordingly.
---
 docs/java-api/query-dsl/has-parent-query.asciidoc | 2 +-
 docs/java-api/query-dsl/percolate-query.asciidoc  | 2 +-
 x-pack/docs/en/watcher/java.asciidoc              | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/java-api/query-dsl/has-parent-query.asciidoc b/docs/java-api/query-dsl/has-parent-query.asciidoc
index 63711c399f71a..6a83fe2b0698f 100644
--- a/docs/java-api/query-dsl/has-parent-query.asciidoc
+++ b/docs/java-api/query-dsl/has-parent-query.asciidoc
@@ -9,7 +9,7 @@ When using the `has_parent` query it is important to use the `PreBuiltTransportC
 --------------------------------------------------
 Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build();
 TransportClient client = new PreBuiltTransportClient(settings);
-client.addTransportAddress(new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)));
+client.addTransportAddress(new TransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)));
 --------------------------------------------------

 Otherwise the parent-join module doesn't get loaded and the `has_parent` query can't be used from the transport client.
diff --git a/docs/java-api/query-dsl/percolate-query.asciidoc b/docs/java-api/query-dsl/percolate-query.asciidoc
index a5651392b628e..e1968ae456a5c 100644
--- a/docs/java-api/query-dsl/percolate-query.asciidoc
+++ b/docs/java-api/query-dsl/percolate-query.asciidoc
@@ -9,7 +9,7 @@ See:
 --------------------------------------------------
 Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build();
 TransportClient client = new PreBuiltTransportClient(settings);
-client.addTransportAddress(new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)));
+client.addTransportAddress(new TransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)));
 --------------------------------------------------

 Before the `percolate` query can be used an `percolator` mapping should be added and

diff --git a/x-pack/docs/en/watcher/java.asciidoc b/x-pack/docs/en/watcher/java.asciidoc
index 7d3bce42f6907..37d519399efdc 100644
--- a/x-pack/docs/en/watcher/java.asciidoc
+++ b/x-pack/docs/en/watcher/java.asciidoc
@@ -116,7 +116,7 @@ TransportClient client = new PreBuiltXPackTransportClient(Settings.builder()
         .put("cluster.name", "myClusterName")
         ...
         .build())
-    .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300));
+    .addTransportAddress(new TransportAddress(InetAddress.getByName("localhost"), 9300));

 XPackClient xpackClient = new XPackClient(client);
 WatcherClient watcherClient = xpackClient.watcher();

From 7453e1d6b840a2e5df7e59e9358d16828cc8434b Mon Sep 17 00:00:00 2001
From: Zachary Tong
Date: Thu, 17 May 2018 09:58:43 -0400
Subject: [PATCH 07/34] MovingFunction Pipeline agg backport to 6.x (#30658)

This pipeline aggregation gives the user the ability to script functions
that "move" across a window of data, instead of single data points. It is
the scripted version of the MovingAvg pipeline agg.

Through custom script contexts, we expose a number of convenience methods:

- MovingFunctions.max()
- MovingFunctions.min()
- MovingFunctions.sum()
- MovingFunctions.unweightedAvg()
- MovingFunctions.linearWeightedAvg()
- MovingFunctions.ewma()
- MovingFunctions.holt()
- MovingFunctions.holtWinters()
- MovingFunctions.stdDev()

The user can also define any arbitrary logic via their own scripting, or
combine with the above methods.
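For illustration, a minimal sketch of how the new aggregation might be built
from Java. The `movingFunction` helper name and its signature are assumptions
inferred from the diffstat below, not verified against the final API:

    import org.elasticsearch.script.Script;
    import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;
    import org.elasticsearch.search.aggregations.pipeline.movfn.MovFnPipelineAggregationBuilder;

    // Average the sibling "the_sum" metric over a sliding window of 10 buckets.
    Script script = new Script("MovingFunctions.unweightedAvg(values)");
    MovFnPipelineAggregationBuilder movFn =
        PipelineAggregatorBuilders.movingFunction("the_movfn", script, "the_sum", 10);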
--- docs/reference/aggregations/pipeline.asciidoc | 3 + .../pipeline/movavg-aggregation.asciidoc | 15 +- .../pipeline/movfn-aggregation.asciidoc | 633 ++++++++++++++++ .../painless/PainlessPlugin.java | 30 +- .../spi/org.elasticsearch.aggs.movfn.txt | 32 + .../test/painless/70_mov_fn_agg.yml | 315 ++++++++ .../test/search.aggregation/250_moving_fn.yml | 46 ++ .../test/search.aggregation/80_typed_keys.yml | 6 + .../elasticsearch/script/ScriptModule.java | 4 +- .../elasticsearch/search/SearchModule.java | 7 + .../pipeline/PipelineAggregatorBuilders.java | 10 + .../MovAvgPipelineAggregationBuilder.java | 6 + .../pipeline/movavg/models/EwmaModel.java | 18 +- .../movavg/models/HoltLinearModel.java | 44 +- .../movavg/models/HoltWintersModel.java | 89 +-- .../pipeline/movavg/models/LinearModel.java | 16 +- .../pipeline/movavg/models/MovAvgModel.java | 9 +- .../pipeline/movavg/models/SimpleModel.java | 11 +- .../MovFnPipelineAggregationBuilder.java | 264 +++++++ .../movfn/MovFnPipelineAggregator.java | 149 ++++ .../pipeline/movfn/MovingFunctionScript.java | 45 ++ .../pipeline/movfn/MovingFunctions.java | 359 +++++++++ .../DateHistogramAggregatorTests.java | 2 +- .../avg/AvgBucketAggregatorTests.java | 6 +- ...eAggregationBuilderSerializationTests.java | 51 ++ .../pipeline/movfn/MovFnUnitTests.java | 164 +++++ .../movfn/MovFnWhitelistedFunctionTests.java | 684 ++++++++++++++++++ .../pipeline/moving/avg/MovAvgIT.java | 2 +- .../pipeline/moving/avg/MovAvgTests.java | 9 + .../pipeline/moving/avg/MovAvgUnitTests.java | 4 +- .../script/MockScriptEngine.java | 16 + .../aggregations/AggregatorTestCase.java | 26 +- 32 files changed, 2887 insertions(+), 188 deletions(-) create mode 100644 docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc create mode 100644 modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt create mode 100644 modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregator.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctionScript.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilderSerializationTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java diff --git a/docs/reference/aggregations/pipeline.asciidoc b/docs/reference/aggregations/pipeline.asciidoc index bd1b0284a84fb..37c1c357007b0 100644 --- a/docs/reference/aggregations/pipeline.asciidoc +++ b/docs/reference/aggregations/pipeline.asciidoc @@ -72,6 +72,7 @@ POST /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.] 
<1> The metric is called `"the_sum"` <2> The `buckets_path` refers to the metric via a relative path `"the_sum"` @@ -136,6 +137,7 @@ POST /_search } -------------------------------------------------- // CONSOLE +// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.] <1> By using `_count` instead of a metric name, we can calculate the moving average of document counts in the histogram The `buckets_path` can also use `"_bucket_count"` and path to a multi-bucket aggregation to use the number of buckets @@ -231,6 +233,7 @@ include::pipeline/stats-bucket-aggregation.asciidoc[] include::pipeline/extended-stats-bucket-aggregation.asciidoc[] include::pipeline/percentiles-bucket-aggregation.asciidoc[] include::pipeline/movavg-aggregation.asciidoc[] +include::pipeline/movfn-aggregation.asciidoc[] include::pipeline/cumulative-sum-aggregation.asciidoc[] include::pipeline/bucket-script-aggregation.asciidoc[] include::pipeline/bucket-selector-aggregation.asciidoc[] diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index db73510216be0..39a8255c90705 100644 --- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -1,6 +1,10 @@ [[search-aggregations-pipeline-movavg-aggregation]] === Moving Average Aggregation +deprecated[6.4.0, The Moving Average aggregation has been deprecated in favor of the more general +<>. The new Moving Function aggregation provides +all the same functionality as the Moving Average aggregation, but also provides more flexibility.] + Given an ordered series of data, the Moving Average aggregation will slide a window across the data and emit the average value of that window. For example, given the data `[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]`, we can calculate a simple moving average with windows size of `5` as follows: @@ -74,6 +78,7 @@ POST /_search -------------------------------------------------- // CONSOLE // TEST[setup:sales] +// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.] <1> A `date_histogram` named "my_date_histo" is constructed on the "timestamp" field, with one-day intervals <2> A `sum` metric is used to calculate the sum of a field. This could be any metric (sum, min, max, etc) @@ -180,6 +185,7 @@ POST /_search -------------------------------------------------- // CONSOLE // TEST[setup:sales] +// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.] A `simple` model has no special settings to configure @@ -233,6 +239,7 @@ POST /_search -------------------------------------------------- // CONSOLE // TEST[setup:sales] +// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.] A `linear` model has no special settings to configure @@ -295,7 +302,7 @@ POST /_search -------------------------------------------------- // CONSOLE // TEST[setup:sales] - +// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.] [[single_0.2alpha]] .EWMA with window of size 10, alpha = 0.2 @@ -355,6 +362,7 @@ POST /_search -------------------------------------------------- // CONSOLE // TEST[setup:sales] +// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.] 
In practice, the `alpha` value behaves very similarly in `holt` as `ewma`: small values produce more smoothing
and more lag, while larger values produce closer tracking and less lag. The value of `beta` is often difficult
@@ -446,7 +454,7 @@ POST /_search
 --------------------------------------------------
 // CONSOLE
 // TEST[setup:sales]
-
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]

 [[holt_winters_add]]
 .Holt-Winters moving average with window of size 120, alpha = 0.5, beta = 0.7, gamma = 0.3, period = 30
@@ -508,6 +516,7 @@ POST /_search
 --------------------------------------------------
 // CONSOLE
 // TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]

 ==== Prediction

@@ -550,6 +559,7 @@ POST /_search
 --------------------------------------------------
 // CONSOLE
 // TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]

 The `simple`, `linear` and `ewma` models all produce "flat" predictions: they essentially converge on the mean
 of the last value in the series, producing a flat:
@@ -631,6 +641,7 @@ POST /_search
 --------------------------------------------------
 // CONSOLE
 // TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]

 <1> Minimization is enabled with the `minimize` parameter

diff --git a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc
new file mode 100644
index 0000000000000..b05c56b880560
--- /dev/null
+++ b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc
@@ -0,0 +1,633 @@
+[[search-aggregations-pipeline-movfn-aggregation]]
+=== Moving Function Aggregation
+
+Given an ordered series of data, the Moving Function aggregation will slide a window across the data and allow the user to specify a custom
+script that is executed on each window of data. For convenience, a number of common functions are predefined such as min/max, moving averages,
+etc.
+
+This is conceptually very similar to the <<search-aggregations-pipeline-movavg-aggregation, Moving Average>> pipeline aggregation, except
+it provides more functionality.
+
+==== Syntax
+
+A `moving_fn` aggregation looks like this in isolation:
+
+[source,js]
+--------------------------------------------------
+{
+    "moving_fn": {
+        "buckets_path": "the_sum",
+        "window": 10,
+        "script": "MovingFunctions.min(values)"
+    }
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+.`moving_fn` Parameters
+|===
+|Parameter Name |Description |Required |Default Value
+|`buckets_path` |Path to the metric of interest (see <<buckets-path-syntax>> for more details) |Required |
+|`window` |The size of window to "slide" across the histogram. |Required |
+|`script` |The script that should be executed on each window of data |Required |
+|===
+
+`moving_fn` aggregations must be embedded inside of a `histogram` or `date_histogram` aggregation.
They can be +embedded like any other metric aggregation: + +[source,js] +-------------------------------------------------- +POST /_search +{ + "size": 0, + "aggs": { + "my_date_histo":{ <1> + "date_histogram":{ + "field":"date", + "interval":"1M" + }, + "aggs":{ + "the_sum":{ + "sum":{ "field": "price" } <2> + }, + "the_movfn": { + "moving_fn": { + "buckets_path": "the_sum", <3> + "window": 10, + "script": "MovingFunctions.unweightedAvg(values)" + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +<1> A `date_histogram` named "my_date_histo" is constructed on the "timestamp" field, with one-day intervals +<2> A `sum` metric is used to calculate the sum of a field. This could be any numeric metric (sum, min, max, etc) +<3> Finally, we specify a `moving_fn` aggregation which uses "the_sum" metric as its input. + +Moving averages are built by first specifying a `histogram` or `date_histogram` over a field. You can then optionally +add numeric metrics, such as a `sum`, inside of that histogram. Finally, the `moving_fn` is embedded inside the histogram. +The `buckets_path` parameter is then used to "point" at one of the sibling metrics inside of the histogram (see +<> for a description of the syntax for `buckets_path`. + +An example response from the above aggregation may look like: + +[source,js] +-------------------------------------------------- +{ + "took": 11, + "timed_out": false, + "_shards": ..., + "hits": ..., + "aggregations": { + "my_date_histo": { + "buckets": [ + { + "key_as_string": "2015/01/01 00:00:00", + "key": 1420070400000, + "doc_count": 3, + "the_sum": { + "value": 550.0 + }, + "the_movfn": { + "value": null + } + }, + { + "key_as_string": "2015/02/01 00:00:00", + "key": 1422748800000, + "doc_count": 2, + "the_sum": { + "value": 60.0 + }, + "the_movfn": { + "value": 550.0 + } + }, + { + "key_as_string": "2015/03/01 00:00:00", + "key": 1425168000000, + "doc_count": 2, + "the_sum": { + "value": 375.0 + }, + "the_movfn": { + "value": 305.0 + } + } + ] + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/"took": 11/"took": $body.took/] +// TESTRESPONSE[s/"_shards": \.\.\./"_shards": $body._shards/] +// TESTRESPONSE[s/"hits": \.\.\./"hits": $body.hits/] + + +==== Custom user scripting + +The Moving Function aggregation allows the user to specify any arbitrary script to define custom logic. The script is invoked each time a +new window of data is collected. These values are provided to the script in the `values` variable. The script should then perform some +kind of calculation and emit a single `double` as the result. Emitting `null` is not permitted, although `NaN` and +/- `Inf` are allowed. + +For example, this script will simply return the first value from the window, or `NaN` if no values are available: + +[source,js] +-------------------------------------------------- +POST /_search +{ + "size": 0, + "aggs": { + "my_date_histo":{ + "date_histogram":{ + "field":"date", + "interval":"1M" + }, + "aggs":{ + "the_sum":{ + "sum":{ "field": "price" } + }, + "the_movavg": { + "moving_fn": { + "buckets_path": "the_sum", + "window": 10, + "script": "return values.length > 0 ? 
values[0] : Double.NaN" + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +==== Pre-built Functions + +For convenience, a number of functions have been prebuilt and are available inside the `moving_fn` script context: + +- `max()` +- `min()` +- `sum()` +- `stdDev()` +- `unweightedAvg()` +- `linearWeightedAvg()` +- `ewma()` +- `holt()` +- `holtWinters()` + +The functions are available from the `MovingFunctions` namespace. E.g. `MovingFunctions.max()` + +===== max Function + +This function accepts a collection of doubles and returns the maximum value in that window. `null` and `NaN` values are ignored; the maximum +is only calculated over the real values. If the window is empty, or all values are `null`/`NaN`, `NaN` is returned as the result. + +.`max(double[] values)` Parameters +|=== +|Parameter Name |Description +|`values` |The window of values to find the maximum +|=== + +[source,js] +-------------------------------------------------- +POST /_search +{ + "size": 0, + "aggs": { + "my_date_histo":{ + "date_histogram":{ + "field":"date", + "interval":"1M" + }, + "aggs":{ + "the_sum":{ + "sum":{ "field": "price" } + }, + "the_moving_max": { + "moving_fn": { + "buckets_path": "the_sum", + "window": 10, + "script": "MovingFunctions.max(values)" + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +===== min Function + +This function accepts a collection of doubles and returns the minimum value in that window. `null` and `NaN` values are ignored; the minimum +is only calculated over the real values. If the window is empty, or all values are `null`/`NaN`, `NaN` is returned as the result. + +.`min(double[] values)` Parameters +|=== +|Parameter Name |Description +|`values` |The window of values to find the minimum +|=== + +[source,js] +-------------------------------------------------- +POST /_search +{ + "size": 0, + "aggs": { + "my_date_histo":{ + "date_histogram":{ + "field":"date", + "interval":"1M" + }, + "aggs":{ + "the_sum":{ + "sum":{ "field": "price" } + }, + "the_moving_min": { + "moving_fn": { + "buckets_path": "the_sum", + "window": 10, + "script": "MovingFunctions.min(values)" + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +===== sum Function + +This function accepts a collection of doubles and returns the sum of the values in that window. `null` and `NaN` values are ignored; +the sum is only calculated over the real values. If the window is empty, or all values are `null`/`NaN`, `0.0` is returned as the result. + +.`sum(double[] values)` Parameters +|=== +|Parameter Name |Description +|`values` |The window of values to find the sum of +|=== + +[source,js] +-------------------------------------------------- +POST /_search +{ + "size": 0, + "aggs": { + "my_date_histo":{ + "date_histogram":{ + "field":"date", + "interval":"1M" + }, + "aggs":{ + "the_sum":{ + "sum":{ "field": "price" } + }, + "the_moving_sum": { + "moving_fn": { + "buckets_path": "the_sum", + "window": 10, + "script": "MovingFunctions.sum(values)" + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +===== stdDev Function + +This function accepts a collection of doubles and and average, then returns the standard deviation of the values in that window. +`null` and `NaN` values are ignored; the sum is only calculated over the real values. 
If the window is empty, or all values are
+`null`/`NaN`, `0.0` is returned as the result.
+
+.`sum(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the sum of
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_moving_sum": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "MovingFunctions.sum(values)"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== stdDev Function
+
+This function accepts a collection of doubles and an average, then returns the standard deviation of the values in that window.
+`null` and `NaN` values are ignored; the standard deviation is only calculated over the real values.
+If the window is empty, or all values are
+`null`/`NaN`, `0.0` is returned as the result.
+
+.`stdDev(double[] values, double avg)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the standard deviation of
+|`avg` |The average of the window
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_moving_sum": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "MovingFunctions.stdDev(values, MovingFunctions.unweightedAvg(values))"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+The `avg` parameter must be provided to the standard deviation function because different styles of averages can be computed on the window
+(simple, linearly weighted, etc). The various moving averages that are detailed below can be used to calculate the average for the
+standard deviation function.
+
+===== unweightedAvg Function
+
+The `unweightedAvg` function calculates the sum of all values in the window, then divides by the size of the window. It is effectively
+a simple arithmetic mean of the window. The simple moving average does not perform any time-dependent weighting, which means
+the values from a `simple` moving average tend to "lag" behind the real data.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is count of non-`null`,non-`NaN`
+values.
+
+.`unweightedAvg(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the average of
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_movavg": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "MovingFunctions.unweightedAvg(values)"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== linearWeightedAvg Function
+
+The `linearWeightedAvg` function assigns a linear weighting to points in the series, such that "older" datapoints (e.g. those at
+the beginning of the window) contribute a linearly less amount to the total average. The linear weighting helps reduce
+the "lag" behind the data's mean, since older points have less influence.
+
+If the window is empty, or all values are `null`/`NaN`, `NaN` is returned as the result.
+
+.`linearWeightedAvg(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the average of
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_movavg": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "MovingFunctions.linearWeightedAvg(values)"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== ewma Function
+
+The `ewma` function (aka "single-exponential") is similar to the `linearWeightedAvg` function,
+except older data-points become exponentially less important,
+rather than linearly less important. The speed at which the importance decays can be controlled with an `alpha`
+setting. Small values make the weight decay slowly, which provides greater smoothing and takes into account a larger
+portion of the window. Larger values make the weight decay quickly, which reduces the impact of older values on the
+moving average. This tends to make the moving average track the data more closely but with less smoothing.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is count of non-`null`,non-`NaN`
+values.
+
+.`ewma(double[] values, double alpha)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the average of
+|`alpha` |Exponential decay
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_movavg": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "MovingFunctions.ewma(values, 0.3)"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== holt Function
+
+The `holt` function (aka "double exponential") incorporates a second exponential term which
+tracks the data's trend. Single exponential does not perform well when the data has an underlying linear trend. The
+double exponential model calculates two values internally: a "level" and a "trend".
+
+The level calculation is similar to `ewma`, and is an exponentially weighted view of the data. The difference is
+that the previously smoothed value is used instead of the raw value, which allows it to stay close to the original series.
+The trend calculation looks at the difference between the current and last value (e.g. the slope, or trend, of the
+smoothed data). The trend value is also exponentially weighted.
+
+Values are produced by multiplying the level and trend components.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is count of non-`null`,non-`NaN`
+values.
+
+.`holt(double[] values, double alpha, double beta)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to smooth
+|`alpha` |Level decay value
+|`beta` |Trend decay value
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_movavg": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "MovingFunctions.holt(values, 0.3, 0.1)"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+In practice, the `alpha` value behaves very similarly in `holt` as in `ewma`: small values produce more smoothing
+and more lag, while larger values produce closer tracking and less lag. The value of `beta` is often difficult
+to see. Small values emphasize long-term trends (such as a constant linear trend in the whole series), while larger
+values emphasize short-term trends.
+
+===== holtWinters Function
+
+The `holtWinters` function (aka "triple exponential") incorporates a third exponential term which
+tracks the seasonal aspect of your data. This aggregation therefore smooths based on three components: "level", "trend"
+and "seasonality".
+
+The level and trend calculation is identical to `holt`. The seasonal calculation looks at the difference between
+the current point, and the point one period earlier.
+
+Holt-Winters requires a little more handholding than the other moving averages. You need to specify the "periodicity"
+of your data: e.g. if your data has cyclic trends every 7 days, you would set `period = 7`. Similarly if there was
+a monthly trend, you would set it to `30`. There is currently no periodicity detection, although that is planned
+for future enhancements.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is count of non-`null`,non-`NaN`
+values.
+
+.`holtWinters(double[] values, double alpha, double beta, double gamma, int period, boolean multiplicative)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to smooth
+|`alpha` |Level decay value
+|`beta` |Trend decay value
+|`gamma` |Seasonality decay value
+|`period` |The periodicity of the data
+|`multiplicative` |True if you wish to use multiplicative holt-winters, false to use additive
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_movavg": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "if (values.length > 5*2) {MovingFunctions.holtWinters(values, 0.3, 0.1, 0.1, 5, false)}"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+[WARNING]
+======
+Multiplicative Holt-Winters works by dividing each data point by the seasonal value. This is problematic if any of
+your data is zero, or if there are gaps in the data (since this results in a divide-by-zero). To combat this, the
+multiplicative Holt-Winters pads all values by a very small amount (1*10^-10^) so that all values are non-zero. This affects
+the result, but only minimally.
+
+===== "Cold Start"
+
+Unfortunately, due to the nature of Holt-Winters, it requires two periods of data to "bootstrap" the algorithm. This
+means that your `window` must always be *at least* twice the size of your period. An exception will be thrown if it
+isn't. It also means that Holt-Winters will not emit a value for the first `2 * period` buckets; the current algorithm
+does not backcast.
+
+You'll notice in the above examples we have an `if ()` statement checking the size of `values`. This is checking to make sure
+we have two periods' worth of data (`5 * 2`, where 5 is the period specified in the `holtWinters` function) before calling
+the holt-winters function.
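+
+As a final note, `moving_fn` scripts are regular Painless scripts, so you are not limited to the built-in
+`MovingFunctions` helpers: any logic over the `values` array works. A minimal sketch of a hand-rolled sum that
+skips gaps (equivalent in spirit to `MovingFunctions.sum(values)`):
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+    "size": 0,
+    "aggs": {
+        "my_date_histo":{
+            "date_histogram":{
+                "field":"date",
+                "interval":"1M"
+            },
+            "aggs":{
+                "the_sum":{
+                    "sum":{ "field": "price" }
+                },
+                "the_movavg": {
+                    "moving_fn": {
+                        "buckets_path": "the_sum",
+                        "window": 10,
+                        "script": "double total = 0.0; for (double v : values) { if (Double.isNaN(v) == false) { total += v; } } return total;"
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]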
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
index 0364ad667efc7..4ebcf8bfb82d2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
@@ -32,6 +32,7 @@
 import org.elasticsearch.painless.spi.PainlessExtension;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.plugins.ActionPlugin;
+import org.elasticsearch.painless.spi.WhitelistLoader;
 import org.elasticsearch.plugins.ExtensiblePlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.ScriptPlugin;
@@ -39,6 +40,7 @@
 import org.elasticsearch.rest.RestHandler;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptEngine;
+import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -55,18 +57,34 @@
  */
 public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin, ActionPlugin {
 
-    private final Map<ScriptContext<?>, List<Whitelist>> extendedWhitelists = new HashMap<>();
+    private static final Map<ScriptContext<?>, List<Whitelist>> whitelists;
+
+    /*
+     * Contexts from Core that need custom whitelists can add them to the map below.
+     * Whitelist resources should be added as appropriately named, separate files
+     * under Painless' resources
+     */
+    static {
+        Map<ScriptContext<?>, List<Whitelist>> map = new HashMap<>();
+
+        // Moving Function Pipeline Agg
+        List<Whitelist> movFn = new ArrayList<>(Whitelist.BASE_WHITELISTS);
+        movFn.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.elasticsearch.aggs.movfn.txt"));
+        map.put(MovingFunctionScript.CONTEXT, movFn);
+
+        whitelists = map;
+    }
 
     @Override
     public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
         Map<ScriptContext<?>, List<Whitelist>> contextsWithWhitelists = new HashMap<>();
         for (ScriptContext<?> context : contexts) {
             // we might have a context that only uses the base whitelists, so would not have been filled in by reloadSPI
-            List<Whitelist> whitelists = extendedWhitelists.get(context);
-            if (whitelists == null) {
-                whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS);
+            List<Whitelist> contextWhitelists = whitelists.get(context);
+            if (contextWhitelists == null) {
+                contextWhitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS);
             }
-            contextsWithWhitelists.put(context, whitelists);
+            contextsWithWhitelists.put(context, contextWhitelists);
         }
         return new PainlessScriptEngine(settings, contextsWithWhitelists);
     }
@@ -80,7 +98,7 @@ public List<Setting<?>> getSettings() {
     public void reloadSPI(ClassLoader loader) {
         for (PainlessExtension extension : ServiceLoader.load(PainlessExtension.class, loader)) {
             for (Map.Entry<ScriptContext<?>, List<Whitelist>> entry : extension.getContextWhitelists().entrySet()) {
-                List<Whitelist> existing = extendedWhitelists.computeIfAbsent(entry.getKey(),
+                List<Whitelist> existing = whitelists.computeIfAbsent(entry.getKey(),
                     c -> new ArrayList<>(Whitelist.BASE_WHITELISTS));
                 existing.addAll(entry.getValue());
             }
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt
new file mode 100644
index 0000000000000..a120b73820ada
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt
@@ -0,0 +1,32 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# + +# This file contains a whitelist for the Moving Function pipeline aggregator in core + +class org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions { + double max(double[]) + double min(double[]) + double sum(double[]) + double stdDev(double[], double) + double unweightedAvg(double[]) + double linearWeightedAvg(double[]) + double ewma(double[], double) + double holt(double[], double, double) + double holtWinters(double[], double, double, double, int, boolean) +} \ No newline at end of file diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml new file mode 100644 index 0000000000000..76b63e171692e --- /dev/null +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml @@ -0,0 +1,315 @@ +# Sanity integration test to make sure the custom context and whitelist work for moving_fn pipeline agg +# +setup: + - skip: + version: " - 6.3.99" + reason: "moving_fn added in 6.4.0" + - do: + indices.create: + index: test + body: + mappings: + _doc: + properties: + value_field: + type: integer + date: + type: date + + - do: + bulk: + refresh: true + body: + - index: + _index: test + _type: _doc + _id: 1 + - date: "2017-01-01T00:00:00" + value_field: 1 + - index: + _index: test + _type: _doc + _id: 2 + - date: "2017-01-02T00:00:00" + value_field: 2 + - index: + _index: test + _type: _doc + _id: 3 + - date: "2017-01-03T00:00:00" + value_field: 3 + - index: + _index: test + _type: _doc + _id: 4 + - date: "2017-01-04T00:00:00" + value_field: 4 + - index: + _index: test + _type: _doc + _id: 5 + - date: "2017-01-05T00:00:00" + value_field: 5 + - index: + _index: test + _type: _doc + _id: 6 + - date: "2017-01-06T00:00:00" + value_field: 6 + + - do: + indices.refresh: + index: [test] + +--- +"max": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.max(values)" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + - is_false: aggregations.the_histo.buckets.0.the_mov_fn.value + - match: { aggregations.the_histo.buckets.1.the_mov_fn.value: 1.0 } + - match: { aggregations.the_histo.buckets.2.the_mov_fn.value: 2.0 } + - match: { aggregations.the_histo.buckets.3.the_mov_fn.value: 3.0 } + - match: { aggregations.the_histo.buckets.4.the_mov_fn.value: 4.0 } + - match: { aggregations.the_histo.buckets.5.the_mov_fn.value: 5.0 } + +--- +"min": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.min(values)" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + - is_false: aggregations.the_histo.buckets.0.the_mov_fn.value + - match: { aggregations.the_histo.buckets.1.the_mov_fn.value: 1.0 } + - match: { aggregations.the_histo.buckets.2.the_mov_fn.value: 1.0 } + - match: { aggregations.the_histo.buckets.3.the_mov_fn.value: 1.0 } + - match: { aggregations.the_histo.buckets.4.the_mov_fn.value: 2.0 } + - match: { aggregations.the_histo.buckets.5.the_mov_fn.value: 3.0 } + +--- +"sum": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: 
"value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.sum(values)" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + - match: { aggregations.the_histo.buckets.0.the_mov_fn.value: 0.0 } + - match: { aggregations.the_histo.buckets.1.the_mov_fn.value: 1.0 } + - match: { aggregations.the_histo.buckets.2.the_mov_fn.value: 3.0 } + - match: { aggregations.the_histo.buckets.3.the_mov_fn.value: 6.0 } + - match: { aggregations.the_histo.buckets.4.the_mov_fn.value: 9.0 } + - match: { aggregations.the_histo.buckets.5.the_mov_fn.value: 12.0 } + +--- +"unweightedAvg": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.unweightedAvg(values)" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + + +--- +"linearWeightedAvg": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.linearWeightedAvg(values)" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + + +--- +"ewma": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.ewma(values, 0.1)" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + + +--- +"holt": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.holt(values, 0.1, 0.1)" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + + +--- +"holtWinters": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 1 + script: "if (values.length > 1) { MovingFunctions.holtWinters(values, 0.1, 0.1, 0.1, 1, true)}" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + +--- +"stdDev": + + - do: + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: 3 + script: "MovingFunctions.stdDev(values, MovingFunctions.unweightedAvg(values))" + + - match: { hits.total: 6 } + - length: { hits.hits: 0 } + + + + + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml new file mode 100644 index 0000000000000..f5b80509db895 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml @@ -0,0 +1,46 @@ +setup: + - skip: + version: " - 6.3.99" + reason: "moving_fn added in 6.4.0" + +--- +"Bad window": + + - do: + catch: /\[window\] must be a positive, non-zero integer\./ + search: + body: + size: 0 + aggs: + the_histo: + date_histogram: + field: "date" + interval: "1d" + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: 
+ buckets_path: "the_avg" + window: -1 + script: "MovingFunctions.windowMax(values)" + +--- +"Not under date_histo": + + - do: + catch: /\[window\] must be a positive, non-zero integer\./ + search: + body: + size: 0 + aggs: + the_avg: + avg: + field: "value_field" + the_mov_fn: + moving_fn: + buckets_path: "the_avg" + window: -1 + script: "MovingFunctions.windowMax(values)" + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml index 9ea856c856e93..2fd1526be034a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/80_typed_keys.yml @@ -198,7 +198,13 @@ setup: --- "Test typed keys parameter for date_histogram aggregation and max_bucket pipeline aggregation": + - skip: + features: warnings + version: " - 6.3.99" + reason: "deprecation added in 6.4.0" - do: + warnings: + - 'The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.' search: typed_keys: true body: diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 727651be6a565..5afb6ad28d7ab 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; /** * Manages building {@link ScriptService}. 
@@ -48,7 +49,8 @@ public class ScriptModule { FilterScript.CONTEXT, SimilarityScript.CONTEXT, SimilarityWeightScript.CONTEXT, - TemplateScript.CONTEXT + TemplateScript.CONTEXT, + MovingFunctionScript.CONTEXT ).collect(Collectors.toMap(c -> c.name, Function.identity())); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 7fd4d1d5c450a..a689c7d1b5505 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -221,6 +221,8 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovFnPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovFnPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; @@ -515,6 +517,11 @@ private void registerPipelineAggregations(List plugins) { SerialDiffPipelineAggregationBuilder::new, SerialDiffPipelineAggregator::new, SerialDiffPipelineAggregationBuilder::parse)); + registerPipelineAggregation(new PipelineAggregationSpec( + MovFnPipelineAggregationBuilder.NAME, + MovFnPipelineAggregationBuilder::new, + MovFnPipelineAggregator::new, + MovFnPipelineAggregationBuilder::parse)); registerFromPlugin(plugins, SearchPlugin::getPipelineAggregations, this::registerPipelineAggregation); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java index e827275a2182d..ce87dd797d6e0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovFnPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregationBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -78,6 +79,10 @@ public static PercentilesBucketPipelineAggregationBuilder percentilesBucket(Stri return new PercentilesBucketPipelineAggregationBuilder(name, bucketsPath); } + /** + * @deprecated use {@link #movingFunction(String, Script, String, int)} instead + */ + @Deprecated public static MovAvgPipelineAggregationBuilder movingAvg(String name, String bucketsPath) { return new MovAvgPipelineAggregationBuilder(name, bucketsPath); } @@ -114,4 +119,9 @@ public static CumulativeSumPipelineAggregationBuilder cumulativeSum(String name, public static SerialDiffPipelineAggregationBuilder diff(String name, String bucketsPath) { return new SerialDiffPipelineAggregationBuilder(name, 
bucketsPath); } + + public static MovFnPipelineAggregationBuilder movingFunction(String name, Script script, + String bucketsPaths, int window) { + return new MovFnPipelineAggregationBuilder(name, bucketsPaths, script, window); + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java index d2210e1da322c..8fdc6d3eb62b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java @@ -23,6 +23,8 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -59,6 +61,8 @@ public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregatio public static final ParseField SETTINGS = new ParseField("settings"); private static final ParseField PREDICT = new ParseField("predict"); private static final ParseField MINIMIZE = new ParseField("minimize"); + private static final DeprecationLogger DEPRECATION_LOGGER + = new DeprecationLogger(Loggers.getLogger(MovAvgPipelineAggregationBuilder.class)); private String format; private GapPolicy gapPolicy = GapPolicy.SKIP; @@ -318,6 +322,8 @@ public static MovAvgPipelineAggregationBuilder parse( Integer predict = null; Boolean minimize = null; + DEPRECATION_LOGGER.deprecated("The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation."); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java index 26fb0333b188b..027536854ccfb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; @@ -90,7 +91,7 @@ public MovAvgModel clone() { } @Override - protected double[] doPredict(Collection values, int numPredictions) { + protected double[] doPredict(Collection values, int numPredictions) { double[] predictions = new double[numPredictions]; // EWMA just emits the same final prediction repeatedly. 
@@ -100,19 +101,8 @@ protected double[] doPredict(Collection values, int numPre } @Override - public double next(Collection values) { - double avg = 0; - boolean first = true; - - for (T v : values) { - if (first) { - avg = v.doubleValue(); - first = false; - } else { - avg = (v.doubleValue() * alpha) + (avg * (1 - alpha)); - } - } - return avg; + public double next(Collection values) { + return MovingFunctions.ewma(values.stream().mapToDouble(Double::doubleValue).toArray(), alpha); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java index 1819333738502..d029bde29ad95 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; @@ -116,16 +117,15 @@ public MovAvgModel clone() { * * @param values Collection of numerics to movingAvg, usually windowed * @param numPredictions Number of newly generated predictions to return - * @param Type of numeric * @return Returns an array of doubles, since most smoothing methods operate on floating points */ @Override - protected double[] doPredict(Collection values, int numPredictions) { + protected double[] doPredict(Collection values, int numPredictions) { return next(values, numPredictions); } @Override - public double next(Collection values) { + public double next(Collection values) { return next(values, 1)[0]; } @@ -135,47 +135,13 @@ public double next(Collection values) { * @param values Collection of values to calculate avg for * @param numForecasts number of forecasts into the future to return * - * @param Type T extending Number * @return Returns a Double containing the moving avg for the window */ - public double[] next(Collection values, int numForecasts) { - + public double[] next(Collection values, int numForecasts) { if (values.size() == 0) { return emptyPredictions(numForecasts); } - - // Smoothed value - double s = 0; - double last_s = 0; - - // Trend value - double b = 0; - double last_b = 0; - - int counter = 0; - - T last; - for (T v : values) { - last = v; - if (counter == 1) { - s = v.doubleValue(); - b = v.doubleValue() - last.doubleValue(); - } else { - s = alpha * v.doubleValue() + (1.0d - alpha) * (last_s + last_b); - b = beta * (s - last_s) + (1 - beta) * last_b; - } - - counter += 1; - last_s = s; - last_b = b; - } - - double[] forecastValues = new double[numForecasts]; - for (int i = 0; i < numForecasts; i++) { - forecastValues[i] = s + (i * b); - } - - return forecastValues; + return MovingFunctions.holtForecast(values.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta, numForecasts); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java index a750145e5f9ab..e7c2007955fd2 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; @@ -259,16 +260,15 @@ public boolean hasValue(int valuesAvailable) { * * @param values Collection of numerics to movingAvg, usually windowed * @param numPredictions Number of newly generated predictions to return - * @param Type of numeric * @return Returns an array of doubles, since most smoothing methods operate on floating points */ @Override - protected double[] doPredict(Collection values, int numPredictions) { + protected double[] doPredict(Collection values, int numPredictions) { return next(values, numPredictions); } @Override - public double next(Collection values) { + public double next(Collection values) { return next(values, 1)[0]; } @@ -278,88 +278,11 @@ public double next(Collection values) { * @param values Collection of values to calculate avg for * @param numForecasts number of forecasts into the future to return * - * @param Type T extending Number * @return Returns a Double containing the moving avg for the window */ - public double[] next(Collection values, int numForecasts) { - - if (values.size() < period * 2) { - // We need at least two full "seasons" to use HW - // This should have been caught earlier, we can't do anything now...bail - throw new AggregationExecutionException("Holt-Winters aggregation requires at least (2 * period == 2 * " - + period + " == "+(2 * period)+") data-points to function. 
Only [" + values.size() + "] were provided."); - } - - // Smoothed value - double s = 0; - double last_s; - - // Trend value - double b = 0; - double last_b = 0; - - // Seasonal value - double[] seasonal = new double[values.size()]; - - int counter = 0; - double[] vs = new double[values.size()]; - for (T v : values) { - vs[counter] = v.doubleValue() + padding; - counter += 1; - } - - // Initial level value is average of first season - // Calculate the slopes between first and second season for each period - for (int i = 0; i < period; i++) { - s += vs[i]; - b += (vs[i + period] - vs[i]) / period; - } - s /= period; - b /= period; - last_s = s; - - // Calculate first seasonal - if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { - Arrays.fill(seasonal, 0.0); - } else { - for (int i = 0; i < period; i++) { - seasonal[i] = vs[i] / s; - } - } - - for (int i = period; i < vs.length; i++) { - // TODO if perf is a problem, we can specialize a subclass to avoid conditionals on each iteration - if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { - s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); - } else { - s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); - } - - b = beta * (s - last_s) + (1 - beta) * last_b; - - if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { - seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; - } else { - seasonal[i] = gamma * (vs[i] - (last_s - last_b )) + (1 - gamma) * seasonal[i - period]; - } - - last_s = s; - last_b = b; - } - - double[] forecastValues = new double[numForecasts]; - for (int i = 1; i <= numForecasts; i++) { - int idx = values.size() - period + ((i - 1) % period); - - // TODO perhaps pad out seasonal to a power of 2 and use a mask instead of modulo? - if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { - forecastValues[i-1] = (s + (i * b)) * seasonal[idx]; - } else { - forecastValues[i-1] = s + (i * b) + seasonal[idx]; - } - } - - return forecastValues; + public double[] next(Collection values, int numForecasts) { + return MovingFunctions.holtWintersForecast(values.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, beta, gamma, period, padding, seasonalityType.equals(SeasonalityType.MULTIPLICATIVE), numForecasts); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java index 3eed0bf603baa..3859405218286 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; @@ -74,7 +75,7 @@ public MovAvgModel clone() { } @Override - protected double[] doPredict(Collection values, int numPredictions) { + protected double[] doPredict(Collection values, int numPredictions) { double[] predictions = new double[numPredictions]; // EWMA just emits the same final prediction repeatedly. 
@@ -84,17 +85,8 @@ protected double[] doPredict(Collection values, int numPr } @Override - public double next(Collection values) { - double avg = 0; - long totalWeight = 1; - long current = 1; - - for (T v : values) { - avg += v.doubleValue() * current; - totalWeight += current; - current += 1; - } - return avg / totalWeight; + public double next(Collection values) { + return MovingFunctions.linearWeightedAvg(values.stream().mapToDouble(Double::doubleValue).toArray()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java index 354434b65205f..f826c01adced1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java @@ -68,20 +68,18 @@ public boolean hasValue(int valuesAvailable) { * Returns the next value in the series, according to the underlying smoothing model * * @param values Collection of numerics to movingAvg, usually windowed - * @param Type of numeric * @return Returns a double, since most smoothing methods operate on floating points */ - public abstract double next(Collection values); + public abstract double next(Collection values); /** * Predicts the next `n` values in the series. * * @param values Collection of numerics to movingAvg, usually windowed * @param numPredictions Number of newly generated predictions to return - * @param Type of numeric * @return Returns an array of doubles, since most smoothing methods operate on floating points */ - public double[] predict(Collection values, int numPredictions) { + public double[] predict(Collection values, int numPredictions) { assert(numPredictions >= 1); // If there are no values, we can't do anything. Return an array of NaNs. 
@@ -97,10 +95,9 @@ public double[] predict(Collection values, int numPredicti * * @param values Collection of numerics to movingAvg, usually windowed * @param numPredictions Number of newly generated predictions to return - * @param Type of numeric * @return Returns an array of doubles, since most smoothing methods operate on floating points */ - protected abstract double[] doPredict(Collection values, int numPredictions); + protected abstract double[] doPredict(Collection values, int numPredictions); /** * Returns an empty set of predictions, filled with NaNs diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java index e30a59d288711..b54dba242f9f9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; @@ -72,7 +73,7 @@ public MovAvgModel clone() { } @Override - protected double[] doPredict(Collection values, int numPredictions) { + protected double[] doPredict(Collection values, int numPredictions) { double[] predictions = new double[numPredictions]; // Simple just emits the same final prediction repeatedly. @@ -82,12 +83,8 @@ protected double[] doPredict(Collection values, int numPre } @Override - public double next(Collection values) { - double avg = 0; - for (T v : values) { - avg += v.doubleValue(); - } - return avg / values.size(); + public double next(Collection values) { + return MovingFunctions.unweightedAvg(values.stream().mapToDouble(Double::doubleValue).toArray()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java new file mode 100644 index 0000000000000..d49da4658ae2d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java @@ -0,0 +1,264 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.pipeline.movfn; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.GAP_POLICY; + +public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { + public static final String NAME = "moving_fn"; + private static final ParseField WINDOW = new ParseField("window"); + + private final Script script; + private final String bucketsPathString; + private String format = null; + private GapPolicy gapPolicy = GapPolicy.SKIP; + private int window; + + private static final Function> PARSER + = name -> { + + @SuppressWarnings("unchecked") + ConstructingObjectParser parser = new ConstructingObjectParser<>( + MovFnPipelineAggregationBuilder.NAME, + false, + o -> new MovFnPipelineAggregationBuilder(name, (String) o[0], (Script) o[1], (int)o[2])); + + parser.declareString(ConstructingObjectParser.constructorArg(), BUCKETS_PATH_FIELD); + parser.declareField(ConstructingObjectParser.constructorArg(), + (p, c) -> Script.parse(p), Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING); + parser.declareInt(ConstructingObjectParser.constructorArg(), WINDOW); + + parser.declareString(MovFnPipelineAggregationBuilder::format, FORMAT); + parser.declareField(MovFnPipelineAggregationBuilder::gapPolicy, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return GapPolicy.parse(p.text().toLowerCase(Locale.ROOT), p.getTokenLocation()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, GAP_POLICY, ObjectParser.ValueType.STRING); + + return parser; + }; + + + public MovFnPipelineAggregationBuilder(String name, String bucketsPath, Script script, int window) { + super(name, NAME, new String[]{bucketsPath}); + this.bucketsPathString = bucketsPath; + this.script = script; + if (window <= 0) { + throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer."); + } + this.window = 
window; + } + + public MovFnPipelineAggregationBuilder(StreamInput in) throws IOException { + super(in, NAME); + bucketsPathString = in.readString(); + script = new Script(in); + format = in.readOptionalString(); + gapPolicy = GapPolicy.readFrom(in); + window = in.readInt(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(bucketsPathString); + script.writeTo(out); + out.writeOptionalString(format); + gapPolicy.writeTo(out); + out.writeInt(window); + } + + /** + * Sets the format to use on the output of this aggregation. + */ + public MovFnPipelineAggregationBuilder format(String format) { + if (Strings.isNullOrEmpty(format)) { + throw new IllegalArgumentException("[" + FORMAT.getPreferredName() + "] must not be null or an empty string."); + } + this.format = format; + return this; + } + + /** + * Gets the format to use on the output of this aggregation. + */ + public String format() { + return format; + } + + protected DocValueFormat formatter() { + if (format != null) { + return new DocValueFormat.Decimal(format); + } + return DocValueFormat.RAW; + } + + /** + * Sets the gap policy to use for this aggregation. + */ + public MovFnPipelineAggregationBuilder gapPolicy(GapPolicy gapPolicy) { + if (gapPolicy == null) { + throw new IllegalArgumentException("[" + GAP_POLICY.getPreferredName() + "] must not be null."); + } + this.gapPolicy = gapPolicy; + return this; + } + + /** + * Gets the gap policy to use for this aggregation. + */ + public GapPolicy gapPolicy() { + return gapPolicy; + } + + /** + * Returns the window size for this aggregation + */ + public int getWindow() { + return window; + } + + /** + * Sets the window size for this aggregation + */ + public void setWindow(int window) { + if (window <= 0) { + throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer."); + } + this.window = window; + } + + @Override + public void doValidate(AggregatorFactory parent, List aggFactories, + List pipelineAggregatoractories) { + if (window <= 0) { + throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer."); + } + if (parent instanceof HistogramAggregatorFactory) { + HistogramAggregatorFactory histoParent = (HistogramAggregatorFactory) parent; + if (histoParent.minDocCount() != 0) { + throw new IllegalStateException("parent histogram of moving_function aggregation [" + name + + "] must have min_doc_count of 0"); + } + } else if (parent instanceof DateHistogramAggregatorFactory) { + DateHistogramAggregatorFactory histoParent = (DateHistogramAggregatorFactory) parent; + if (histoParent.minDocCount() != 0) { + throw new IllegalStateException("parent histogram of moving_function aggregation [" + name + + "] must have min_doc_count of 0"); + } + } else { + throw new IllegalStateException("moving_function aggregation [" + name + + "] must have a histogram or date_histogram as parent"); + } + } + + @Override + protected PipelineAggregator createInternal(Map metaData) throws IOException { + return new MovFnPipelineAggregator(name, bucketsPathString, script, window, formatter(), gapPolicy, metaData); + } + + @Override + protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(BUCKETS_PATH.getPreferredName(), bucketsPathString); + builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script); + if (format != null) { + builder.field(FORMAT.getPreferredName(), format); + } + 
builder.field(GAP_POLICY.getPreferredName(), gapPolicy.getName()); + builder.field(WINDOW.getPreferredName(), window); + return builder; + } + + public static MovFnPipelineAggregationBuilder parse(String aggName, XContentParser parser) { + return PARSER.apply(aggName).apply(parser, null); + } + + /** + * Used for serialization testing, since pipeline aggs serialize themselves as a named object but are parsed + * as a regular object with the name passed in. + */ + static MovFnPipelineAggregationBuilder parse(XContentParser parser) throws IOException { + parser.nextToken(); + if (parser.currentToken().equals(XContentParser.Token.START_OBJECT)) { + parser.nextToken(); + if (parser.currentToken().equals(XContentParser.Token.FIELD_NAME)) { + String aggName = parser.currentName(); + parser.nextToken(); // "moving_fn" + parser.nextToken(); // start_object + return PARSER.apply(aggName).apply(parser, null); + } + } + + throw new IllegalStateException("Expected aggregation name but none found"); + } + + @Override + protected boolean overrideBucketsPath() { + return true; + } + + @Override + protected int doHashCode() { + return Objects.hash(bucketsPathString, script, format, gapPolicy, window); + } + + @Override + protected boolean doEquals(Object obj) { + MovFnPipelineAggregationBuilder other = (MovFnPipelineAggregationBuilder) obj; + return Objects.equals(bucketsPathString, other.bucketsPathString) + && Objects.equals(script, other.script) + && Objects.equals(format, other.format) + && Objects.equals(gapPolicy, other.gapPolicy) + && Objects.equals(window, other.window); + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregator.java new file mode 100644 index 0000000000000..fc0ba7afac065 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregator.java @@ -0,0 +1,149 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.pipeline.movfn; + +import org.elasticsearch.common.collect.EvictingQueue; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.script.Script; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory; +import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; +import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue; + +/** + * This pipeline aggregation gives the user the ability to script functions that "move" across a window + * of data, instead of single data points. It is the scripted version of MovingAvg pipeline agg. + * + * Through custom script contexts, we expose a number of convenience methods: + * + * - max + * - min + * - sum + * - unweightedAvg + * - linearWeightedAvg + * - ewma + * - holt + * - holtWintersMovAvg + * + * The user can also define any arbitrary logic via their own scripting, or combine with the above methods. + */ +public class MovFnPipelineAggregator extends PipelineAggregator { + private final DocValueFormat formatter; + private final BucketHelpers.GapPolicy gapPolicy; + private final Script script; + private final String bucketsPath; + private final int window; + + MovFnPipelineAggregator(String name, String bucketsPath, Script script, int window, DocValueFormat formatter, + BucketHelpers.GapPolicy gapPolicy, Map metadata) { + super(name, new String[]{bucketsPath}, metadata); + this.bucketsPath = bucketsPath; + this.script = script; + this.formatter = formatter; + this.gapPolicy = gapPolicy; + this.window = window; + } + + public MovFnPipelineAggregator(StreamInput in) throws IOException { + super(in); + script = new Script(in); + formatter = in.readNamedWriteable(DocValueFormat.class); + gapPolicy = BucketHelpers.GapPolicy.readFrom(in); + bucketsPath = in.readString(); + window = in.readInt(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + script.writeTo(out); + out.writeNamedWriteable(formatter); + gapPolicy.writeTo(out); + out.writeString(bucketsPath); + out.writeInt(window); + } + + @Override + public String getWriteableName() { + return MovFnPipelineAggregationBuilder.NAME; + } + + @Override + public InternalAggregation reduce(InternalAggregation aggregation, InternalAggregation.ReduceContext reduceContext) { + InternalMultiBucketAggregation + histo = (InternalMultiBucketAggregation) aggregation; + List buckets = histo.getBuckets(); + HistogramFactory factory = (HistogramFactory) histo; + + List newBuckets = new ArrayList<>(); + EvictingQueue values = new EvictingQueue<>(this.window); + + // Initialize the script + MovingFunctionScript.Factory scriptFactory = reduceContext.scriptService().compile(script, 
MovingFunctionScript.CONTEXT); + Map vars = new HashMap<>(); + if (script.getParams() != null) { + vars.putAll(script.getParams()); + } + + MovingFunctionScript executableScript = scriptFactory.newInstance(); + + for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { + Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy); + + // Default is to reuse existing bucket. Simplifies the rest of the logic, + // since we only change newBucket if we can add to it + MultiBucketsAggregation.Bucket newBucket = bucket; + + if (thisBucketValue != null && thisBucketValue.equals(Double.NaN) == false) { + + // The custom context mandates that the script returns a double (not Double) so we + // don't need null checks, etc. + double movavg = executableScript.execute(vars, values.stream().mapToDouble(Double::doubleValue).toArray()); + + List aggs = StreamSupport + .stream(bucket.getAggregations().spliterator(), false) + .map(InternalAggregation.class::cast) + .collect(Collectors.toList()); + aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList<>(), metaData())); + newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs)); + values.offer(thisBucketValue); + } + newBuckets.add(newBucket); + } + + return factory.createAggregation(newBuckets); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctionScript.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctionScript.java new file mode 100644 index 0000000000000..131f6eb0fab58 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctionScript.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.search.aggregations.pipeline.movfn;
+
+import org.elasticsearch.script.ScriptContext;
+
+import java.util.Collection;
+import java.util.Map;
+
+/**
+ * This class provides a custom script context for the Moving Function pipeline aggregation,
+ * so that we can expose a number of pre-baked moving functions like min, max, movavg, etc
+ */
+public abstract class MovingFunctionScript {
+    /**
+     * @param params The user-provided parameters
+     * @param values The values in the window that we are moving a function across
+     * @return A double representing the value from this particular window
+     */
+    public abstract double execute(Map<String, Object> params, double[] values);
+
+    public interface Factory {
+        MovingFunctionScript newInstance();
+    }
+
+    public static final String[] PARAMETERS = new String[] {"params", "values"};
+    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("moving-function", Factory.class);
+}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java
new file mode 100644
index 0000000000000..4261271d185c3
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java
@@ -0,0 +1,359 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.pipeline.movfn;
+
+import java.util.Arrays;
+import java.util.Collection;
+
+/**
+ * Provides a collection of static utility methods that can be referenced from MovingFunction script contexts
+ */
+public class MovingFunctions {
+
+    /**
+     * Find the maximum value in a window of values.
+     * If all values are missing/null/NaN, the return value will be NaN.
+     */
+    public static double max(double[] values) {
+        return Arrays.stream(values).max().orElse(Double.NaN);
+    }
+
+    /**
+     * Find the minimum value in a window of values.
+     * If all values are missing/null/NaN, the return value will be NaN.
+     */
+    public static double min(double[] values) {
+        return Arrays.stream(values).min().orElse(Double.NaN);
+    }
+
+    /**
+     * Find the sum of a window of values.
+     * If all values are missing/null/NaN, the return value will be 0.0.
+     */
+    public static double sum(double[] values) {
+        if (values.length == 0) {
+            return 0.0;
+        }
+        return Arrays.stream(values).map(value -> {
+            if (Double.isNaN(value) == false) {
+                return value;
+            }
+            return 0.0;
+        }).sum();
+    }
+
+    /**
+     * Calculate a simple unweighted (arithmetic) moving average.
+     *
+     * Only finite values are averaged. NaN or null are ignored.
+     * If all values are missing/null/NaN, the return value will be NaN.
+     * The average is based on the count of non-null, non-NaN values.
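+     * For example, unweightedAvg(new double[]{1, 2, Double.NaN, 4}) returns (1 + 2 + 4) / 3, as the NaN is skipped.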
+     */
+    public static double unweightedAvg(double[] values) {
+        double avg = 0.0;
+        long count = 0;
+        for (double v : values) {
+            if (Double.isNaN(v) == false) {
+                avg += v;
+                count += 1;
+            }
+        }
+        return count == 0 ? Double.NaN : avg / count;
+    }
+
+    /**
+     * Calculate a standard deviation over the values using the provided average.
+     *
+     * Only finite values are averaged. NaN or null are ignored.
+     * If all values are missing/null/NaN, the return value will be NaN.
+     * The average is based on the count of non-null, non-NaN values.
+     */
+    public static double stdDev(double[] values, double avg) {
+        if (Double.isNaN(avg)) {
+            return Double.NaN;
+        } else {
+            long count = 0;
+            double squaredMean = 0;
+            for (double v : values) {
+                if (Double.isNaN(v) == false) {
+                    squaredMean += Math.pow(v - avg, 2);
+                    count += 1;
+                }
+            }
+            return Math.sqrt(squaredMean / count);
+        }
+    }
+
+    /**
+     * Calculate a linearly weighted moving average, such that older values are
+     * linearly less important. "Time" is determined by position in collection.
+     *
+     * Only finite values are averaged. NaN or null are ignored.
+     * If all values are missing/null/NaN, the return value will be NaN.
+     * The average is based on the count of non-null, non-NaN values.
+     */
+    public static double linearWeightedAvg(double[] values) {
+        double avg = 0;
+        long totalWeight = 1;
+        long current = 1;
+
+        for (double v : values) {
+            if (Double.isNaN(v) == false) {
+                avg += v * current;
+                totalWeight += current;
+                current += 1;
+            }
+        }
+        return totalWeight == 1 ? Double.NaN : avg / totalWeight;
+    }
+
+    /**
+     *
+     * Calculate an exponentially weighted moving average.
+     *
+     * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values
+     * (e.g. a random walk), while alpha = 0 retains infinite memory of past values (e.g.
+     * the series mean). Useful values are somewhere in between. Defaults to 0.5.
+     *
+     * Only finite values are averaged. NaN or null are ignored.
+     * If all values are missing/null/NaN, the return value will be NaN.
+     * The average is based on the count of non-null, non-NaN values.
+     *
+     * @param alpha A double between 0-1 inclusive, controls data smoothing
+     */
+    public static double ewma(double[] values, double alpha) {
+        double avg = Double.NaN;
+        boolean first = true;
+
+        for (double v : values) {
+            if (Double.isNaN(v) == false) {
+                if (first) {
+                    avg = v;
+                    first = false;
+                } else {
+                    avg = (v * alpha) + (avg * (1 - alpha));
+                }
+            }
+        }
+        return avg;
+    }
+
+    /**
+     * Calculate a doubly exponential weighted moving average.
+     *
+     * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values
+     * (e.g. a random walk), while alpha = 0 retains infinite memory of past values (e.g.
+     * the series mean). Useful values are somewhere in between. Defaults to 0.5.
+     *
+     * Beta is equivalent to alpha, but controls the smoothing of the trend instead of the data.
+     *
+     * Only finite values are averaged. NaN or null are ignored.
+     * If all values are missing/null/NaN, the return value will be NaN.
+     * The average is based on the count of non-null, non-NaN values.
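+     * For example, with alpha = 0.5 and beta = 0.5, the window [1, 2, 3] yields 2.375 (per the implementation below).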
+ * + * @param alpha A double between 0-1 inclusive, controls data smoothing + * @param beta a double between 0-1 inclusive, controls trend smoothing + */ + public static double holt(double[] values, double alpha, double beta) { + if (values.length == 0) { + return Double.NaN; + } + + return holtForecast(values, alpha, beta, 1)[0]; + } + + /** + * Version of holt that can "forecast", not exposed as a whitelisted function for moving_fn scripts, but + * here as compatibility/code sharing for existing moving_avg agg. Can be removed when moving_avg is gone. + */ + public static double[] holtForecast(double[] values, double alpha, double beta, int numForecasts) { + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + int counter = 0; + + Double last; + for (double v : values) { + if (Double.isNaN(v) == false) { + last = v; + if (counter == 0) { + s = v; + b = v - last; + } else { + s = alpha * v + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + } + + counter += 1; + last_s = s; + last_b = b; + } + } + + if (counter == 0) { + return emptyPredictions(numForecasts); + } + + double[] forecastValues = new double[numForecasts]; + for (int i = 0; i < numForecasts; i++) { + forecastValues[i] = s + (i * b); + } + + return forecastValues; + } + + /** + * Calculate a triple exponential weighted moving average + * + * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values + * (e.g. a random walk), while alpha = 0 retains infinite memory of past values (e.g. + * the series mean). Useful values are somewhere in between. Defaults to 0.5. + * + * Beta is equivalent to alpha, but controls the smoothing of the trend instead of the data. + * Gamma is equivalent to alpha, but controls the smoothing of the seasonality instead of the data + * + * Only finite values are averaged. NaN or null are ignored. + * If all values are missing/null/NaN, the return value will be NaN + * The average is based on the count of non-null, non-NaN values. + * + * @param alpha A double between 0-1 inclusive, controls data smoothing + * @param beta a double between 0-1 inclusive, controls trend smoothing + * @param gamma a double between 0-1 inclusive, controls seasonality smoothing + * @param period the expected periodicity of the data + * @param multiplicative true if multiplicative HW should be used. False for additive + */ + public static double holtWinters(double[] values, double alpha, double beta, double gamma, + int period, boolean multiplicative) { + + if (values.length == 0) { + return Double.NaN; + } + + double padding = multiplicative ? 0.0000000001 : 0.0; + return holtWintersForecast(values, alpha, beta, gamma, period, padding, multiplicative, 1)[0]; + } + + /** + * Version of holt-winters that can "forecast", not exposed as a whitelisted function for moving_fn scripts, but + * here as compatibility/code sharing for existing moving_avg agg. Can be removed when moving_avg is gone. + */ + public static double[] holtWintersForecast(double[] values, double alpha, double beta, double gamma, + int period, double padding, boolean multiplicative, int numForecasts) { + if (values.length < period * 2) { + // We need at least two full "seasons" to use HW + // This should have been caught earlier, we can't do anything now...bail + throw new IllegalArgumentException("Holt-Winters aggregation requires at least (2 * period == 2 * " + + period + " == "+(2 * period)+") data-points to function. 
Only [" + values.length + "] were provided."); + } + + // Smoothed value + double s = 0; + double last_s; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[values.length]; + + int counter = 0; + double[] vs = new double[values.length]; + for (double v : values) { + if (Double.isNaN(v) == false) { + vs[counter] = v + padding; + counter += 1; + } + } + + if (counter == 0) { + return emptyPredictions(numForecasts); + } + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i + period] - vs[i]) / period; + } + s /= period; + b /= period; + last_s = s; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + // TODO if perf is a problem, we can specialize a subclass to avoid conditionals on each iteration + if (multiplicative) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } else { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } + + b = beta * (s - last_s) + (1 - beta) * last_b; + + if (multiplicative) { + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } else { + seasonal[i] = gamma * (vs[i] - (last_s - last_b )) + (1 - gamma) * seasonal[i - period]; + } + + last_s = s; + last_b = b; + } + + double[] forecastValues = new double[numForecasts]; + for (int i = 1; i <= numForecasts; i++) { + int idx = values.length - period + ((i - 1) % period); + + // TODO perhaps pad out seasonal to a power of 2 and use a mask instead of modulo? 
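+            // Editorial note: idx wraps the forecast horizon back onto the last observed
+            // period, so forecast step i reuses the seasonal component from one period ago.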
+ if (multiplicative) { + forecastValues[i-1] = (s + (i * b)) * seasonal[idx]; + } else { + forecastValues[i-1] = s + (i * b) + seasonal[idx]; + } + } + + return forecastValues; + } + + /** + * Returns an empty set of predictions, filled with NaNs + * @param numPredictions Number of empty predictions to generate + */ + private static double[] emptyPredictions(int numPredictions) { + double[] predictions = new double[numPredictions]; + Arrays.fill(predictions, Double.NaN); + return predictions; + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 9b5bc7541f2c2..e89e15c631082 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -446,7 +446,7 @@ private void executeTestCase(boolean reduced, Query query, List dataset, InternalDateHistogram histogram; if (reduced) { - histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, fieldType); + histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, null, fieldType); } else { histogram = search(indexSearcher, query, aggregationBuilder, maxBucket, fieldType); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java index ba719219ee53b..7cb0b63688bba 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java @@ -120,8 +120,10 @@ public void testSameAggNames() throws IOException { valueFieldType.setName(VALUE_FIELD); valueFieldType.setHasDocValues(true); - avgResult = searchAndReduce(indexSearcher, query, avgBuilder, 10000, new MappedFieldType[]{fieldType, valueFieldType}); - histogramResult = searchAndReduce(indexSearcher, query, histo, 10000, new MappedFieldType[]{fieldType, valueFieldType}); + avgResult = searchAndReduce(indexSearcher, query, avgBuilder, 10000, null, + new MappedFieldType[]{fieldType, valueFieldType}); + histogramResult = searchAndReduce(indexSearcher, query, histo, 10000, null, + new MappedFieldType[]{fieldType, valueFieldType}); } // Finally, reduce the pipeline agg diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilderSerializationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilderSerializationTests.java new file mode 100644 index 0000000000000..218cbdf62ca05 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilderSerializationTests.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.pipeline.movfn;
+
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+
+import java.io.IOException;
+
+public class MovFnPipelineAggregationBuilderSerializationTests extends AbstractSerializingTestCase<MovFnPipelineAggregationBuilder> {
+
+    @Override
+    protected MovFnPipelineAggregationBuilder createTestInstance() {
+        return new MovFnPipelineAggregationBuilder(randomAlphaOfLength(10), "foo", new Script("foo"), randomIntBetween(1, 10));
+    }
+
+    @Override
+    protected Writeable.Reader<MovFnPipelineAggregationBuilder> instanceReader() {
+        return MovFnPipelineAggregationBuilder::new;
+    }
+
+    @Override
+    protected MovFnPipelineAggregationBuilder doParseInstance(XContentParser parser) throws IOException {
+        return MovFnPipelineAggregationBuilder.parse(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}
+
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java
new file mode 100644
index 0000000000000..4f9e653a20df6
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.search.aggregations.pipeline.movfn; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; +import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MovFnUnitTests extends AggregatorTestCase { + + private static final String DATE_FIELD = "date"; + private static final String INSTANT_FIELD = "instant"; + private static final String VALUE_FIELD = "value_field"; + + private static final List datasetTimes = Arrays.asList( + "2017-01-01T01:07:45", + "2017-01-02T03:43:34", + "2017-01-03T04:11:00", + "2017-01-04T05:11:31", + "2017-01-05T08:24:05", + "2017-01-06T13:09:32", + "2017-01-07T13:47:43", + "2017-01-08T16:14:34", + "2017-01-09T17:09:50", + "2017-01-10T22:55:46"); + + private static final List datasetValues = Arrays.asList(1,2,3,4,5,6,7,8,9,10); + + public void testMatchAllDocs() throws IOException { + Query query = new MatchAllDocsQuery(); + Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", Collections.emptyMap()); + + DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); + aggBuilder.dateHistogramInterval(DateHistogramInterval.DAY).field(DATE_FIELD); + aggBuilder.subAggregation(new AvgAggregationBuilder("avg").field(VALUE_FIELD)); + aggBuilder.subAggregation(new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, 3)); + + executeTestCase(query, aggBuilder, histogram -> { + assertEquals(10, histogram.getBuckets().size()); + List buckets = histogram.getBuckets(); + for (int i = 0; i < buckets.size(); i++) { + if (i == 0) { + assertThat(((InternalSimpleValue)(buckets.get(i).getAggregations().get("mov_fn"))).value(), equalTo(Double.NaN)); + } else { + assertThat(((InternalSimpleValue)(buckets.get(i).getAggregations().get("mov_fn"))).value(), equalTo(((double) i))); + } + + } + }, 1000, script); + } + + + @SuppressWarnings("unchecked") + private void executeTestCase(Query query, + DateHistogramAggregationBuilder 
aggBuilder, + Consumer verify, + int maxBucket, Script script) throws IOException { + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + int counter = 0; + for (String date : datasetTimes) { + if (frequently()) { + indexWriter.commit(); + } + + long instant = asLong(date); + document.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); + document.add(new LongPoint(INSTANT_FIELD, instant)); + document.add(new NumericDocValuesField(VALUE_FIELD, datasetValues.get(counter))); + indexWriter.addDocument(document); + document.clear(); + counter += 1; + } + } + + ScriptService scriptService = mock(ScriptService.class); + MovingFunctionScript.Factory factory = mock(MovingFunctionScript.Factory.class); + when(scriptService.compile(script, MovingFunctionScript.CONTEXT)).thenReturn(factory); + + MovingFunctionScript scriptInstance = new MovingFunctionScript() { + @Override + public double execute(Map params, double[] values) { + assertNotNull(values); + return MovingFunctions.max(values); + } + }; + + when(factory.newInstance()).thenReturn(scriptInstance); + + try (IndexReader indexReader = DirectoryReader.open(directory)) { + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + DateFieldMapper.Builder builder = new DateFieldMapper.Builder("_name"); + DateFieldMapper.DateFieldType fieldType = builder.fieldType(); + fieldType.setHasDocValues(true); + fieldType.setName(aggBuilder.field()); + + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + valueFieldType.setHasDocValues(true); + valueFieldType.setName("value_field"); + + InternalDateHistogram histogram; + histogram = searchAndReduce(indexSearcher, query, aggBuilder, maxBucket, scriptService, + new MappedFieldType[]{fieldType, valueFieldType}); + verify.accept(histogram); + } + } + } + + private static long asLong(String dateTime) { + return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis(); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java new file mode 100644 index 0000000000000..0a0f9d6ae3759 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java @@ -0,0 +1,684 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.pipeline.movfn; + +import org.elasticsearch.common.collect.EvictingQueue; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; + +import static org.hamcrest.Matchers.equalTo; + +public class MovFnWhitelistedFunctionTests extends ESTestCase { + + public void testWindowMax() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + double randValue = randomDouble(); + double expected = -Double.MAX_VALUE; + + if (i == 0) { + window.offer(randValue); + continue; + } + + for (double value : window) { + expected = Math.max(expected, value); + } + + double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullWindowMax() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptyWindowMax() { + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.max(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testWindowMin() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + double randValue = randomDouble(); + double expected = Double.MAX_VALUE; + + if (i == 0) { + window.offer(randValue); + continue; + } + + for (double value : window) { + expected = Math.min(expected, value); + } + + double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullWindowMin() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? 
Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptyWindowMin() { + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.min(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testWindowSum() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + double randValue = randomDouble(); + double expected = 0; + + if (i == 0) { + window.offer(randValue); + continue; + } + + for (double value : window) { + expected += value; + } + + double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullWindowSum() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(0.0)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptyWindowSum() { + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.sum(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(0.0)); + } + + public void testSimpleMovAvg() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + double randValue = randomDouble(); + double expected = 0; + + if (i == 0) { + window.offer(randValue); + continue; + } + + for (double value : window) { + expected += value; + } + expected /= window.size(); + + double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullSimpleMovAvg() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? 
Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptySimpleMovAvg() { + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testSimpleMovStdDev() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + double randValue = randomDouble(); + double mean = 0; + + if (i == 0) { + window.offer(randValue); + continue; + } + + for (double value : window) { + mean += value; + } + mean /= window.size(); + + double expected = 0.0; + for (double value : window) { + expected += Math.pow(value - mean, 2); + } + expected = Math.sqrt(expected / window.size()); + + double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), mean); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullSimpleStdDev() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), + MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray())); + assertThat(actual, equalTo(Double.NaN)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptySimpleStdDev() { + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.stdDev(window.stream().mapToDouble(Double::doubleValue).toArray(), + MovingFunctions.unweightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray())); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testLinearMovAvg() { + + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + double randValue = randomDouble(); + + if (i == 0) { + window.offer(randValue); + continue; + } + + double avg = 0; + long totalWeight = 1; + long current = 1; + + for (double value : window) { + avg += value * current; + totalWeight += current; + current += 1; + } + double expected = avg / totalWeight; + double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullLinearMovAvg() { + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? 
Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptyLinearMovAvg() { + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.linearWeightedAvg(window.stream().mapToDouble(Double::doubleValue).toArray()); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testEWMAMovAvg() { + double alpha = randomDouble(); + + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + double randValue = randomDouble(); + + if (i == 0) { + window.offer(randValue); + continue; + } + + double avg = 0; + boolean first = true; + + for (double value : window) { + if (first) { + avg = value; + first = false; + } else { + avg = (value * alpha) + (avg * (1 - alpha)); + } + } + double expected = avg; + double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullEwmaMovAvg() { + double alpha = randomDouble(); + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); + assertThat(actual, equalTo(Double.NaN)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptyEwmaMovAvg() { + double alpha = randomDouble(); + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.ewma(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testHoltLinearMovAvg() { + double alpha = randomDouble(); + double beta = randomDouble(); + + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + double randValue = randomDouble(); + + if (i == 0) { + window.offer(randValue); + continue; + } + + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + int counter = 0; + + double last; + for (double value : window) { + last = value; + if (counter == 0) { + s = value; + b = value - last; + } else { + s = alpha * value + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + } + + counter += 1; + last_s = s; + last_b = b; + } + + double expected = s + (0 * b) ; + double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + window.offer(randValue); + } + } + + public void testNullHoltMovAvg() { + double alpha = randomDouble(); + double beta = randomDouble(); + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(1, 50); + + EvictingQueue window = new 
EvictingQueue<>(windowSize); + for (int i = 0; i < numValues; i++) { + + Double randValue = randomBoolean() ? Double.NaN : null; + + if (i == 0) { + if (randValue != null) { + window.offer(randValue); + } + continue; + } + + double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); + assertThat(actual, equalTo(Double.NaN)); + if (randValue != null) { + window.offer(randValue); + } + } + } + + public void testEmptyHoltMovAvg() { + double alpha = randomDouble(); + double beta = randomDouble(); + EvictingQueue window = new EvictingQueue<>(0); + double actual = MovingFunctions.holt(window.stream().mapToDouble(Double::doubleValue).toArray(), alpha, beta); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testHoltWintersMultiplicative() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v + 0.0000000001; + counter += 1; + } + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i + period] - vs[i]) / period; + } + s /= period; + b /= period; + last_s = s; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int idx = window.size() - period + (0 % period); + double expected = (s + (1 * b)) * seasonal[idx]; + double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, beta, gamma, period, true); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + } + + public void testNullHoltWintersMovAvg() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + int numValues = randomIntBetween(1, 100); + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(Double.NaN); + } + + for (int i = 0; i < numValues; i++) { + double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, beta, gamma, period, false); + assertThat(actual, equalTo(Double.NaN)); + } + } + + public void testEmptyHoltWintersMovAvg() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + EvictingQueue window = new EvictingQueue<>(0); + double actual = 
MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, beta, gamma, period, false); + assertThat(actual, equalTo(Double.NaN)); + } + + public void testHoltWintersAdditive() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + + EvictingQueue window = new EvictingQueue<>(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v; + counter += 1; + } + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i + period] - vs[i]) / period; + } + s /= period; + b /= period; + last_s = s; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + seasonal[i] = gamma * (vs[i] - (last_s - last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int idx = window.size() - period + (0 % period); + double expected = s + (1 * b) + seasonal[idx]; + double actual = MovingFunctions.holtWinters(window.stream().mapToDouble(Double::doubleValue).toArray(), + alpha, beta, gamma, period, false); + assertEquals(expected, actual, 0.01 * Math.abs(expected)); + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 68dc7b87055c3..6fda8481a1c2c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -312,7 +312,7 @@ private double holt(Collection window) { double last; for (double value : window) { last = value; - if (counter == 1) { + if (counter == 0) { s = value; b = value - last; } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java index 869a7cd58ed8e..659fad3f45ce6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java @@ -31,6 +31,8 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; +import java.io.IOException; + public class MovAvgTests extends BasePipelineAggregationTestCase { @Override @@ -94,6 +96,12 @@ protected MovAvgPipelineAggregationBuilder createTestAggregatorFactory() { return factory; } + @Override + public void testFromXContent() throws 
IOException { + super.testFromXContent(); + assertWarnings("The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation."); + } + public void testDefaultParsing() throws Exception { MovAvgPipelineAggregationBuilder expected = new MovAvgPipelineAggregationBuilder("commits_moving_avg", "commits"); String json = "{" + @@ -104,6 +112,7 @@ public void testDefaultParsing() throws Exception { " }" + "}"; PipelineAggregationBuilder newAgg = parse(createParser(JsonXContent.jsonXContent, json)); + assertWarnings("The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation."); assertNotSame(newAgg, expected); assertEquals(expected, newAgg); assertEquals(expected.hashCode(), newAgg.hashCode()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java index 34d203443604a..55c31013fd9de 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java @@ -246,7 +246,7 @@ public void testHoltLinearMovAvgModel() { double last; for (double value : window) { last = value; - if (counter == 1) { + if (counter == 0) { s = value; b = value - last; } else { @@ -292,7 +292,7 @@ public void testHoltLinearPredictionModel() { double last; for (double value : window) { last = value; - if (counter == 1) { + if (counter == 0) { s = value; b = value - last; } else { diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index da3757d77b46e..00303b344b92a 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -26,6 +26,8 @@ import org.elasticsearch.index.similarity.ScriptedSimilarity.Query; import org.elasticsearch.index.similarity.ScriptedSimilarity.Term; import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; +import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; @@ -109,6 +111,9 @@ public String execute() { } else if (context.instanceClazz.equals(SimilarityWeightScript.class)) { SimilarityWeightScript.Factory factory = mockCompiled::createSimilarityWeightScript; return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(MovingFunctionScript.class)) { + MovingFunctionScript.Factory factory = mockCompiled::createMovingFunctionScript; + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ -169,6 +174,10 @@ public SimilarityScript createSimilarityScript() { public SimilarityWeightScript createSimilarityWeightScript() { return new MockSimilarityWeightScript(script != null ? 
script : ctx -> 42d); } + + public MovingFunctionScript createMovingFunctionScript() { + return new MockMovingFunctionScript(); + } } public class MockExecutableScript implements ExecutableScript { @@ -327,4 +336,11 @@ public static Script mockInlineScript(final String script) { return new Script(ScriptType.INLINE, "mock", script, emptyMap()); } + public class MockMovingFunctionScript extends MovingFunctionScript { + @Override + public double execute(Map params, double[] values) { + return MovingFunctions.unweightedAvg(values); + } + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index c6a2f52783aa0..6b9ce05c1e4a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -60,6 +60,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.mock.orig.Mockito; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; @@ -135,8 +136,8 @@ protected AggregatorFactory createAggregatorFactory(Query query, when(mapperService.types()).thenReturn(Collections.singleton(TYPE_NAME)); when(searchContext.mapperService()).thenReturn(mapperService); IndexFieldDataService ifds = new IndexFieldDataService(indexSettings, - new IndicesFieldDataCache(Settings.EMPTY, new IndexFieldDataCache.Listener() { - }), circuitBreakerService, mapperService); + new IndicesFieldDataCache(Settings.EMPTY, new IndexFieldDataCache.Listener() { + }), circuitBreakerService, mapperService); when(searchContext.getForField(Mockito.any(MappedFieldType.class))) .thenAnswer(invocationOnMock -> ifds.getForField((MappedFieldType) invocationOnMock.getArguments()[0])); @@ -216,7 +217,7 @@ public boolean shouldCache(Query query) throws IOException { when(searchContext.numberOfShards()).thenReturn(1); when(searchContext.searcher()).thenReturn(contextIndexSearcher); when(searchContext.fetchPhase()) - .thenReturn(new FetchPhase(Arrays.asList(new FetchSourceSubPhase(), new DocValueFieldsFetchSubPhase()))); + .thenReturn(new FetchPhase(Arrays.asList(new FetchSourceSubPhase(), new DocValueFieldsFetchSubPhase()))); when(searchContext.getObjectMapper(anyString())).thenAnswer(invocation -> { String fieldName = (String) invocation.getArguments()[0]; if (fieldName.startsWith(NESTEDFIELD_PREFIX)) { @@ -237,12 +238,12 @@ public boolean shouldCache(Query query) throws IOException { protected IndexSettings createIndexSettings() { return new IndexSettings( - IndexMetaData.builder("_index").settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1) - .numberOfReplicas(0) - .creationDate(System.currentTimeMillis()) - .build(), - Settings.EMPTY + IndexMetaData.builder("_index").settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .creationDate(System.currentTimeMillis()) + .build(), + Settings.EMPTY ); } @@ -257,7 +258,7 @@ protected MapperService mapperServiceMock() { * sub-tests that need a more complex mock can overwrite this */ 
protected QueryShardContext queryShardContextMock(MapperService mapperService, MappedFieldType[] fieldTypes, - CircuitBreakerService circuitBreakerService) { + CircuitBreakerService circuitBreakerService) { QueryShardContext queryShardContext = mock(QueryShardContext.class); when(queryShardContext.getMapperService()).thenReturn(mapperService); for (MappedFieldType fieldType : fieldTypes) { @@ -300,7 +301,7 @@ protected A searchAndReduc Query query, AggregationBuilder builder, MappedFieldType... fieldTypes) throws IOException { - return searchAndReduce(searcher, query, builder, DEFAULT_MAX_BUCKETS, fieldTypes); + return searchAndReduce(searcher, query, builder, DEFAULT_MAX_BUCKETS, null, fieldTypes); } /** @@ -312,6 +313,7 @@ protected A searchAndReduc Query query, AggregationBuilder builder, int maxBucket, + ScriptService scriptService, MappedFieldType... fieldTypes) throws IOException { final IndexReaderContext ctx = searcher.getTopReaderContext(); @@ -366,7 +368,7 @@ protected A searchAndReduc // now do the final reduce MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(maxBucket); InternalAggregation.ReduceContext context = - new InternalAggregation.ReduceContext(root.context().bigArrays(), null, reduceBucketConsumer, true); + new InternalAggregation.ReduceContext(root.context().bigArrays(), scriptService, reduceBucketConsumer, true); @SuppressWarnings("unchecked") A internalAgg = (A) aggs.get(0).doReduce(aggs, context); From ca84b0ce5121cbf8a1b9ce6308e2ac808dd4ff23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Piotr=20Pr=C4=85dzy=C5=84ski?= Date: Thu, 17 May 2018 16:21:25 +0200 Subject: [PATCH 08/34] top_hits doc example description update (#30676) Example description does not fit example code. --- .../aggregations/metrics/tophits-aggregation.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index 0c19bf172bbf0..dc3222a5f371e 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -28,8 +28,8 @@ The top_hits aggregation returns regular search hits, because of this many per h ==== Example -In the following example we group the questions by tag and per tag we show the last active question. For each question -only the title field is being included in the source. +In the following example we group the sales by type and per type we show the last sale. +For each sale only the date and price fields are being included in the source. [source,js] -------------------------------------------------- From 2d5a690b45923a5dbdef4e9df69af711c05926b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Piotr=20Pr=C4=85dzy=C5=84ski?= Date: Thu, 17 May 2018 16:21:50 +0200 Subject: [PATCH 09/34] filters agg docs duplicated 'bucket' word removal (#30677) In one place word 'bucket' was duplicated. 
--- docs/reference/aggregations/bucket/filters-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc index 3ca86d1d7a096..b7e3b1edf10d2 100644 --- a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc @@ -124,7 +124,7 @@ The `other_bucket` parameter can be set to add a bucket to the response which wi not match any of the given filters. The value of this parameter can be as follows: `false`:: Does not compute the `other` bucket -`true`:: Returns the `other` bucket bucket either in a bucket (named `_other_` by default) if named filters are being used, +`true`:: Returns the `other` bucket either in a bucket (named `_other_` by default) if named filters are being used, or as the last bucket if anonymous filters are being used The `other_bucket_key` parameter can be used to set the key for the `other` bucket to a value other than the default `_other_`. Setting From a6d5b2fc27d2615cbc41283ffb202d0f192f4007 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Thu, 17 May 2018 17:51:26 +0300 Subject: [PATCH 10/34] [ML] Clean left behind model state docs (#30659) It is possible for state documents to be left behind in the state index. This may be because of bugs or uncontrollable scenarios. In any case, those documents may take up quite some disk space when they add up. This commit adds a step in the expired data deletion that is part of the daily maintenance service. The new step searches for state documents that do not belong to any of the current jobs and deletes them. Closes #30551 --- .../autodetect/state/CategorizerState.java | 10 ++ .../process/autodetect/state/ModelState.java | 10 ++ .../process/autodetect/state/Quantiles.java | 10 ++ .../state/CategorizerStateTests.java | 29 ++++ .../autodetect/state/ModelStateTests.java | 31 ++++ .../autodetect/state/QuantilesTests.java | 17 +++ .../TransportDeleteExpiredDataAction.java | 4 +- .../persistence/BatchedDocumentsIterator.java | 9 ++ .../BatchedStateDocIdsIterator.java | 36 +++++ .../ml/job/retention/UnusedStateRemover.java | 134 ++++++++++++++++++ .../ml/integration/DeleteExpiredDataIT.java | 36 ++++- 11 files changed, 324 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStateTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelStateTests.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedStateDocIdsIterator.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerState.java index 8c08300354698..2d68a6d7cf7a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerState.java @@ -37,6 +37,16 @@ public static final String v54DocumentPrefix(String jobId) { return jobId + "#"; } + /** + * Given the 
id of a categorizer state document it extracts the job id + * @param docId the categorizer state document id + * @return the job id or {@code null} if the id is not valid + */ + public static final String extractJobId(String docId) { + int suffixIndex = docId.lastIndexOf("_" + TYPE); + return suffixIndex <= 0 ? null : docId.substring(0, suffixIndex); + } + private CategorizerState() { } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelState.java index dce791a2b3d26..fbec7bb6c7291 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelState.java @@ -29,6 +29,16 @@ public static final String v54DocumentId(String jobId, String snapshotId, int do return jobId + "-" + snapshotId + "#" + docNum; } + /** + * Given the id of a state document it extracts the job id + * @param docId the state document id + * @return the job id or {@code null} if the id is not valid + */ + public static final String extractJobId(String docId) { + int suffixIndex = docId.lastIndexOf("_" + TYPE + "_"); + return suffixIndex <= 0 ? null : docId.substring(0, suffixIndex); + } + private ModelState() { } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/Quantiles.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/Quantiles.java index 0c167aadb7623..0b3ddcc7b5197 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/Quantiles.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/Quantiles.java @@ -60,6 +60,16 @@ public static String v54DocumentId(String jobId) { return jobId + "-" + TYPE; } + /** + * Given the id of a quantiles document it extracts the job id + * @param docId the quantiles document id + * @return the job id or {@code null} if the id is not valid + */ + public static final String extractJobId(String docId) { + int suffixIndex = docId.lastIndexOf("_" + TYPE); + return suffixIndex <= 0 ? null : docId.substring(0, suffixIndex); + } + private final String jobId; private final Date timestamp; private final String quantileState; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStateTests.java new file mode 100644 index 0000000000000..726288faffbc7 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/CategorizerStateTests.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.job.process.autodetect.state; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.Is.is; + +public class CategorizerStateTests extends ESTestCase { + + public void testExtractJobId_GivenValidDocId() { + assertThat(CategorizerState.extractJobId("foo_categorizer_state#1"), equalTo("foo")); + assertThat(CategorizerState.extractJobId("bar_categorizer_state#2"), equalTo("bar")); + assertThat(CategorizerState.extractJobId("foo_bar_categorizer_state#3"), equalTo("foo_bar")); + assertThat(CategorizerState.extractJobId("_categorizer_state_categorizer_state#3"), equalTo("_categorizer_state")); + } + + public void testExtractJobId_GivenInvalidDocId() { + assertThat(CategorizerState.extractJobId(""), is(nullValue())); + assertThat(CategorizerState.extractJobId("foo"), is(nullValue())); + assertThat(CategorizerState.extractJobId("_categorizer_state"), is(nullValue())); + assertThat(CategorizerState.extractJobId("foo_model_state_3141341341"), is(nullValue())); + } +} \ No newline at end of file diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelStateTests.java new file mode 100644 index 0000000000000..0e42a06111931 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelStateTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.job.process.autodetect.state; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.Is.is; + +public class ModelStateTests extends ESTestCase { + + public void testExtractJobId_GivenValidDocId() { + assertThat(ModelState.extractJobId("foo_model_state_3151373783#1"), equalTo("foo")); + assertThat(ModelState.extractJobId("bar_model_state_451515#3"), equalTo("bar")); + assertThat(ModelState.extractJobId("foo_bar_model_state_blah_blah"), equalTo("foo_bar")); + assertThat(ModelState.extractJobId("_model_state_model_state_11111"), equalTo("_model_state")); + } + + public void testExtractJobId_GivenInvalidDocId() { + assertThat(ModelState.extractJobId(""), is(nullValue())); + assertThat(ModelState.extractJobId("foo"), is(nullValue())); + assertThat(ModelState.extractJobId("_model_3141341341"), is(nullValue())); + assertThat(ModelState.extractJobId("_state_3141341341"), is(nullValue())); + assertThat(ModelState.extractJobId("_model_state_3141341341"), is(nullValue())); + assertThat(ModelState.extractJobId("foo_quantiles"), is(nullValue())); + } +} \ No newline at end of file diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/QuantilesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/QuantilesTests.java index 84c1a161f1ee4..146e3ed5bd539 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/QuantilesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/QuantilesTests.java @@ -15,9 +15,26 @@ import java.util.Date; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.Is.is; public class QuantilesTests extends AbstractSerializingTestCase { + public void testExtractJobId_GivenValidDocId() { + assertThat(Quantiles.extractJobId("foo_quantiles"), equalTo("foo")); + assertThat(Quantiles.extractJobId("bar_quantiles"), equalTo("bar")); + assertThat(Quantiles.extractJobId("foo_bar_quantiles"), equalTo("foo_bar")); + assertThat(Quantiles.extractJobId("_quantiles_quantiles"), equalTo("_quantiles")); + } + + public void testExtractJobId_GivenInvalidDocId() { + assertThat(Quantiles.extractJobId(""), is(nullValue())); + assertThat(Quantiles.extractJobId("foo"), is(nullValue())); + assertThat(Quantiles.extractJobId("_quantiles"), is(nullValue())); + assertThat(Quantiles.extractJobId("foo_model_state_3141341341"), is(nullValue())); + } + public void testEquals_GivenSameObject() { Quantiles quantiles = new Quantiles("foo", new Date(0L), "foo"); assertTrue(quantiles.equals(quantiles)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index 0e1ca9dd9aec3..9ab2132b61912 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.ml.job.retention.ExpiredModelSnapshotsRemover; import 
org.elasticsearch.xpack.ml.job.retention.ExpiredResultsRemover;
 import org.elasticsearch.xpack.ml.job.retention.MlDataRemover;
+import org.elasticsearch.xpack.ml.job.retention.UnusedStateRemover;
 import org.elasticsearch.xpack.ml.notifications.Auditor;
 import org.elasticsearch.xpack.ml.utils.VolatileCursorIterator;
@@ -56,7 +57,8 @@ private void deleteExpiredData(ActionListener<Boolean> listener) {
         List<MlDataRemover> dataRemovers = Arrays.asList(
                 new ExpiredResultsRemover(client, clusterService, auditor),
                 new ExpiredForecastsRemover(client),
-                new ExpiredModelSnapshotsRemover(client, clusterService)
+                new ExpiredModelSnapshotsRemover(client, clusterService),
+                new UnusedStateRemover(client, clusterService)
         );
         Iterator<MlDataRemover> dataRemoversIterator = new VolatileCursorIterator<>(dataRemovers);
         deleteExpiredData(dataRemoversIterator, listener);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java
index cf50579a0e517..d50a7c3f8c2ad 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIterator.java
@@ -97,6 +97,7 @@ private SearchResponse initScroll() {
         searchRequest.source(new SearchSourceBuilder()
                 .size(BATCH_SIZE)
                 .query(getQuery())
+                .fetchSource(shouldFetchSource())
                 .sort(SortBuilders.fieldSort(ElasticsearchMappings.ES_DOC)));

         SearchResponse searchResponse = client.search(searchRequest).actionGet();
@@ -123,6 +124,14 @@ private Deque<T> mapHits(SearchResponse searchResponse) {
         return results;
     }

+    /**
+     * Should fetch source? Defaults to {@code true}
+     * @return whether the source should be fetched
+     */
+    protected boolean shouldFetchSource() {
+        return true;
+    }
+
     /**
      * Get the query to use for the search
      * @return the search query
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedStateDocIdsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedStateDocIdsIterator.java
new file mode 100644
index 0000000000000..92235570b47b5
--- /dev/null
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedStateDocIdsIterator.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.job.persistence;
+
+import org.elasticsearch.client.Client;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.SearchHit;
+
+/**
+ * Iterates through the state doc ids
+ */
+public class BatchedStateDocIdsIterator extends BatchedDocumentsIterator<String> {
+
+    public BatchedStateDocIdsIterator(Client client, String index) {
+        super(client, index);
+    }
+
+    @Override
+    protected boolean shouldFetchSource() {
+        return false;
+    }
+
+    @Override
+    protected QueryBuilder getQuery() {
+        return QueryBuilders.matchAllQuery();
+    }
+
+    @Override
+    protected String map(SearchHit hit) {
+        return hit.getId();
+    }
+}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java
new file mode 100644
index 0000000000000..b07b025e09e56
--- /dev/null
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.job.retention;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.xpack.core.ml.MLMetadataField;
+import org.elasticsearch.xpack.core.ml.MlMetadata;
+import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
+import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
+import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState;
+import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelState;
+import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles;
+import org.elasticsearch.xpack.ml.job.persistence.BatchedStateDocIdsIterator;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Function;
+
+/**
+ * If for any reason a job is deleted but some of its state documents
+ * are left behind, this class deletes any unused documents stored
+ * in the .ml-state index.
+ */
+public class UnusedStateRemover implements MlDataRemover {
+
+    private static final Logger LOGGER = Loggers.getLogger(UnusedStateRemover.class);
+
+    private final Client client;
+    private final ClusterService clusterService;
+
+    public UnusedStateRemover(Client client, ClusterService clusterService) {
+        this.client = Objects.requireNonNull(client);
+        this.clusterService = Objects.requireNonNull(clusterService);
+    }
+
+    @Override
+    public void remove(ActionListener<Boolean> listener) {
+        try {
+            BulkRequestBuilder deleteUnusedStateRequestBuilder = findUnusedStateDocs();
+            if (deleteUnusedStateRequestBuilder.numberOfActions() > 0) {
+                executeDeleteUnusedStateDocs(deleteUnusedStateRequestBuilder, listener);
+            } else {
+                listener.onResponse(true);
+            }
+        } catch (Exception e) {
+            listener.onFailure(e);
+        }
+    }
+
+    private BulkRequestBuilder findUnusedStateDocs() {
+        Set<String> jobIds = getJobIds();
+        BulkRequestBuilder deleteUnusedStateRequestBuilder = client.prepareBulk();
+        BatchedStateDocIdsIterator stateDocIdsIterator = new BatchedStateDocIdsIterator(client, AnomalyDetectorsIndex.jobStateIndexName());
+        while (stateDocIdsIterator.hasNext()) {
+            Deque<String> stateDocIds = stateDocIdsIterator.next();
+            for (String stateDocId : stateDocIds) {
+                String jobId = JobIdExtractor.extractJobId(stateDocId);
+                if (jobId == null) {
+                    // not a managed state document id
+                    continue;
+                }
+                if (jobIds.contains(jobId) == false) {
+                    deleteUnusedStateRequestBuilder.add(new DeleteRequest(
+                            AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, stateDocId));
+                }
+            }
+        }
+        return deleteUnusedStateRequestBuilder;
+    }
+
+    private Set<String> getJobIds() {
+        ClusterState clusterState = clusterService.state();
+        MlMetadata mlMetadata = clusterState.getMetaData().custom(MLMetadataField.TYPE);
+        if (mlMetadata != null) {
+            return mlMetadata.getJobs().keySet();
+        }
+        return Collections.emptySet();
+    }
+
+    private void executeDeleteUnusedStateDocs(BulkRequestBuilder deleteUnusedStateRequestBuilder, ActionListener<Boolean> listener) {
+        LOGGER.info("Found [{}] unused state documents; attempting to delete",
+                deleteUnusedStateRequestBuilder.numberOfActions());
+        deleteUnusedStateRequestBuilder.execute(new ActionListener<BulkResponse>() {
+            @Override
+            public void onResponse(BulkResponse bulkItemResponses) {
+                if (bulkItemResponses.hasFailures()) {
+                    LOGGER.error("Some unused state documents could not be deleted due to failures: {}",
+                            bulkItemResponses.buildFailureMessage());
+                } else {
+                    LOGGER.info("Successfully deleted all unused state documents");
+                }
+                listener.onResponse(true);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                LOGGER.error("Error deleting unused model state documents: ", e);
+                listener.onFailure(e);
+            }
+        });
+    }
+
+    private static class JobIdExtractor {
+
+        private static List<Function<String, String>> extractors = Arrays.asList(
+                ModelState::extractJobId, Quantiles::extractJobId, CategorizerState::extractJobId);
+
+        private static String extractJobId(String docId) {
+            String jobId;
+            for (Function<String, String> extractor : extractors) {
+                jobId = extractor.apply(docId);
+                if (jobId != null) {
+                    return jobId;
+                }
+            }
+            return null;
+        }
+    }
+}
diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java
index 3a1fc2b0f6d4a..23bd5c7f7ddf1 100644
--- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java
+++ 
b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -8,12 +8,15 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.action.DeleteExpiredDataAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -21,6 +24,7 @@ import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats; @@ -31,13 +35,16 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { @@ -78,11 +85,16 @@ public void setUpData() throws IOException { } @After - public void tearDownData() throws Exception { + public void tearDownData() { client().admin().indices().prepareDelete(DATA_INDEX).get(); cleanUp(); } + public void testDeleteExpiredDataGivenNothingToDelete() throws Exception { + // Tests that nothing goes wrong when there's nothing to delete + client().execute(DeleteExpiredDataAction.INSTANCE, new DeleteExpiredDataAction.Request()).get(); + } + public void testDeleteExpiredData() throws Exception { registerJob(newJobBuilder("no-retention").setResultsRetentionDays(null).setModelSnapshotRetentionDays(null)); registerJob(newJobBuilder("results-retention").setResultsRetentionDays(1L).setModelSnapshotRetentionDays(null)); @@ -166,6 +178,18 @@ public void testDeleteExpiredData() throws Exception { assertThat(countForecastDocs(forecastStat.getJobId(), forecastStat.getForecastId()), equalTo(forecastStat.getRecordCount())); } + // Index some unused state documents (more than 10K to test scrolling works) + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int i = 0; i < 10010; i++) { + String docId = "non_existing_job_" + randomFrom("model_state_1234567#" + i, "quantiles", "categorizer_state#" + i); + IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexName(), "doc", docId); + indexRequest.source(Collections.emptyMap()); 
+ bulkRequestBuilder.add(indexRequest); + } + assertThat(bulkRequestBuilder.get().status(), equalTo(RestStatus.OK)); + + // Now call the action under test client().execute(DeleteExpiredDataAction.INSTANCE, new DeleteExpiredDataAction.Request()).get(); // We need to refresh to ensure the deletion is visible @@ -216,6 +240,16 @@ public void testDeleteExpiredData() throws Exception { assertThat(countForecastDocs(job.getId(), forecastId), equalTo(0L)); } } + + // Verify .ml-state doesn't contain unused state documents + SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexName()) + .setFetchSource(false) + .setSize(10000) + .get(); + assertThat(stateDocsResponse.getHits().getTotalHits(), lessThan(10000L)); + for (SearchHit hit : stateDocsResponse.getHits().getHits()) { + assertThat(hit.getId().startsWith("non_existing_job"), is(false)); + } } private static Job.Builder newJobBuilder(String id) { From 1f002040c827f0db82b197caa8aabe9d60faa86b Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Thu, 17 May 2018 07:59:46 -0700 Subject: [PATCH 11/34] [test] packaging: add windows boxes (#30402) Adds windows server 2012r2 and 2016 vagrant boxes to packaging tests. They can only be used if IDs for their images are specified, which are passed to gradle and then to vagrant via env variables. Adds options to the project property `vagrant.boxes` to choose between linux and windows boxes. Bats tests are run only on linux boxes, and portable packaging tests run on all boxes. Platform tests are only run on linux boxes since they are not being maintained. For #26741 --- TESTING.asciidoc | 155 +++++++---- Vagrantfile | 39 +++ .../gradle/vagrant/VagrantTestPlugin.groovy | 261 +++++++++++++----- 3 files changed, 331 insertions(+), 124 deletions(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 0454b0aceb111..267c401c20bf7 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -303,15 +303,16 @@ comma separated list of nodes to connect to (e.g. localhost:9300). A transport c be created based on that and used for all the before|after test operations, and to extract the http addresses of the nodes so that REST requests can be sent to them. -== Testing scripts +== Testing packaging -The simplest way to test scripts and the packaged distributions is to use -Vagrant. You can get started by following there five easy steps: +The packaging tests use Vagrant virtual machines to verify that installing +and running elasticsearch distributions works correctly on supported operating systems. +These tests should really only be run in vagrant vms because they're destructive. . Install Virtual Box and Vagrant. -. (Optional) Install vagrant-cachier to squeeze a bit more performance out of -the process: +. (Optional) Install https://github.com/fgrehm/vagrant-cachier[vagrant-cachier] to squeeze +a bit more performance out of the process: -------------------------------------- vagrant plugin install vagrant-cachier @@ -325,26 +326,39 @@ vagrant plugin install vagrant-cachier . Download and smoke test the VMs with `./gradlew vagrantSmokeTest` or `./gradlew -Pvagrant.boxes=all vagrantSmokeTest`. The first time you run this it will -download the base images and provision the boxes and immediately quit. If you -you this again it'll skip the download step. +download the base images and provision the boxes and immediately quit. Downloading all +the images may take a long time. 
After the images are already on your machine, they won't +be downloaded again unless they have been updated to a new version. . Run the tests with `./gradlew packagingTest`. This will cause Gradle to build the tar, zip, and deb packages and all the plugins. It will then run the tests on ubuntu-1404 and centos-7. We chose those two distributions as the default because they cover deb and rpm packaging and SyvVinit and systemd. -You can run on all the VMs by running `./gradlew -Pvagrant.boxes=all -packagingTest`. You can run a particular VM with a command like `./gradlew --Pvagrant.boxes=oel-7 packagingTest`. See `./gradlew tasks` for a complete list -of available vagrant boxes for testing. It's important to know that if you -interrupt any of these Gradle commands then the boxes will remain running and -you'll have to terminate them with `./gradlew stop`. +You can choose which boxes to test by setting the `-Pvagrant.boxes` project property. All of +the valid options for this property are: + +* `sample` - The default, only chooses ubuntu-1404 and centos-7 +* List of box names, comma separated (e.g. `oel-7,fedora-26`) - Chooses exactly the boxes listed. +* `linux-all` - All linux boxes. +* `windows-all` - All Windows boxes. If there are any Windows boxes which do not +have images available when this value is provided, the build will fail. +* `all` - All boxes we test. If there are any boxes (e.g. Windows) which do not have images +available when this value is provided, the build will fail. + +For a complete list of boxes on which tests can be run, run `./gradlew :qa:vagrant:listAllBoxes`. +For a list of boxes that have images available from your configuration, run +`./gradlew :qa:vagrant:listAvailableBoxes` + +Note that if you interrupt gradle in the middle of running these tasks, any boxes started +will remain running and you'll have to stop them manually with `./gradlew stop` or +`vagrant halt`. All the regular vagrant commands should just work so you can get a shell in a VM running trusty by running `vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404`. -These are the linux flavors the Vagrantfile currently supports: +These are the linux flavors supported, all of which we provide images for * ubuntu-1404 aka trusty * ubuntu-1604 aka xenial @@ -364,9 +378,42 @@ quality boxes available in vagrant atlas: * sles-11 -We're missing the following because our tests are very linux/bash centric: +=== Testing packaging on Windows + +The packaging tests also support Windows Server 2012R2 and Windows Server 2016. +Unfortunately we're not able to provide boxes for them in open source use +because of licensing issues. Any Virtualbox image that has WinRM and Powershell +enabled for remote users should work. + +Testing on Windows requires the https://github.com/criteo/vagrant-winrm[vagrant-winrm] plugin. + +------------------------------------ +vagrant plugin install vagrant-winrm +------------------------------------ + +Specify the image IDs of the Windows boxes to gradle with the following project +properties. They can be set in `~/.gradle/gradle.properties` like -* Windows Server 2012 +------------------------------------ +vagrant.windows-2012r2.id=my-image-id +vagrant.windows-2016.id=another-image-id +------------------------------------ + +or passed on the command line like `-Pvagrant.windows-2012r2.id=my-image-id` +`-Pvagrant.windows-2016=another-image-id` + +These properties are required for Windows support in all gradle tasks that +handle packaging tests. 
Either or both may be specified. Remember that to run tests +on these boxes, the project property `vagrant.boxes` still needs to be set to a +value that will include them. + +If you're running vagrant commands outside of gradle, specify the Windows boxes +with the environment variables + +* `VAGRANT_WINDOWS_2012R2_BOX` +* `VAGRANT_WINDOWS_2016_BOX` + +=== Testing VMs are disposable It's important to think of VMs like cattle. If they become lame you just shoot them and let vagrant reprovision them. Say you've hosed your precise VM: @@ -399,54 +446,62 @@ vagrant destroy -f `vagrant up` would normally start all the VMs but we've prevented that because that'd consume a ton of ram. -== Testing scripts more directly +=== Iterating on packaging tests -In general its best to stick to testing in vagrant because the bats scripts are -destructive. When working with a single package it's generally faster to run its -tests in a tighter loop than Gradle provides. In one window: +Running the packaging tests through gradle can take a while because it will start +and stop the VM each time. You can iterate faster by keeping the VM up and running +the tests directly. --------------------------------- -./gradlew :distribution:packages:rpm:assemble --------------------------------- +The packaging tests use a random seed to determine which past version to use for +testing upgrades. To use a single past version fix the test seed when running +the commands below (see <>) -and in another window: +First build the packaging tests and their dependencies ----------------------------------------------------- -vagrant up centos-7 --provider virtualbox && vagrant ssh centos-7 -cd $PACKAGING_ARCHIVES -sudo -E bats $BATS_TESTS/*rpm*.bats ----------------------------------------------------- +-------------------------------------------- +./gradlew :qa:vagrant:setupPackagingTest +-------------------------------------------- -If you wanted to retest all the release artifacts on a single VM you could: +Then choose the VM you want to test on and bring it up. For example, to bring +up Debian 9 use the gradle command below. Bringing the box up with vagrant directly +may not mount the packaging test project in the right place. Once the VM is up, ssh +into it -------------------------------------------------- -./gradlew setupPackagingTest -cd qa/vagrant; vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404 +-------------------------------------------- +./gradlew :qa:vagrant:vagrantDebian9#up +vagrant ssh debian-9 +-------------------------------------------- + +Now inside the VM, to run the https://github.com/sstephenson/bats[bats] packaging tests + +-------------------------------------------- cd $PACKAGING_ARCHIVES -sudo -E bats $BATS_TESTS/*.bats -------------------------------------------------- -You can also use Gradle to prepare the test environment and then starts a single VM: +# runs all bats tests +sudo bats $BATS_TESTS/*.bats -------------------------------------------------- -./gradlew vagrantFedora27#up -------------------------------------------------- +# you can also pass specific test files +sudo bats $BATS_TESTS/20_tar_package.bats $BATS_TESTS/25_tar_plugins.bats +-------------------------------------------- -Or any of vagrantCentos6#up, vagrantCentos7#up, vagrantDebian8#up, -vagrantDebian9#up, vagrantFedora26#up, vagrantFedora27#up, vagrantOel6#up, vagrantOel7#up, -vagrantOpensuse42#up,vagrantSles12#up, vagrantUbuntu1404#up, vagrantUbuntu1604#up. 
+To run the Java packaging tests, again inside the VM -Once up, you can then connect to the VM using SSH from the elasticsearch directory: +-------------------------------------------- +bash $PACKAGING_TESTS/run-tests.sh +-------------------------------------------- -------------------------------------------------- -vagrant ssh fedora-27 -------------------------------------------------- +or on Windows -Or from another directory: +-------------------------------------------- +powershell -File $Env:PACKAGING_TESTS/run-tests.ps1 +-------------------------------------------- -------------------------------------------------- -VAGRANT_CWD=/path/to/elasticsearch vagrant ssh fedora-27 -------------------------------------------------- +When you've made changes you want to test, keep the VM up and reload the tests and +distributions inside by running (on the host) + +-------------------------------------------- +./gradlew :qa:vagrant:clean :qa:vagrant:setupPackagingTest +-------------------------------------------- Note: Starting vagrant VM outside of the elasticsearch folder requires to indicates the folder that contains the Vagrantfile using the VAGRANT_CWD diff --git a/Vagrantfile b/Vagrantfile index 6761fec07dab2..1c259c1125f00 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -121,6 +121,26 @@ Vagrant.configure(2) do |config| sles_common config, box end end + + windows_2012r2_box = ENV['VAGRANT_WINDOWS_2012R2_BOX'] + if windows_2012r2_box && windows_2012r2_box.empty? == false + 'windows-2012r2'.tap do |box| + config.vm.define box, define_opts do |config| + config.vm.box = windows_2012r2_box + windows_common config, box + end + end + end + + windows_2016_box = ENV['VAGRANT_WINDOWS_2016_BOX'] + if windows_2016_box && windows_2016_box.empty? == false + 'windows-2016'.tap do |box| + config.vm.define box, define_opts do |config| + config.vm.box = windows_2016_box + windows_common config, box + end + end + end end def deb_common(config, name, extra: '') @@ -353,3 +373,22 @@ SUDOERS_VARS chmod 0440 /etc/sudoers.d/elasticsearch_vars SHELL end + +def windows_common(config, name) + config.vm.provision 'markerfile', type: 'shell', inline: <<-SHELL + $ErrorActionPreference = "Stop" + New-Item C:/is_vagrant_vm -ItemType file -Force | Out-Null + SHELL + + config.vm.provision 'set prompt', type: 'shell', inline: <<-SHELL + $ErrorActionPreference = "Stop" + $ps_prompt = 'function Prompt { "#{name}:$($ExecutionContext.SessionState.Path.CurrentLocation)>" }' + $ps_prompt | Out-File $PsHome/Microsoft.PowerShell_profile.ps1 + SHELL + + config.vm.provision 'set env variables', type: 'shell', inline: <<-SHELL + $ErrorActionPreference = "Stop" + [Environment]::SetEnvironmentVariable("PACKAGING_ARCHIVES", "C:/project/build/packaging/archives", "Machine") + [Environment]::SetEnvironmentVariable("PACKAGING_TESTS", "C:/project/build/packaging/tests", "Machine") + SHELL +end diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index bb85359ae3f07..72d71f25f69f2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -13,10 +13,12 @@ import org.gradle.api.tasks.Delete import org.gradle.api.tasks.Exec import org.gradle.api.tasks.TaskState +import static java.util.Collections.unmodifiableList + class VagrantTestPlugin implements Plugin { - /** All available boxes **/ - 
static List BOXES = [ + /** All Linux boxes that we test. These are all always supplied **/ + static final List LINUX_BOXES = unmodifiableList([ 'centos-6', 'centos-7', 'debian-8', @@ -29,26 +31,35 @@ class VagrantTestPlugin implements Plugin { 'sles-12', 'ubuntu-1404', 'ubuntu-1604' - ] + ]) + + /** All Windows boxes that we test, which may or may not be supplied **/ + static final List WINDOWS_BOXES = unmodifiableList([ + 'windows-2012r2', + 'windows-2016' + ]) + + /** All boxes that we test, some of which may not be supplied **/ + static final List ALL_BOXES = unmodifiableList(LINUX_BOXES + WINDOWS_BOXES) /** Boxes used when sampling the tests **/ - static List SAMPLE = [ + static final List SAMPLE = unmodifiableList([ 'centos-7', - 'ubuntu-1404', - ] + 'ubuntu-1404' + ]) /** All distributions to bring into test VM, whether or not they are used **/ - static List DISTRIBUTIONS = [ + static final List DISTRIBUTIONS = unmodifiableList([ 'archives:tar', 'archives:oss-tar', 'packages:rpm', 'packages:oss-rpm', 'packages:deb', 'packages:oss-deb' - ] + ]) /** Packages onboarded for upgrade tests **/ - static List UPGRADE_FROM_ARCHIVES = ['rpm', 'deb'] + static final List UPGRADE_FROM_ARCHIVES = unmodifiableList(['rpm', 'deb']) private static final PACKAGING_CONFIGURATION = 'packaging' private static final PACKAGING_TEST_CONFIGURATION = 'packagingTest' @@ -56,11 +67,19 @@ class VagrantTestPlugin implements Plugin { private static final String BATS_TEST_COMMAND ="cd \$PACKAGING_ARCHIVES && sudo bats --tap \$BATS_TESTS/*.$BATS" private static final String PLATFORM_TEST_COMMAND ="rm -rf ~/elasticsearch && rsync -r /elasticsearch/ ~/elasticsearch && cd ~/elasticsearch && ./gradlew test integTest" + /** Boxes that have been supplied and are available for testing **/ + List availableBoxes = [] + + /** extra env vars to pass to vagrant for box configuration **/ + Map vagrantBoxEnvVars = [:] + @Override void apply(Project project) { + collectAvailableBoxes(project) + // Creates the Vagrant extension for the project - project.extensions.create('esvagrant', VagrantPropertiesExtension, listVagrantBoxes(project)) + project.extensions.create('esvagrant', VagrantPropertiesExtension, listSelectedBoxes(project)) // Add required repositories for packaging tests configurePackagingArchiveRepositories(project) @@ -73,12 +92,17 @@ class VagrantTestPlugin implements Plugin { createVagrantTasks(project) if (project.extensions.esvagrant.boxes == null || project.extensions.esvagrant.boxes.size() == 0) { - throw new InvalidUserDataException('Vagrant boxes cannot be null or empty for esvagrant') + throw new InvalidUserDataException('Must specify at least one vagrant box') } for (String box : project.extensions.esvagrant.boxes) { - if (BOXES.contains(box) == false) { - throw new InvalidUserDataException("Vagrant box [${box}] not found, available virtual machines are ${BOXES}") + if (ALL_BOXES.contains(box) == false) { + throw new InvalidUserDataException("Vagrant box [${box}] is unknown to this plugin. Valid boxes are ${ALL_BOXES}") + } + + if (availableBoxes.contains(box) == false) { + throw new InvalidUserDataException("Vagrant box [${box}] is not available because an image is not supplied for it. 
" + + "Available boxes with supplied images are ${availableBoxes}") } } @@ -86,14 +110,45 @@ class VagrantTestPlugin implements Plugin { createVagrantBoxesTasks(project) } - private List listVagrantBoxes(Project project) { + /** + * Enumerate all the boxes that we know about and could possibly choose to test + */ + private void collectAvailableBoxes(Project project) { + // these images are hardcoded in the Vagrantfile and are always available + availableBoxes.addAll(LINUX_BOXES) + + // these images need to be provided at runtime + String windows_2012r2_box = project.getProperties().get('vagrant.windows-2012r2.id') + if (windows_2012r2_box != null && windows_2012r2_box.isEmpty() == false) { + availableBoxes.add('windows-2012r2') + vagrantBoxEnvVars['VAGRANT_WINDOWS_2012R2_BOX'] = windows_2012r2_box + } + + String windows_2016_box = project.getProperties().get('vagrant.windows-2016.id') + if (windows_2016_box != null && windows_2016_box.isEmpty() == false) { + availableBoxes.add('windows-2016') + vagrantBoxEnvVars['VAGRANT_WINDOWS_2016_BOX'] = windows_2016_box + } + } + + /** + * Enumerate all the boxes that we have chosen to test + */ + private static List listSelectedBoxes(Project project) { String vagrantBoxes = project.getProperties().get('vagrant.boxes', 'sample') - if (vagrantBoxes == 'sample') { - return SAMPLE - } else if (vagrantBoxes == 'all') { - return BOXES - } else { - return vagrantBoxes.split(',') + switch (vagrantBoxes) { + case 'sample': + return SAMPLE + case 'linux-all': + return LINUX_BOXES + case 'windows-all': + return WINDOWS_BOXES + case 'all': + return ALL_BOXES + case '': + return [] + default: + return vagrantBoxes.split(',') } } @@ -184,11 +239,19 @@ class VagrantTestPlugin implements Plugin { from project.configurations[PACKAGING_TEST_CONFIGURATION] } - Task createTestRunnerScript = project.tasks.create('createTestRunnerScript', FileContentsTask) { + Task createLinuxRunnerScript = project.tasks.create('createLinuxRunnerScript', FileContentsTask) { dependsOn copyPackagingTests file "${testsDir}/run-tests.sh" contents "java -cp \"\$PACKAGING_TESTS/*\" org.junit.runner.JUnitCore ${-> project.extensions.esvagrant.testClass}" } + Task createWindowsRunnerScript = project.tasks.create('createWindowsRunnerScript', FileContentsTask) { + dependsOn copyPackagingTests + file "${testsDir}/run-tests.ps1" + contents """\ + java -cp "\$Env:PACKAGING_TESTS/*" org.junit.runner.JUnitCore ${-> project.extensions.esvagrant.testClass} + exit \$LASTEXITCODE + """ + } Task createVersionFile = project.tasks.create('createVersionFile', FileContentsTask) { dependsOn copyPackagingArchives @@ -249,20 +312,24 @@ class VagrantTestPlugin implements Plugin { } Task vagrantSetUpTask = project.tasks.create('setupPackagingTest') - vagrantSetUpTask.dependsOn 'vagrantCheckVersion' - vagrantSetUpTask.dependsOn copyPackagingArchives, copyPackagingTests, createTestRunnerScript - vagrantSetUpTask.dependsOn createVersionFile, createUpgradeFromFile, createUpgradeIsOssFile - vagrantSetUpTask.dependsOn copyBatsTests, copyBatsUtils + vagrantSetUpTask.dependsOn( + 'vagrantCheckVersion', + copyPackagingArchives, + copyPackagingTests, + createLinuxRunnerScript, + createWindowsRunnerScript, + createVersionFile, + createUpgradeFromFile, + createUpgradeIsOssFile, + copyBatsTests, + copyBatsUtils + ) } private static void createPackagingTestTask(Project project) { project.tasks.create('packagingTest') { group 'Verification' - description "Tests yum/apt packages using vagrant and bats.\n" + - " Specify the vagrant 
boxes to test using the gradle property 'vagrant.boxes'.\n" + - " 'sample' can be used to test a single yum and apt box. 'all' can be used to\n" + - " test all available boxes. The available boxes are: \n" + - " ${BOXES}" + description "Tests distribution installation on different platforms using vagrant. See TESTING.asciidoc for details." dependsOn 'vagrantCheckVersion' } } @@ -270,24 +337,49 @@ class VagrantTestPlugin implements Plugin { private static void createPlatformTestTask(Project project) { project.tasks.create('platformTest') { group 'Verification' - description "Test unit and integ tests on different platforms using vagrant.\n" + - " Specify the vagrant boxes to test using the gradle property 'vagrant.boxes'.\n" + - " 'all' can be used to test all available boxes. The available boxes are: \n" + - " ${BOXES}" + description "Test unit and integ tests on different platforms using vagrant. See TESTING.asciidoc for details. This test " + + "is unmaintained." dependsOn 'vagrantCheckVersion' } } - private static void createVagrantTasks(Project project) { + private void createBoxListTasks(Project project) { + project.tasks.create('listAllBoxes') { + group 'Verification' + description 'List all vagrant boxes which can be tested by this plugin' + doLast { + println("All vagrant boxes supported by ${project.path}") + for (String box : ALL_BOXES) { + println(box) + } + } + dependsOn 'vagrantCheckVersion' + } + + project.tasks.create('listAvailableBoxes') { + group 'Verification' + description 'List all vagrant boxes which are available for testing' + doLast { + println("All vagrant boxes available to ${project.path}") + for (String box : availableBoxes) { + println(box) + } + } + dependsOn 'vagrantCheckVersion' + } + } + + private void createVagrantTasks(Project project) { createCleanTask(project) createStopTask(project) createSmokeTestTask(project) createPrepareVagrantTestEnvTask(project) createPackagingTestTask(project) createPlatformTestTask(project) + createBoxListTasks(project) } - private static void createVagrantBoxesTasks(Project project) { + private void createVagrantBoxesTasks(Project project) { assert project.extensions.esvagrant.boxes != null assert project.tasks.stop != null @@ -320,9 +412,10 @@ class VagrantTestPlugin implements Plugin { 'VAGRANT_VAGRANTFILE' : 'Vagrantfile', 'VAGRANT_PROJECT_DIR' : "${project.projectDir.absolutePath}" ] + vagrantEnvVars.putAll(vagrantBoxEnvVars) // Each box gets it own set of tasks - for (String box : BOXES) { + for (String box : availableBoxes) { String boxTask = box.capitalize().replace('-', '') // always add a halt task for all boxes, so clean makes sure they are all shutdown @@ -363,6 +456,7 @@ class VagrantTestPlugin implements Plugin { final Task destroy = project.tasks.create("vagrant${boxTask}#destroy", LoggedExec) { commandLine "bash", "-c", "vagrant status ${box} | grep -q \"${box}\\s\\+not created\" || vagrant destroy ${box} --force" workingDir project.rootProject.rootDir + environment vagrantEnvVars } destroy.onlyIf { vagrantDestroy } update.mustRunAfter(destroy) @@ -386,37 +480,42 @@ class VagrantTestPlugin implements Plugin { environment vagrantEnvVars dependsOn up finalizedBy halt - commandLine 'vagrant', 'ssh', box, '--command', - "set -o pipefail && echo 'Hello from ${project.path}' | sed -ue 's/^/ ${box}: /'" } vagrantSmokeTest.dependsOn(smoke) - - Task batsPackagingTest = project.tasks.create("vagrant${boxTask}#batsPackagingTest", BatsOverVagrantTask) { - remoteCommand BATS_TEST_COMMAND - boxName box - environmentVars 
vagrantEnvVars - dependsOn up, setupPackagingTest - finalizedBy halt + if (LINUX_BOXES.contains(box)) { + smoke.commandLine = ['vagrant', 'ssh', box, '--command', + "set -o pipefail && echo 'Hello from ${project.path}' | sed -ue 's/^/ ${box}: /'"] + } else { + smoke.commandLine = ['vagrant', 'winrm', box, '--command', + "Write-Host ' ${box}: Hello from ${project.path}'"] } - TaskExecutionAdapter batsPackagingReproListener = createReproListener(project, batsPackagingTest.path) - batsPackagingTest.doFirst { - project.gradle.addListener(batsPackagingReproListener) - } - batsPackagingTest.doLast { - project.gradle.removeListener(batsPackagingReproListener) - } - if (project.extensions.esvagrant.boxes.contains(box)) { - packagingTest.dependsOn(batsPackagingTest) + if (LINUX_BOXES.contains(box)) { + Task batsPackagingTest = project.tasks.create("vagrant${boxTask}#batsPackagingTest", BatsOverVagrantTask) { + remoteCommand BATS_TEST_COMMAND + boxName box + environmentVars vagrantEnvVars + dependsOn up, setupPackagingTest + finalizedBy halt + } + + TaskExecutionAdapter batsPackagingReproListener = createReproListener(project, batsPackagingTest.path) + batsPackagingTest.doFirst { + project.gradle.addListener(batsPackagingReproListener) + } + batsPackagingTest.doLast { + project.gradle.removeListener(batsPackagingReproListener) + } + if (project.extensions.esvagrant.boxes.contains(box)) { + packagingTest.dependsOn(batsPackagingTest) + } } Task javaPackagingTest = project.tasks.create("vagrant${boxTask}#javaPackagingTest", VagrantCommandTask) { - command 'ssh' boxName box environmentVars vagrantEnvVars dependsOn up, setupPackagingTest finalizedBy halt - args '--command', "bash \"\$PACKAGING_TESTS/run-tests.sh\"" } // todo remove this onlyIf after all packaging tests are consolidated @@ -424,6 +523,14 @@ class VagrantTestPlugin implements Plugin { project.extensions.esvagrant.testClass != null } + if (LINUX_BOXES.contains(box)) { + javaPackagingTest.command = 'ssh' + javaPackagingTest.args = ['--command', 'bash "$PACKAGING_TESTS/run-tests.sh"'] + } else { + javaPackagingTest.command = 'winrm' + javaPackagingTest.args = ['--command', 'powershell -File "$Env:PACKAGING_TESTS/run-tests.ps1"'] + } + TaskExecutionAdapter javaPackagingReproListener = createReproListener(project, javaPackagingTest.path) javaPackagingTest.doFirst { project.gradle.addListener(javaPackagingReproListener) @@ -435,23 +542,29 @@ class VagrantTestPlugin implements Plugin { packagingTest.dependsOn(javaPackagingTest) } - Task platform = project.tasks.create("vagrant${boxTask}#platformTest", VagrantCommandTask) { - command 'ssh' - boxName box - environmentVars vagrantEnvVars - dependsOn up - finalizedBy halt - args '--command', PLATFORM_TEST_COMMAND + " -Dtests.seed=${-> project.testSeed}" - } - TaskExecutionAdapter platformReproListener = createReproListener(project, platform.path) - platform.doFirst { - project.gradle.addListener(platformReproListener) - } - platform.doLast { - project.gradle.removeListener(platformReproListener) - } - if (project.extensions.esvagrant.boxes.contains(box)) { - platformTest.dependsOn(platform) + /* + * This test is unmaintained and was created to run on Linux. 
We won't allow it to run on Windows + * until it's been brought back into maintenance + */ + if (LINUX_BOXES.contains(box)) { + Task platform = project.tasks.create("vagrant${boxTask}#platformTest", VagrantCommandTask) { + command 'ssh' + boxName box + environmentVars vagrantEnvVars + dependsOn up + finalizedBy halt + args '--command', PLATFORM_TEST_COMMAND + " -Dtests.seed=${-> project.testSeed}" + } + TaskExecutionAdapter platformReproListener = createReproListener(project, platform.path) + platform.doFirst { + project.gradle.addListener(platformReproListener) + } + platform.doLast { + project.gradle.removeListener(platformReproListener) + } + if (project.extensions.esvagrant.boxes.contains(box)) { + platformTest.dependsOn(platform) + } } } } From c4a161d684611af54e99c8d82ddffad815a121ba Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 17 May 2018 07:09:18 -0400 Subject: [PATCH 12/34] Improve explanation in rescore (#30629) Currently in a rescore request if window_size is smaller than the top N documents returned (N=size), explanation of scores could be incorrect for documents that were a part of topN and not part of rescoring. This PR corrects this by saving in RescoreContext docIDs of documents for which rescoring was applied, and adding rescoring explanation only for these docIDs. Closes #28725 --- .../test/search/210_rescore_explain.yml | 47 +++++++++++++++++++ .../search/rescore/QueryRescorer.java | 34 ++++++++------ .../search/rescore/RescoreContext.java | 11 +++++ 3 files changed, 77 insertions(+), 15 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml new file mode 100644 index 0000000000000..0a6ea07791840 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/210_rescore_explain.yml @@ -0,0 +1,47 @@ +--- +"Score should match explanation in rescore": + - skip: + version: " - 6.3.99" + reason: Explanation for rescoring was corrected after these versions + - do: + indices.create: + index: test_index + body: + settings: + index: + number_of_shards: 1 + number_of_replicas: 0 + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test_index", "_type": "_doc", "_id": "1"}}' + - '{"f1": "1"}' + - '{"index": {"_index": "test_index", "_type": "_doc", "_id": "2"}}' + - '{"f1": "2"}' + - '{"index": {"_index": "test_index", "_type": "_doc", "_id": "3"}}' + - '{"f1": "3"}' + + - do: + search: + index: test_index + body: + explain: true + query: + match_all: {} + rescore: + window_size: 2 + query: + rescore_query: + match_all: {} + query_weight: 5 + rescore_query_weight: 10 + + - match: { hits.hits.0._score: 15 } + - match: { hits.hits.0._explanation.value: 15 } + + - match: { hits.hits.1._score: 15 } + - match: { hits.hits.1._explanation.value: 15 } + + - match: { hits.hits.2._score: 5 } + - match: { hits.hits.2._explanation.value: 5 } diff --git a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java index d4cf05d542560..4a9567a32c06a 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java @@ -30,6 +30,8 @@ import java.util.Arrays; import java.util.Comparator; import java.util.Set; +import 
java.util.Collections;
+import static java.util.stream.Collectors.toSet;

 public final class QueryRescorer implements Rescorer {

@@ -61,6 +63,11 @@ protected float combine(float firstPassScore, boolean secondPassMatches, float s
         // First take top slice of incoming docs, to be rescored:
         TopDocs topNFirstPass = topN(topDocs, rescoreContext.getWindowSize());

+        // Save doc IDs for which rescoring was applied to be used in score explanation
+        Set<Integer> topNDocIDs = Collections.unmodifiableSet(
+            Arrays.stream(topNFirstPass.scoreDocs).map(scoreDoc -> scoreDoc.doc).collect(toSet()));
+        rescoreContext.setRescoredDocs(topNDocIDs);
+
         // Rescore them:
         TopDocs rescored = rescorer.rescore(searcher, topNFirstPass, rescoreContext.getWindowSize());

@@ -71,16 +78,12 @@ protected float combine(float firstPassScore, boolean secondPassMatches, float s

     @Override
     public Explanation explain(int topLevelDocId, IndexSearcher searcher, RescoreContext rescoreContext,
                                Explanation sourceExplanation) throws IOException {
-        QueryRescoreContext rescore = (QueryRescoreContext) rescoreContext;
         if (sourceExplanation == null) {
             // this should not happen but just in case
             return Explanation.noMatch("nothing matched");
         }
-        // TODO: this isn't right? I.e., we are incorrectly pretending all first pass hits were rescored? If the requested docID was
-        // beyond the top rescoreContext.window() in the first pass hits, we don't rescore it now?
-        Explanation rescoreExplain = searcher.explain(rescore.query(), topLevelDocId);
+        QueryRescoreContext rescore = (QueryRescoreContext) rescoreContext;
         float primaryWeight = rescore.queryWeight();
-
         Explanation prim;
         if (sourceExplanation.isMatch()) {
             prim = Explanation.match(
@@ -89,23 +92,24 @@ public Explanation explain(int topLevelDocId, IndexSearcher searcher, RescoreCon
         } else {
             prim = Explanation.noMatch("First pass did not match", sourceExplanation);
         }
-
-        // NOTE: we don't use Lucene's Rescorer.explain because we want to insert our own description with which ScoreMode was used. Maybe
-        // we should add QueryRescorer.explainCombine to Lucene?
-        if (rescoreExplain != null && rescoreExplain.isMatch()) {
-            float secondaryWeight = rescore.rescoreQueryWeight();
-            Explanation sec = Explanation.match(
+        if (rescoreContext.isRescored(topLevelDocId)){
+            Explanation rescoreExplain = searcher.explain(rescore.query(), topLevelDocId);
+            // NOTE: we don't use Lucene's Rescorer.explain because we want to insert our own description with which ScoreMode was used.
+            // Maybe we should add QueryRescorer.explainCombine to Lucene?
+            if (rescoreExplain != null && rescoreExplain.isMatch()) {
+                float secondaryWeight = rescore.rescoreQueryWeight();
+                Explanation sec = Explanation.match(
                     rescoreExplain.getValue() * secondaryWeight,
                     "product of:",
                     rescoreExplain, Explanation.match(secondaryWeight, "secondaryWeight"));
-            QueryRescoreMode scoreMode = rescore.scoreMode();
-            return Explanation.match(
+                QueryRescoreMode scoreMode = rescore.scoreMode();
+                return Explanation.match(
                     scoreMode.combine(prim.getValue(), sec.getValue()),
                     scoreMode + " of:",
                     prim, sec);
-        } else {
-            return prim;
+            }
         }
+        return prim;
     }

     private static final Comparator<ScoreDoc> SCORE_DOC_COMPARATOR = new Comparator<ScoreDoc>() {
diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java
index 75ce807a67f47..913af0a2ceca0 100644
--- a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java
+++ b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java
@@ -19,6 +19,8 @@

 package org.elasticsearch.search.rescore;

+import java.util.Set;
+
 /**
  * Context available to the rescore while it is running. Rescore
  * implementations should extend this with any additional resources that
@@ -27,6 +29,7 @@ public class RescoreContext {

     private final int windowSize;
     private final Rescorer rescorer;
+    private Set<Integer> rescoredDocs; //doc Ids for which rescoring was applied

     /**
      * Build the context.
@@ -50,4 +53,12 @@ public Rescorer rescorer() {
     public int getWindowSize() {
         return windowSize;
     }
+
+    public void setRescoredDocs(Set<Integer> docIds) {
+        rescoredDocs = docIds;
+    }
+
+    public boolean isRescored(int docId) {
+        return rescoredDocs.contains(docId);
+    }
 }
From 7605ce6dc6b3c3136da575f470e1ab69fa085ed1 Mon Sep 17 00:00:00 2001
From: lcawl
Date: Thu, 17 May 2018 09:57:11 -0700
Subject: [PATCH 13/34] [DOCS] Replace X-Pack terms with attributes

---
 docs/reference/setup/install/windows.asciidoc | 22 +++++++++----------
 .../authentication/built-in-users.asciidoc    |  2 +-
 .../authentication/custom-realm.asciidoc      |  2 +-
 x-pack/docs/en/security/gs-index.asciidoc     |  6 ++---
 x-pack/docs/en/security/index.asciidoc        |  6 ++---
 5 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc
index c48ec5de22d2c..861daa160e7b5 100644
--- a/docs/reference/setup/install/windows.asciidoc
+++ b/docs/reference/setup/install/windows.asciidoc
@@ -79,13 +79,13 @@ part of the installation, with the option to configure a HTTPS proxy through whi

 [[msi-installer-selected-plugins]]
 image::images/msi_installer/msi_installer_selected_plugins.png[]

-Upon choosing to install X-Pack plugin, an additional step allows a choice of the type of X-Pack
-license to install, in addition to X-Pack Security configuration and built-in user configuration:
+Upon choosing to install {xpack} plugin, an additional step allows a choice of the type of {xpack}
+license to install, in addition to {security} configuration and built-in user configuration:

 [[msi-installer-xpack]]
 image::images/msi_installer/msi_installer_xpack.png[]

-NOTE: X-Pack includes a choice of a Trial or Basic license for 30 days. After that, you can obtain one of the
+NOTE: {xpack} includes a choice of a Trial or Basic license for 30 days. After that, you can obtain one of the
 https://www.elastic.co/subscriptions[available subscriptions] or {ref}/security-settings.html[disable Security].
The Basic license is free and includes the https://www.elastic.co/products/x-pack/monitoring[Monitoring] extension. @@ -286,43 +286,43 @@ as _properties_ within Windows Installer documentation) that can be passed to ms `XPACKLICENSE`:: - When installing X-Pack plugin, the type of license to install, + When installing {xpack} plugin, the type of license to install, either `Basic` or `Trial`. Defaults to `Basic` `XPACKSECURITYENABLED`:: - When installing X-Pack plugin with a `Trial` license, whether X-Pack Security should be enabled. + When installing {xpack} plugin with a `Trial` license, whether {security} should be enabled. Defaults to `true` `BOOTSTRAPPASSWORD`:: - When installing X-Pack plugin with a `Trial` license and X-Pack Security enabled, the password to + When installing {xpack} plugin with a `Trial` license and {security} enabled, the password to used to bootstrap the cluster and persisted as the `bootstrap.password` setting in the keystore. Defaults to a randomized value. `SKIPSETTINGPASSWORDS`:: - When installing X-Pack plugin with a `Trial` license and X-Pack Security enabled, whether the + When installing {xpack} plugin with a `Trial` license and {security} enabled, whether the installation should skip setting up the built-in users `elastic`, `kibana` and `logstash_system`. Defaults to `false` `ELASTICUSERPASSWORD`:: - When installing X-Pack plugin with a `Trial` license and X-Pack Security enabled, the password + When installing {xpack} plugin with a `Trial` license and {security} enabled, the password to use for the built-in user `elastic`. Defaults to `""` `KIBANAUSERPASSWORD`:: - When installing X-Pack plugin with a `Trial` license and X-Pack Security enabled, the password + When installing {xpack} plugin with a `Trial` license and {security} enabled, the password to use for the built-in user `kibana`. Defaults to `""` `LOGSTASHSYSTEMUSERPASSWORD`:: - When installing X-Pack plugin with a `Trial` license and X-Pack Security enabled, the password + When installing {xpack} plugin with a `Trial` license and {security} enabled, the password to use for the built-in user `logstash_system`. Defaults to `""` To pass a value, simply append the property name and value using the format `=""` to -the installation command. For example, to use a different installation directory to the default one and to install https://www.elastic.co/products/x-pack[X-Pack]: +the installation command. For example, to use a different installation directory to the default one and to install https://www.elastic.co/products/x-pack[{xpack}]: ["source","sh",subs="attributes,callouts"] -------------------------------------------- diff --git a/x-pack/docs/en/security/authentication/built-in-users.asciidoc b/x-pack/docs/en/security/authentication/built-in-users.asciidoc index 74fc9f1e1db12..d18f441e293f1 100644 --- a/x-pack/docs/en/security/authentication/built-in-users.asciidoc +++ b/x-pack/docs/en/security/authentication/built-in-users.asciidoc @@ -118,7 +118,7 @@ the `logstash.yml` configuration file: xpack.monitoring.elasticsearch.password: logstashpassword ---------------------------------------------------------- -If you have upgraded from an older version of elasticsearch/x-pack, +If you have upgraded from an older version of Elasticsearch, the `logstash_system` user may have defaulted to _disabled_ for security reasons. 
Once the password has been changed, you can enable the user via the following API call: diff --git a/x-pack/docs/en/security/authentication/custom-realm.asciidoc b/x-pack/docs/en/security/authentication/custom-realm.asciidoc index 8e0114b7454c6..0ae33d434a1f5 100644 --- a/x-pack/docs/en/security/authentication/custom-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/custom-realm.asciidoc @@ -50,7 +50,7 @@ public AuthenticationFailureHandler getAuthenticationFailureHandler() { ---------------------------------------------------- + The `getAuthenticationFailureHandler` method is used to optionally provide a -custom `AuthenticationFailureHandler`, which will control how X-Pack responds +custom `AuthenticationFailureHandler`, which will control how {security} responds in certain authentication failure events. + [source,java] diff --git a/x-pack/docs/en/security/gs-index.asciidoc b/x-pack/docs/en/security/gs-index.asciidoc index 2beef9403fc24..2a7a22731fc8e 100644 --- a/x-pack/docs/en/security/gs-index.asciidoc +++ b/x-pack/docs/en/security/gs-index.asciidoc @@ -25,8 +25,8 @@ Security protects Elasticsearch clusters by: To prevent unauthorized access to your Elasticsearch cluster, you must have a way to _authenticate_ users. This simply means that you need a way to validate that a user is who they claim to be. For example, you have to make sure only -the person named _Kelsey Andorra_ can sign in as the user `kandorra`. X-Pack -Security provides a standalone authentication mechanism that enables you to +the person named _Kelsey Andorra_ can sign in as the user `kandorra`. {security} +provides a standalone authentication mechanism that enables you to quickly password-protect your cluster. If you're already using {xpack-ref}/ldap-realm.html[LDAP], {xpack-ref}/active-directory-realm.html[ Active Directory], or {xpack-ref}/pki-realm.html[ PKI] to manage users in your organization, {security} is able to integrate with those @@ -83,7 +83,7 @@ issues. * {xpack-ref}/tribe-clients-integrations.html[Integrations] shows you how to interact with an Elasticsearch cluster protected by - X-Pack Security. + {security}. * {xpack-ref}/security-reference.html[Reference] provides detailed information about the access privileges you can grant to diff --git a/x-pack/docs/en/security/index.asciidoc b/x-pack/docs/en/security/index.asciidoc index bff23461b08c5..fed2906ab1bd9 100644 --- a/x-pack/docs/en/security/index.asciidoc +++ b/x-pack/docs/en/security/index.asciidoc @@ -26,8 +26,8 @@ Security protects Elasticsearch clusters by: To prevent unauthorized access to your Elasticsearch cluster, you must have a way to _authenticate_ users. This simply means that you need a way to validate that a user is who they claim to be. For example, you have to make sure only -the person named _Kelsey Andorra_ can sign in as the user `kandorra`. X-Pack -Security provides a standalone authentication mechanism that enables you to +the person named _Kelsey Andorra_ can sign in as the user `kandorra`. {security} +provides a standalone authentication mechanism that enables you to quickly password-protect your cluster. If you're already using <>, <>, or <> to manage users in your organization, {security} is able to integrate with those @@ -81,7 +81,7 @@ issues. * <> shows you how to interact with an Elasticsearch cluster protected by - X-Pack Security. + {security}. 
* <> provides detailed information about the access privileges you can grant to From d9b6b4d62162b7d3390a7e68ca3015aafd8ebb02 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 17 May 2018 13:04:10 -0700 Subject: [PATCH 14/34] Silence sleep based watcher test see https://github.com/elastic/elasticsearch/issues/30699 --- .../watcher/transport/action/activate/ActivateWatchTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java index c692a1dfc8b7b..5da3f05177aae 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java @@ -47,8 +47,8 @@ public class ActivateWatchTests extends AbstractWatcherIntegrationTestCase { protected boolean timeWarped() { return false; } - - // FIXME not to be sleep based + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30699") public void testDeactivateAndActivate() throws Exception { PutWatchResponse putWatchResponse = watcherClient().preparePutWatch() .setId("_id") From 20f9365e5976727e1163de148df9305daa7291fd Mon Sep 17 00:00:00 2001 From: tomcallahan Date: Thu, 17 May 2018 16:33:31 -0400 Subject: [PATCH 15/34] Backport get settings API changes to 6.x (#30494) Backports the Get Settings API changes to the 6.x branch. I remove the include_defaults test which will be added back shortly in a followup PR. This test does not work in a mixed-cluster environment and should be broken out from its current location in any case. 
--- .../elasticsearch/client/IndicesClient.java | 24 +++ .../client/RequestConverters.java | 17 ++ .../elasticsearch/client/IndicesClientIT.java | 104 ++++++++++ .../client/RequestConvertersTests.java | 47 +++++ .../IndicesClientDocumentationIT.java | 115 +++++++++++ .../high-level/indices/get_settings.asciidoc | 96 +++++++++ .../high-level/supported-apis.asciidoc | 2 + .../api/indices.get_settings.json | 4 + .../test/indices.put_settings/11_reset.yml | 7 - .../elasticsearch/action/ActionModule.java | 2 +- .../settings/get/GetSettingsRequest.java | 71 +++++-- .../settings/get/GetSettingsResponse.java | 194 +++++++++++++++++- .../get/TransportGetSettingsAction.java | 34 ++- .../admin/indices/RestGetSettingsAction.java | 29 +-- .../settings/get/GetSettingsActionTests.java | 148 +++++++++++++ .../settings/get/GetSettingsRequestTests.java | 70 +++++++ .../get/GetSettingsResponseTests.java | 160 +++++++++++++++ 17 files changed, 1063 insertions(+), 61 deletions(-) create mode 100644 docs/java-rest/high-level/indices/get_settings.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index fa6d8a8e065ea..5aa64a5c1375e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -43,6 +43,8 @@ import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; @@ -267,6 +269,28 @@ public void flushAsync(FlushRequest flushRequest, ActionListener listener, emptySet(), headers); } + /** + * Retrieve the settings of one or more indices + *
+ * See + * Indices Get Settings API on elastic.co + */ + public GetSettingsResponse getSettings(GetSettingsRequest getSettingsRequest, Header... headers) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(getSettingsRequest, RequestConverters::getSettings, + GetSettingsResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously retrieve the settings of one or more indices + *
+ * See + * Indices Get Settings API on elastic.co + */ + public void getSettingsAsync(GetSettingsRequest getSettingsRequest, ActionListener listener, Header... headers) { + restHighLevelClient.performRequestAsyncAndParseEntity(getSettingsRequest, RequestConverters::getSettings, + GetSettingsResponse::fromXContent, listener, emptySet(), headers); + } + /** * Force merge one or more indices using the Force Merge API *
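Before the request-converter diff that follows, a rough sketch of the wire-level
exchange it produces may help. The converter below builds an endpoint of the
form `/{indices}/_settings/{names}`, so an unfiltered variant of the request
sketched earlier would translate to roughly:

    GET /my-index/_settings?include_defaults=true

and, per the `GetSettingsResponse` changes later in this patch, the body comes
back keyed by index name, with explicit settings and defaults in separate
objects. For example (values are placeholders):

    {
      "my-index": {
        "settings": {
          "index": { "number_of_shards": "1" }
        },
        "defaults": {
          "index": { "refresh_interval": "1s" }
        }
      }
    }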
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 27b237c5302cb..35a594ac8bac3 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -47,6 +47,7 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; @@ -616,6 +617,22 @@ static Request rollover(RolloverRequest rolloverRequest) throws IOException { return request; } + static Request getSettings(GetSettingsRequest getSettingsRequest) throws IOException { + String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices(); + String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names(); + + String endpoint = endpoint(indices, "_settings", names); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params params = new Params(request); + params.withIndicesOptions(getSettingsRequest.indicesOptions()); + params.withLocal(getSettingsRequest.local()); + params.withIncludeDefaults(getSettingsRequest.includeDefaults()); + params.withMasterTimeout(getSettingsRequest.masterNodeTimeout()); + + return request; + } + static Request indicesExist(GetIndexRequest getIndexRequest) { // this can be called with no indices as argument by transport client, not via REST though if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 14ec9230c95ee..cf7aeb389f8a6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -52,6 +52,8 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeType; @@ -201,6 +203,108 @@ public void testCreateIndex() throws IOException { } } + public void testGetSettings() throws IOException { + String indexName = "get_settings_index"; + Settings basicSettings = Settings.builder() + .put("number_of_shards", 1) + .put("number_of_replicas", 0) + .build(); + createIndex(indexName, basicSettings); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName); + GetSettingsResponse getSettingsResponse = 
execute(getSettingsRequest, highLevelClient().indices()::getSettings, + highLevelClient().indices()::getSettingsAsync); + + assertNull(getSettingsResponse.getSetting(indexName, "index.refresh_interval")); + assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards")); + + updateIndexSettings(indexName, Settings.builder().put("refresh_interval", "30s")); + + GetSettingsResponse updatedResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings, + highLevelClient().indices()::getSettingsAsync); + assertEquals("30s", updatedResponse.getSetting(indexName, "index.refresh_interval")); + } + + public void testGetSettingsNonExistentIndex() throws IOException { + String nonExistentIndex = "index_that_doesnt_exist"; + assertFalse(indexExists(nonExistentIndex)); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(nonExistentIndex); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(getSettingsRequest, highLevelClient().indices()::getSettings, highLevelClient().indices()::getSettingsAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + } + + public void testGetSettingsFromMultipleIndices() throws IOException { + String indexName1 = "get_multiple_settings_one"; + createIndex(indexName1, Settings.builder().put("number_of_shards", 2).build()); + + String indexName2 = "get_multiple_settings_two"; + createIndex(indexName2, Settings.builder().put("number_of_shards", 3).build()); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices("get_multiple_settings*"); + GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings, + highLevelClient().indices()::getSettingsAsync); + + assertEquals("2", getSettingsResponse.getSetting(indexName1, "index.number_of_shards")); + assertEquals("3", getSettingsResponse.getSetting(indexName2, "index.number_of_shards")); + } + + public void testGetSettingsFiltered() throws IOException { + String indexName = "get_settings_index"; + Settings basicSettings = Settings.builder() + .put("number_of_shards", 1) + .put("number_of_replicas", 0) + .build(); + createIndex(indexName, basicSettings); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName).names("index.number_of_shards"); + GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings, + highLevelClient().indices()::getSettingsAsync); + + assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_replicas")); + assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards")); + assertEquals(1, getSettingsResponse.getIndexToSettings().get("get_settings_index").size()); + } + + public void testGetSettingsWithDefaults() throws IOException { + String indexName = "get_settings_index"; + Settings basicSettings = Settings.builder() + .put("number_of_shards", 1) + .put("number_of_replicas", 0) + .build(); + createIndex(indexName, basicSettings); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName).includeDefaults(true); + GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings, + highLevelClient().indices()::getSettingsAsync); + + assertNotNull(getSettingsResponse.getSetting(indexName, "index.refresh_interval")); + assertEquals(IndexSettings.DEFAULT_REFRESH_INTERVAL, + 
getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").getAsTime("index.refresh_interval", null)); + assertEquals("1", getSettingsResponse.getSetting(indexName, "index.number_of_shards")); + } + + public void testGetSettingsWithDefaultsFiltered() throws IOException { + String indexName = "get_settings_index"; + Settings basicSettings = Settings.builder() + .put("number_of_shards", 1) + .put("number_of_replicas", 0) + .build(); + createIndex(indexName, basicSettings); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest() + .indices(indexName) + .names("index.refresh_interval") + .includeDefaults(true); + GetSettingsResponse getSettingsResponse = execute(getSettingsRequest, highLevelClient().indices()::getSettings, + highLevelClient().indices()::getSettingsAsync); + + assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_replicas")); + assertNull(getSettingsResponse.getSetting(indexName, "index.number_of_shards")); + assertEquals(0, getSettingsResponse.getIndexToSettings().get("get_settings_index").size()); + assertEquals(1, getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").size()); + } public void testPutMapping() throws IOException { { // Add mappings to index diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index af01590c0a1cf..86c669650150b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; @@ -415,6 +416,52 @@ public void testDeleteIndex() { assertNull(request.getEntity()); } + public void testGetSettings() throws IOException { + String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5); + + GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest); + + Map expectedParams = new HashMap<>(); + setRandomMasterTimeout(getSettingsRequest, expectedParams); + setRandomIndicesOptions(getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions, expectedParams); + + setRandomLocal(getSettingsRequest, expectedParams); + + if (randomBoolean()) { + //the request object will not have include_defaults present unless it is set to true + getSettingsRequest.includeDefaults(randomBoolean()); + if (getSettingsRequest.includeDefaults()) { + expectedParams.put("include_defaults", Boolean.toString(true)); + } + } + + StringJoiner endpoint = new StringJoiner("/", "/", ""); + if (indicesUnderTest != null && indicesUnderTest.length > 0) { + endpoint.add(String.join(",", indicesUnderTest)); + } + endpoint.add("_settings"); + + if (randomBoolean()) { + String[] names = randomBoolean() ? 
null : new String[randomIntBetween(0, 3)]; + if (names != null) { + for (int x = 0; x < names.length; x++) { + names[x] = randomAlphaOfLengthBetween(3, 10); + } + } + getSettingsRequest.names(names); + if (names != null && names.length > 0) { + endpoint.add(String.join(",", names)); + } + } + + Request request = RequestConverters.getSettings(getSettingsRequest); + + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(request.getEntity(), nullValue()); + } + public void testDeleteIndexEmptyIndices() { String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY; ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index e7107c596f3aa..c766d87d231ce 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -50,6 +50,8 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeType; @@ -782,6 +784,119 @@ public void onFailure(Exception e) { } } + public void testGetSettings() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + Settings settings = Settings.builder().put("number_of_shards", 3).build(); + CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("index", settings)); + assertTrue(createIndexResponse.isAcknowledged()); + } + + // tag::get-settings-request + GetSettingsRequest request = new GetSettingsRequest().indices("index"); + // end::get-settings-request + + // tag::get-settings-request-names + request.names("index.number_of_shards"); // <1> + // end::get-settings-request-names + + // tag::get-settings-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::get-settings-request-indicesOptions + + // tag::get-settings-execute + GetSettingsResponse getSettingsResponse = client.indices().getSettings(request); + // end::get-settings-execute + + // tag::get-settings-response + String numberOfShardsString = getSettingsResponse.getSetting("index", "index.number_of_shards"); // <1> + Settings indexSettings = getSettingsResponse.getIndexToSettings().get("index"); // <2> + Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null); // <3> + // end::get-settings-response + + assertEquals("3", numberOfShardsString); + assertEquals(Integer.valueOf(3), numberOfShards); + + assertNull("refresh_interval returned but was never set!", + getSettingsResponse.getSetting("index", "index.refresh_interval")); + + // 
tag::get-settings-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetSettingsResponse GetSettingsResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-settings-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-settings-execute-async + client.indices().getSettingsAsync(request, listener); // <1> + // end::get-settings-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + public void testGetSettingsWithDefaults() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + Settings settings = Settings.builder().put("number_of_shards", 3).build(); + CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("index", settings)); + assertTrue(createIndexResponse.isAcknowledged()); + } + + GetSettingsRequest request = new GetSettingsRequest().indices("index"); + request.indicesOptions(IndicesOptions.lenientExpandOpen()); + + // tag::get-settings-request-include-defaults + request.includeDefaults(true); // <1> + // end::get-settings-request-include-defaults + + GetSettingsResponse getSettingsResponse = client.indices().getSettings(request); + String numberOfShardsString = getSettingsResponse.getSetting("index", "index.number_of_shards"); + Settings indexSettings = getSettingsResponse.getIndexToSettings().get("index"); + Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null); + + // tag::get-settings-defaults-response + String refreshInterval = getSettingsResponse.getSetting("index", "index.refresh_interval"); // <1> + Settings indexDefaultSettings = getSettingsResponse.getIndexToDefaultSettings().get("index"); // <2> + // end::get-settings-defaults-response + + assertEquals("3", numberOfShardsString); + assertEquals(Integer.valueOf(3), numberOfShards); + assertNotNull("with defaults enabled we should get a value for refresh_interval!", refreshInterval); + + assertEquals(refreshInterval, indexDefaultSettings.get("index.refresh_interval")); + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetSettingsResponse GetSettingsResponse) { + } + + @Override + public void onFailure(Exception e) { + } + }; + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + client.indices().getSettingsAsync(request, listener); + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + public void testForceMergeIndex() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/indices/get_settings.asciidoc b/docs/java-rest/high-level/indices/get_settings.asciidoc new file mode 100644 index 0000000000000..b054715119ec3 --- /dev/null +++ b/docs/java-rest/high-level/indices/get_settings.asciidoc @@ -0,0 +1,96 @@ +[[java-rest-high-get-settings]] +=== Get Settings API + +[[java-rest-high-get-settings-request]] +==== Get Settings Request + +A `GetSettingsRequest` requires one or more `index` arguments: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request] +-------------------------------------------------- +<1> The index whose 
settings we should retrieve
+
+==== Optional arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-names]
+--------------------------------------------------
+<1> One or more settings that will be the only settings retrieved. If unset, all settings will be retrieved
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-include-defaults]
+--------------------------------------------------
+<1> If true, defaults will be returned for settings not explicitly set on the index
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-request-indicesOptions]
+--------------------------------------------------
+<1> Setting `IndicesOptions` controls how unavailable indices are resolved and
+how wildcard expressions are expanded
+
+[[java-rest-high-get-settings-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute]
+--------------------------------------------------
+
+[[java-rest-high-get-settings-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a Get Settings request requires both the `GetSettingsRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute-async]
+--------------------------------------------------
+<1> The `GetSettingsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed, the `ActionListener` is called back using the `onResponse` method
+if the execution completed successfully or using the `onFailure` method if
+it failed.
+
+A typical listener for `GetSettingsResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure.
The raised exception is provided as an argument
+
+[[java-rest-high-get-settings-response]]
+==== Get Settings Response
+
+The returned `GetSettingsResponse` allows you to retrieve information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-response]
+--------------------------------------------------
+<1> We can retrieve the setting value for a particular index directly from the response as a string
+<2> We can also retrieve the Settings object for a particular index for further examination
+<3> The returned Settings object provides convenience methods for non-String types
+
+If the `includeDefaults` flag was set to true in the `GetSettingsRequest`, the
+behavior of `GetSettingsResponse` will differ somewhat.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-settings-defaults-response]
+--------------------------------------------------
+<1> Individual default setting values may be retrieved directly from the `GetSettingsResponse`
+<2> We may retrieve a Settings object for an index that contains those settings with default values
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 658d6023caea5..b87d267961f84 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -67,6 +67,7 @@ Index Management::
 * <>
 * <>
 * <>
+* <>

 Mapping Management::
 * <>
@@ -92,6 +93,7 @@ include::indices/update_aliases.asciidoc[]
 include::indices/exists_alias.asciidoc[]
 include::indices/put_settings.asciidoc[]
 include::indices/put_template.asciidoc[]
+include::indices/get_settings.asciidoc[]

 == Cluster APIs
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
index 706cce5277a40..ed22cc837d6a8 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json
@@ -16,6 +16,10 @@
     }
   },
   "params": {
+    "master_timeout": {
+      "type": "time",
+      "description": "Specify timeout for connection to master"
+    },
     "ignore_unavailable": {
       "type" : "boolean",
       "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yml
index bc2dace0e1871..ac5564fcd3ec4 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yml
@@ -23,10 +23,3 @@ Test reset index settings:
       indices.get_settings:
         flat_settings: false
   - is_false: test-index.settings.index\.refresh_interval
-  - do:
-      indices.get_settings:
-        include_defaults: true
-        flat_settings: true
-        index: test-index
-  - match:
-      test-index.defaults.index\.refresh_interval: "1s"
diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java
index 392b307a8aa79..42ff432240381 100644
---
a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -576,7 +576,7 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestOpenIndexAction(settings, restController)); registerHandler.accept(new RestUpdateSettingsAction(settings, restController)); - registerHandler.accept(new RestGetSettingsAction(settings, restController, indexScopedSettings, settingsFilter)); + registerHandler.accept(new RestGetSettingsAction(settings, restController)); registerHandler.accept(new RestAnalyzeAction(settings, restController)); registerHandler.accept(new RestGetIndexTemplateAction(settings, restController)); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java index d15da04acabf5..dd240094c93e9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.settings.get; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.ValidateActions; @@ -29,6 +30,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; public class GetSettingsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { @@ -36,6 +39,7 @@ public class GetSettingsRequest extends MasterNodeReadRequest indexToSettings = ImmutableOpenMap.of(); + private ImmutableOpenMap indexToDefaultSettings = ImmutableOpenMap.of(); - public GetSettingsResponse(ImmutableOpenMap indexToSettings) { + public GetSettingsResponse(ImmutableOpenMap indexToSettings, + ImmutableOpenMap indexToDefaultSettings) { this.indexToSettings = indexToSettings; + this.indexToDefaultSettings = indexToDefaultSettings; } GetSettingsResponse() { } + /** + * Returns a map of index name to {@link Settings} object. The returned {@link Settings} + * objects contain only those settings explicitly set on a given index. Any settings + * taking effect as defaults must be accessed via {@link #getIndexToDefaultSettings()}. + */ public ImmutableOpenMap getIndexToSettings() { return indexToSettings; } + /** + * If the originating {@link GetSettingsRequest} object was configured to include + * defaults, this will contain a mapping of index name to {@link Settings} objects. + * The returned {@link Settings} objects will contain only those settings taking + * effect as defaults. Any settings explicitly set on the index will be available + * via {@link #getIndexToSettings()}. + * See also {@link GetSettingsRequest#includeDefaults(boolean)} + */ + public ImmutableOpenMap getIndexToDefaultSettings() { + return indexToDefaultSettings; + } + + /** + * Returns the string value for the specified index and setting. If the includeDefaults + * flag was not set or set to false on the GetSettingsRequest, this method will only + * return a value where the setting was explicitly set on the index. If the includeDefaults + * flag was set to true on the GetSettingsRequest, this method will fall back to return the default + * value if the setting was not explicitly set. 
+ */ public String getSetting(String index, String setting) { Settings settings = indexToSettings.get(index); if (setting != null) { - return settings.get(setting); + if (settings != null && settings.hasValue(setting)) { + return settings.get(setting); + } else { + Settings defaultSettings = indexToDefaultSettings.get(index); + if (defaultSettings != null) { + return defaultSettings.get(setting); + } else { + return null; + } + } } else { return null; } @@ -55,12 +107,22 @@ public String getSetting(String index, String setting) { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - int size = in.readVInt(); - ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); - for (int i = 0; i < size; i++) { - builder.put(in.readString(), Settings.readSettingsFromStream(in)); + + int settingsSize = in.readVInt(); + ImmutableOpenMap.Builder settingsBuilder = ImmutableOpenMap.builder(); + for (int i = 0; i < settingsSize; i++) { + settingsBuilder.put(in.readString(), Settings.readSettingsFromStream(in)); + } + ImmutableOpenMap.Builder defaultSettingsBuilder = ImmutableOpenMap.builder(); + + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { + int defaultSettingsSize = in.readVInt(); + for (int i = 0; i < defaultSettingsSize ; i++) { + defaultSettingsBuilder.put(in.readString(), Settings.readSettingsFromStream(in)); + } } - indexToSettings = builder.build(); + indexToSettings = settingsBuilder.build(); + indexToDefaultSettings = defaultSettingsBuilder.build(); } @Override @@ -71,5 +133,121 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(cursor.key); Settings.writeSettingsToStream(cursor.value, out); } + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { + out.writeVInt(indexToDefaultSettings.size()); + for (ObjectObjectCursor cursor : indexToDefaultSettings) { + out.writeString(cursor.key); + Settings.writeSettingsToStream(cursor.value, out); + } + } + } + + private static void parseSettingsField(XContentParser parser, String currentIndexName, Map indexToSettings, + Map indexToDefaultSettings) throws IOException { + + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + switch (parser.currentName()) { + case "settings": + indexToSettings.put(currentIndexName, Settings.fromXContent(parser)); + break; + case "defaults": + indexToDefaultSettings.put(currentIndexName, Settings.fromXContent(parser)); + break; + default: + parser.skipChildren(); + } + } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + parser.nextToken(); + } + + private static void parseIndexEntry(XContentParser parser, Map indexToSettings, + Map indexToDefaultSettings) throws IOException { + String indexName = parser.currentName(); + parser.nextToken(); + while (!parser.isClosed() && parser.currentToken() != XContentParser.Token.END_OBJECT) { + parseSettingsField(parser, indexName, indexToSettings, indexToDefaultSettings); + } + } + public static GetSettingsResponse fromXContent(XContentParser parser) throws IOException { + HashMap indexToSettings = new HashMap<>(); + HashMap indexToDefaultSettings = new HashMap<>(); + + if (parser.currentToken() == null) { + parser.nextToken(); + } + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + parser.nextToken(); + + while (!parser.isClosed()) { + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + //we must assume this is an index entry + parseIndexEntry(parser, 
indexToSettings, indexToDefaultSettings); + } else if (parser.currentToken() == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } else { + parser.nextToken(); + } + } + + ImmutableOpenMap settingsMap = ImmutableOpenMap.builder().putAll(indexToSettings).build(); + ImmutableOpenMap defaultSettingsMap = + ImmutableOpenMap.builder().putAll(indexToDefaultSettings).build(); + + return new GetSettingsResponse(settingsMap, defaultSettingsMap); + } + + @Override + public String toString() { + try { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, baos); + toXContent(builder, ToXContent.EMPTY_PARAMS, false); + return Strings.toString(builder); + } catch (IOException e) { + throw new IllegalStateException(e); //should not be possible here + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return toXContent(builder, params, indexToDefaultSettings.isEmpty()); + } + + private XContentBuilder toXContent(XContentBuilder builder, Params params, boolean omitEmptySettings) throws IOException { + builder.startObject(); + for (ObjectObjectCursor cursor : getIndexToSettings()) { + // no settings, jump over it to shorten the response data + if (omitEmptySettings && cursor.value.isEmpty()) { + continue; + } + builder.startObject(cursor.key); + builder.startObject("settings"); + cursor.value.toXContent(builder, params); + builder.endObject(); + if (indexToDefaultSettings.isEmpty() == false) { + builder.startObject("defaults"); + indexToDefaultSettings.get(cursor.key).toXContent(builder, params); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSettingsResponse that = (GetSettingsResponse) o; + return Objects.equals(indexToSettings, that.indexToSettings) && + Objects.equals(indexToDefaultSettings, that.indexToDefaultSettings); + } + + @Override + public int hashCode() { + return Objects.hash(indexToSettings, indexToDefaultSettings); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java index e9f116566607f..bbee9e7b1130e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java @@ -37,19 +37,23 @@ import org.elasticsearch.index.Index; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.common.settings.IndexScopedSettings; -import java.util.Map; +import java.util.Arrays; public class TransportGetSettingsAction extends TransportMasterNodeReadAction { private final SettingsFilter settingsFilter; + private final IndexScopedSettings indexScopedSettings; + @Inject public TransportGetSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, SettingsFilter settingsFilter, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { + IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexedScopedSettings) { super(settings, 
GetSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, GetSettingsRequest::new); this.settingsFilter = settingsFilter; + this.indexScopedSettings = indexedScopedSettings; } @Override @@ -69,25 +73,39 @@ protected GetSettingsResponse newResponse() { return new GetSettingsResponse(); } + private static boolean isFilteredRequest(GetSettingsRequest request) { + return CollectionUtils.isEmpty(request.names()) == false; + } + @Override protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener listener) { Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); ImmutableOpenMap.Builder indexToSettingsBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder indexToDefaultSettingsBuilder = ImmutableOpenMap.builder(); for (Index concreteIndex : concreteIndices) { IndexMetaData indexMetaData = state.getMetaData().index(concreteIndex); if (indexMetaData == null) { continue; } - Settings settings = settingsFilter.filter(indexMetaData.getSettings()); + Settings indexSettings = settingsFilter.filter(indexMetaData.getSettings()); if (request.humanReadable()) { - settings = IndexMetaData.addHumanReadableSettings(settings); + indexSettings = IndexMetaData.addHumanReadableSettings(indexSettings); } - if (CollectionUtils.isEmpty(request.names()) == false) { - settings = settings.filter(k -> Regex.simpleMatch(request.names(), k)); + + if (isFilteredRequest(request)) { + indexSettings = indexSettings.filter(k -> Regex.simpleMatch(request.names(), k)); + } + + indexToSettingsBuilder.put(concreteIndex.getName(), indexSettings); + if (request.includeDefaults()) { + Settings defaultSettings = settingsFilter.filter(indexScopedSettings.diff(indexSettings, Settings.EMPTY)); + if (isFilteredRequest(request)) { + defaultSettings = defaultSettings.filter(k -> Regex.simpleMatch(request.names(), k)); + } + indexToDefaultSettingsBuilder.put(concreteIndex.getName(), defaultSettings); } - indexToSettingsBuilder.put(concreteIndex.getName(), settings); } - listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build())); + listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build(), indexToDefaultSettingsBuilder.build())); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java index 8ac7f12312a45..9791994c773e2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java @@ -44,18 +44,12 @@ public class RestGetSettingsAction extends BaseRestHandler { - private final IndexScopedSettings indexScopedSettings; - private final SettingsFilter settingsFilter; - - public RestGetSettingsAction(Settings settings, RestController controller, IndexScopedSettings indexScopedSettings, - final SettingsFilter settingsFilter) { + public RestGetSettingsAction(Settings settings, RestController controller) { super(settings); - this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/_settings/{name}", this); controller.registerHandler(GET, "/{index}/_settings", this); controller.registerHandler(GET, "/{index}/_settings/{name}", this); controller.registerHandler(GET, "/{index}/_setting/{name}", this); - this.settingsFilter = settingsFilter; } @Override @@ -73,31 +67,16 @@ public 
RestChannelConsumer prepareRequest(final RestRequest request, final NodeC .indices(Strings.splitStringByCommaToArray(request.param("index"))) .indicesOptions(IndicesOptions.fromRequest(request, IndicesOptions.strictExpandOpen())) .humanReadable(request.hasParam("human")) + .includeDefaults(renderDefaults) .names(names); getSettingsRequest.local(request.paramAsBoolean("local", getSettingsRequest.local())); + getSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getSettingsRequest.masterNodeTimeout())); return channel -> client.admin().indices().getSettings(getSettingsRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(GetSettingsResponse getSettingsResponse, XContentBuilder builder) throws Exception { - builder.startObject(); - for (ObjectObjectCursor cursor : getSettingsResponse.getIndexToSettings()) { - // no settings, jump over it to shorten the response data - if (cursor.value.isEmpty()) { - continue; - } - builder.startObject(cursor.key); - builder.startObject("settings"); - cursor.value.toXContent(builder, request); - builder.endObject(); - if (renderDefaults) { - builder.startObject("defaults"); - settingsFilter.filter(indexScopedSettings.diff(cursor.value, settings)).toXContent(builder, request); - builder.endObject(); - } - builder.endObject(); - } - builder.endObject(); + getSettingsResponse.toXContent(builder, request); return new BytesRestResponse(OK, builder); } }); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java new file mode 100644 index 0000000000000..11f0188c8c0b0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java @@ -0,0 +1,148 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.settings.get; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.index.Index; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.CapturingTransport; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; + +public class GetSettingsActionTests extends ESTestCase { + + private TransportService transportService; + private ClusterService clusterService; + private ThreadPool threadPool; + private SettingsFilter settingsFilter; + private final String indexName = "test_index"; + + private TestTransportGetSettingsAction getSettingsAction; + + class TestTransportGetSettingsAction extends TransportGetSettingsAction { + TestTransportGetSettingsAction() { + super(Settings.EMPTY, GetSettingsActionTests.this.transportService, GetSettingsActionTests.this.clusterService, + GetSettingsActionTests.this.threadPool, settingsFilter, new ActionFilters(Collections.emptySet()), + new Resolver(Settings.EMPTY), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + } + @Override + protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener listener) { + ClusterState stateWithIndex = ClusterStateCreationUtils.state(indexName, 1, 1); + super.masterOperation(request, stateWithIndex, listener); + } + } + + @Before + public void setUp() throws Exception { + super.setUp(); + + settingsFilter = new SettingsModule(Settings.EMPTY, Collections.emptyList(), Collections.emptyList()).getSettingsFilter(); + threadPool = new TestThreadPool("GetSettingsActionTests"); + clusterService = createClusterService(threadPool); + CapturingTransport capturingTransport = new CapturingTransport(); + transportService = new TransportService(clusterService.getSettings(), capturingTransport, threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> clusterService.localNode(), null, Collections.emptySet()); + transportService.start(); + transportService.acceptIncomingRequests(); + getSettingsAction = new GetSettingsActionTests.TestTransportGetSettingsAction(); + } + + @After + public void tearDown() throws Exception { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + clusterService.close(); + super.tearDown(); + } + + public void testIncludeDefaults() { + GetSettingsRequest noDefaultsRequest = new GetSettingsRequest().indices(indexName); + getSettingsAction.execute(null, noDefaultsRequest, ActionListener.wrap(noDefaultsResponse -> { + assertNull("index.refresh_interval should be null as it was never set", noDefaultsResponse.getSetting(indexName, + "index.refresh_interval")); + 
}, exception -> { + throw new AssertionError(exception); + })); + + GetSettingsRequest defaultsRequest = new GetSettingsRequest().indices(indexName).includeDefaults(true); + + getSettingsAction.execute(null, defaultsRequest, ActionListener.wrap(defaultsResponse -> { + assertNotNull("index.refresh_interval should be set as we are including defaults", defaultsResponse.getSetting(indexName, + "index.refresh_interval")); + }, exception -> { + throw new AssertionError(exception); + })); + + } + + public void testIncludeDefaultsWithFiltering() { + GetSettingsRequest defaultsRequest = new GetSettingsRequest().indices(indexName).includeDefaults(true) + .names("index.refresh_interval"); + getSettingsAction.execute(null, defaultsRequest, ActionListener.wrap(defaultsResponse -> { + assertNotNull("index.refresh_interval should be set as we are including defaults", defaultsResponse.getSetting(indexName, + "index.refresh_interval")); + assertNull("index.number_of_shards should be null as this query is filtered", + defaultsResponse.getSetting(indexName, "index.number_of_shards")); + assertNull("index.warmer.enabled should be null as this query is filtered", + defaultsResponse.getSetting(indexName, "index.warmer.enabled")); + }, exception -> { + throw new AssertionError(exception); + })); + } + + static class Resolver extends IndexNameExpressionResolver { + Resolver(Settings settings) { + super(settings); + } + + @Override + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { + return request.indices(); + } + + @Override + public Index[] concreteIndices(ClusterState state, IndicesRequest request) { + Index[] out = new Index[request.indices().length]; + for (int x = 0; x < out.length; x++) { + out[x] = new Index(request.indices()[x], "_na_"); + } + return out; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestTests.java new file mode 100644 index 0000000000000..a7601355184db --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestTests.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.settings.get; + +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Base64; + +public class GetSettingsRequestTests extends ESTestCase { + private static final String TEST_622_REQUEST_BYTES = "ADwDAAEKdGVzdF9pbmRleA4BEHRlc3Rfc2V0dGluZ19rZXkB"; + private static final GetSettingsRequest TEST_622_REQUEST = new GetSettingsRequest() + .indices("test_index") + .names("test_setting_key") + .humanReadable(true); + private static final GetSettingsRequest TEST_700_REQUEST = new GetSettingsRequest() + .includeDefaults(true) + .humanReadable(true) + .indices("test_index") + .names("test_setting_key"); + + public void testSerdeRoundTrip() throws IOException { + BytesStreamOutput bso = new BytesStreamOutput(); + TEST_700_REQUEST.writeTo(bso); + + byte[] responseBytes = BytesReference.toBytes(bso.bytes()); + StreamInput si = StreamInput.wrap(responseBytes); + GetSettingsRequest deserialized = new GetSettingsRequest(); + deserialized.readFrom(si); + assertEquals(TEST_700_REQUEST, deserialized); + } + + public void testSerializeBackwardsCompatibility() throws IOException { + BytesStreamOutput bso = new BytesStreamOutput(); + bso.setVersion(Version.V_6_2_2); + TEST_700_REQUEST.writeTo(bso); + + byte[] responseBytes = BytesReference.toBytes(bso.bytes()); + assertEquals(TEST_622_REQUEST_BYTES, Base64.getEncoder().encodeToString(responseBytes)); + } + + public void testDeserializeBackwardsCompatibility() throws IOException { + StreamInput si = StreamInput.wrap(Base64.getDecoder().decode(TEST_622_REQUEST_BYTES)); + si.setVersion(Version.V_6_2_2); + GetSettingsRequest deserialized = new GetSettingsRequest(); + deserialized.readFrom(si); + assertEquals(TEST_622_REQUEST, deserialized); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java new file mode 100644 index 0000000000000..cf125257c36a8 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsResponseTests.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.settings.get; + +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.RandomCreateIndexGenerator; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.junit.Assert; + +import java.io.IOException; +import java.util.Base64; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING; + +public class GetSettingsResponseTests extends AbstractStreamableXContentTestCase { + + /* + index.number_of_shards=2,index.number_of_replicas=1. The below base64'd bytes were generated by + code from the 6.2.2 tag. + */ + private static final String TEST_6_2_2_RESPONSE_BYTES = + "AQppbmRleF9uYW1lAhhpbmRleC5udW1iZXJfb2ZfcmVwbGljYXMAATEWaW5kZXgubnVtYmVyX29mX3NoYXJkcwABMg=="; + + /* This response object was generated using similar code to the code used to create the above bytes */ + private static final GetSettingsResponse TEST_6_2_2_RESPONSE_INSTANCE = getExpectedTest622Response(); + + @Override + protected GetSettingsResponse createBlankInstance() { + return new GetSettingsResponse(); + } + + @Override + protected GetSettingsResponse createTestInstance() { + HashMap indexToSettings = new HashMap<>(); + HashMap indexToDefaultSettings = new HashMap<>(); + + IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS; + + Set indexNames = new HashSet(); + int numIndices = randomIntBetween(1, 5); + for (int x=0;x immutableIndexToSettings = + ImmutableOpenMap.builder().putAll(indexToSettings).build(); + + + if (randomBoolean()) { + for (String indexName : indexToSettings.keySet()) { + Settings defaultSettings = indexScopedSettings.diff(indexToSettings.get(indexName), Settings.EMPTY); + indexToDefaultSettings.put(indexName, defaultSettings); + } + } + + ImmutableOpenMap immutableIndexToDefaultSettings = + ImmutableOpenMap.builder().putAll(indexToDefaultSettings).build(); + + return new GetSettingsResponse(immutableIndexToSettings, immutableIndexToDefaultSettings); + } + + @Override + protected GetSettingsResponse doParseInstance(XContentParser parser) throws IOException { + return GetSettingsResponse.fromXContent(parser); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + //we do not want to add new fields at the root (index-level), or inside settings blocks + return f -> f.equals("") || f.contains(".settings") || f.contains(".defaults"); + } + + private static GetSettingsResponse getExpectedTest622Response() { + /* This is a fairly direct copy of the code used to generate the base64'd response above -- with the caveat that the constructor + has been modified so that the code compiles on this version of elasticsearch + */ + HashMap indexToSettings = new HashMap<>(); + Settings.Builder builder = Settings.builder(); + + 
+        builder.put(SETTING_NUMBER_OF_SHARDS, 2);
+        builder.put(SETTING_NUMBER_OF_REPLICAS, 1);
+        indexToSettings.put("index_name", builder.build());
+        GetSettingsResponse response = new GetSettingsResponse(
+            ImmutableOpenMap.<String, Settings>builder().putAll(indexToSettings).build(), ImmutableOpenMap.of());
+        return response;
+    }
+
+    private static GetSettingsResponse getResponseWithNewFields() {
+        HashMap<String, Settings> indexToDefaultSettings = new HashMap<>();
+        Settings.Builder builder = Settings.builder();
+
+        builder.put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s");
+        indexToDefaultSettings.put("index_name", builder.build());
+        ImmutableOpenMap<String, Settings> defaultsMap = ImmutableOpenMap.<String, Settings>builder().putAll(indexToDefaultSettings)
+            .build();
+        return new GetSettingsResponse(getExpectedTest622Response().getIndexToSettings(), defaultsMap);
+    }
+
+    public void testCanDecode622Response() throws IOException {
+        StreamInput si = StreamInput.wrap(Base64.getDecoder().decode(TEST_6_2_2_RESPONSE_BYTES));
+        si.setVersion(Version.V_6_2_2);
+        GetSettingsResponse response = new GetSettingsResponse();
+        response.readFrom(si);
+
+        Assert.assertEquals(TEST_6_2_2_RESPONSE_INSTANCE, response);
+    }
+
+    public void testCanOutput622Response() throws IOException {
+        GetSettingsResponse responseWithExtraFields = getResponseWithNewFields();
+        BytesStreamOutput bso = new BytesStreamOutput();
+        bso.setVersion(Version.V_6_2_2);
+        responseWithExtraFields.writeTo(bso);
+
+        String base64OfResponse = Base64.getEncoder().encodeToString(BytesReference.toBytes(bso.bytes()));
+
+        Assert.assertEquals(TEST_6_2_2_RESPONSE_BYTES, base64OfResponse);
+    }
+}

From 09e96169ec2e2bdd7c99eb1263b8c65cf941a1cc Mon Sep 17 00:00:00 2001
From: Thomas Callahan
Date: Thu, 17 May 2018 17:31:38 -0400
Subject: [PATCH 16/34] [DOCS] Add missing callout in IndicesClientDocumentationIT

IndicesClientDocumentationIT.java is missing a callout, causing the docs
build to break. This commit adds the missing callout.
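For reference, the high-level REST client docs are generated from tagged regions in the IT
sources, and the docs text refers to numbered callout markers on those lines; a tagged line
without its marker is enough to fail the build. The shape of such a region (as fixed in the
diff below) is:

    // tag::get-settings-request
    GetSettingsRequest request = new GetSettingsRequest().indices("index"); // <1>
    // end::get-settings-request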
Relates #30494 --- .../client/documentation/IndicesClientDocumentationIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index c766d87d231ce..d4d5af9e694ad 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -794,7 +794,7 @@ public void testGetSettings() throws Exception { } // tag::get-settings-request - GetSettingsRequest request = new GetSettingsRequest().indices("index"); + GetSettingsRequest request = new GetSettingsRequest().indices("index"); // <1> // end::get-settings-request // tag::get-settings-request-names From 8066b0279e3af1137ee66ae59ad63e675d517909 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 17 May 2018 14:20:03 -0700 Subject: [PATCH 17/34] [DOCS] Fixes edit URLs for stack overview (#30583) --- x-pack/docs/en/ml/index.asciidoc | 13 +++++++++++-- x-pack/docs/en/security/index.asciidoc | 11 ++++++++++- x-pack/docs/en/watcher/index.asciidoc | 11 +++++++++++ 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/x-pack/docs/en/ml/index.asciidoc b/x-pack/docs/en/ml/index.asciidoc index c36f77ca812aa..4c9a32da8d678 100644 --- a/x-pack/docs/en/ml/index.asciidoc +++ b/x-pack/docs/en/ml/index.asciidoc @@ -17,11 +17,20 @@ from {es} for analysis and anomaly results are displayed in {kib} dashboards. -- +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/overview.asciidoc include::overview.asciidoc[] + +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/getting-started.asciidoc include::getting-started.asciidoc[] + +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/configuring.asciidoc include::configuring.asciidoc[] + +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/stopping-ml.asciidoc include::stopping-ml.asciidoc[] -// include::ml-scenarios.asciidoc[] + +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/api-quickref.asciidoc include::api-quickref.asciidoc[] -//include::troubleshooting.asciidoc[] Referenced from x-pack/docs/public/xpack-troubleshooting.asciidoc + +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/ml/functions.asciidoc include::functions.asciidoc[] diff --git a/x-pack/docs/en/security/index.asciidoc b/x-pack/docs/en/security/index.asciidoc index fed2906ab1bd9..a46197b89f67d 100644 --- a/x-pack/docs/en/security/index.asciidoc +++ b/x-pack/docs/en/security/index.asciidoc @@ -95,20 +95,29 @@ Head over to our {security-forum}[Security Discussion Forum] to share your experience, questions, and suggestions. 
-- +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/getting-started.asciidoc include::getting-started.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/how-security-works.asciidoc include::how-security-works.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authentication/overview.asciidoc include::authentication/overview.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/overview.asciidoc include::authorization/overview.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/auditing.asciidoc include::auditing.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/securing-communications.asciidoc include::securing-communications.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/using-ip-filtering.asciidoc include::using-ip-filtering.asciidoc[] +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/tribe-clients-integrations.asciidoc include::tribe-clients-integrations.asciidoc[] -include::reference.asciidoc[] \ No newline at end of file +:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/reference.asciidoc +include::reference.asciidoc[] diff --git a/x-pack/docs/en/watcher/index.asciidoc b/x-pack/docs/en/watcher/index.asciidoc index 3cbc54b0a484b..2be3638971929 100644 --- a/x-pack/docs/en/watcher/index.asciidoc +++ b/x-pack/docs/en/watcher/index.asciidoc @@ -65,24 +65,35 @@ from the query, whether the condition was met, and what actions were taken. 
--

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/getting-started.asciidoc
 include::getting-started.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/how-watcher-works.asciidoc
 include::how-watcher-works.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/encrypting-data.asciidoc
 include::encrypting-data.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/input.asciidoc
 include::input.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/trigger.asciidoc
 include::trigger.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/condition.asciidoc
 include::condition.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/actions.asciidoc
 include::actions.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/transform.asciidoc
 include::transform.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/java.asciidoc
 include::java.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/managing-watches.asciidoc
 include::managing-watches.asciidoc[]

+:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/watcher/example-watches.asciidoc
 include::example-watches.asciidoc[]

From ee9b9d445fb88a7c29e13202d3db58a1bf4bd061 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Thu, 17 May 2018 21:17:24 -0400
Subject: [PATCH 18/34] Make TransportClusterStateAction abide to our style
 (#30697)

I still do not like == false. However, I am so used to reading it that
today I read this line of code and could not understand how it could
possibly be doing the right thing. It was only when I finally noticed
the ! that the code made sense. This commit changes this code to be in
our style of == false. I still do not like == false.
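As a quick illustration of the style argument (a sketch, not taken from the diff itself;
handle() is a placeholder): the two conditions below are equivalent, but the second makes
the negation impossible to overlook.

    boolean returned = false;

    if (!returned) {          // the single-character ! is easy to miss
        handle();
    }

    if (returned == false) {  // house style: the negation is explicit
        handle();
    }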
---
 .../admin/cluster/state/TransportClusterStateAction.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java
index 299e97a96408d..53181ccd1b6d5 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java
@@ -115,9 +115,9 @@ protected void masterOperation(final ClusterStateRequest request, final ClusterS
             mdBuilder = MetaData.builder(currentState.metaData());
         }
 
-        // Filter our metadata that shouldn't be returned by API
-        for(ObjectObjectCursor<String, MetaData.Custom> custom : currentState.metaData().customs()) {
-            if(!custom.value.context().contains(MetaData.XContentContext.API)) {
+        // filter out metadata that shouldn't be returned by the API
+        for (ObjectObjectCursor<String, MetaData.Custom> custom : currentState.metaData().customs()) {
+            if (custom.value.context().contains(MetaData.XContentContext.API) == false) {
                 mdBuilder.removeCustom(custom.key);
             }
         }

From 5fcebc92cbed06b0a15cc14a0f7fb6f7ba4d1963 Mon Sep 17 00:00:00 2001
From: Tim Vernum
Date: Thu, 17 May 2018 18:27:18 +1000
Subject: [PATCH 19/34] Preserve REST client auth despite 401 response (#30558)

The default behaviour for Apache HTTP client is to mimic the standard
browser behaviour of clearing the authentication cache (for a given host)
if that host responds with 401. This behaviour is appropriate in an
interactive browser environment where the user is given the opportunity
to provide alternative credentials, but it is not the preferred behaviour
for the ES REST client.

X-Pack may respond with a 401 status if a request is made before the
node/cluster has recovered sufficient state to know how to handle the
provided authentication credentials - for example, the security index
needs to be recovered before we can authenticate native users. In these
cases the correct behaviour is to retry with the same credentials (rather
than discarding those credentials).
---
 ...tentCredentialsAuthenticationStrategy.java | 59 +++++++++++++++++++
 .../client/RestClientBuilder.java             |  3 +-
 .../RestClientSingleHostIntegTests.java       | 50 ++++++++++++----
 3 files changed, 99 insertions(+), 13 deletions(-)
 create mode 100644 client/rest/src/main/java/org/elasticsearch/client/PersistentCredentialsAuthenticationStrategy.java

diff --git a/client/rest/src/main/java/org/elasticsearch/client/PersistentCredentialsAuthenticationStrategy.java b/client/rest/src/main/java/org/elasticsearch/client/PersistentCredentialsAuthenticationStrategy.java
new file mode 100644
index 0000000000000..4ae22fbe3728e
--- /dev/null
+++ b/client/rest/src/main/java/org/elasticsearch/client/PersistentCredentialsAuthenticationStrategy.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ *
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScheme;
+import org.apache.http.impl.client.TargetAuthenticationStrategy;
+import org.apache.http.protocol.HttpContext;
+
+/**
+ * An {@link org.apache.http.client.AuthenticationStrategy} implementation that does not perform
+ * any special handling if authentication fails.
+ * The default handler in Apache HTTP client mimics standard browser behaviour of clearing authentication
+ * credentials if it receives a 401 response from the server. While this can be useful for browsers, it is
+ * rarely the desired behaviour with the Elasticsearch REST API.
+ * If the code using the REST client has configured credentials for the REST API, then we can and should
+ * assume that this is intentional, and those credentials represent the best possible authentication
+ * mechanism to the Elasticsearch node.
+ * If we receive a 401 status, a probable cause is that the authentication mechanism in place was unable
+ * to perform the requisite password checks (the node has not yet recovered its state, or an external
+ * authentication provider was unavailable).
+ * If this occurs, then the desired behaviour is for the REST client to retry with the same credentials
+ * (rather than trying with no credentials, or expecting the calling code to provide alternate credentials).
+ */
+final class PersistentCredentialsAuthenticationStrategy extends TargetAuthenticationStrategy {
+
+    private final Log logger = LogFactory.getLog(PersistentCredentialsAuthenticationStrategy.class);
+
+    @Override
+    public void authFailed(HttpHost host, AuthScheme authScheme, HttpContext context) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("Authentication to " + host + " failed (scheme: " + authScheme.getSchemeName()
+                + "). Preserving credentials for next request");
+        }
+        // Do nothing.
+        // The superclass implementation of this method would clear the credentials
+        // from the cache, which is exactly the behaviour we don't want here.
+    }
+}
diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java
index 8768c07161989..5f7831c67fc28 100644
--- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java
+++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java
@@ -204,7 +204,8 @@ private CloseableHttpAsyncClient createHttpClient() {
         HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create().setDefaultRequestConfig(requestConfigBuilder.build())
             //default settings for connection pooling may be too constraining
             .setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE).setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL)
-            .setSSLContext(SSLContext.getDefault());
+            .setSSLContext(SSLContext.getDefault())
+            .setTargetAuthenticationStrategy(new PersistentCredentialsAuthenticationStrategy());
         if (httpClientConfigCallback != null) {
             httpClientBuilder = httpClientConfigCallback.customizeHttpClient(httpClientBuilder);
         }
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
index 667e38a5167d7..35cac627bbe6a 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
@@ -31,14 +31,14 @@
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.TargetAuthenticationStrategy;
 import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
+import org.apache.http.message.BasicHeader;
 import org.apache.http.nio.entity.NStringEntity;
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.mocksocket.MockHttpServer;
 import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Before;
-import org.junit.BeforeClass;
 
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -147,6 +149,8 @@ public HttpAsyncClientBuilder customizeHttpClient(final HttpAsyncClientBuilder h
             if (usePreemptiveAuth == false) {
                 // disable preemptive auth by ignoring any authcache
                 httpClientBuilder.disableAuthCaching();
+                // don't use the "persistent credentials strategy"
+                httpClientBuilder.setTargetAuthenticationStrategy(new TargetAuthenticationStrategy());
             }
 
             return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
@@ -193,7 +195,7 @@ public void onFailure(Exception exception) {
         assertTrue("timeout waiting for requests to be sent", latch.await(10, TimeUnit.SECONDS));
         if (exceptions.isEmpty() == false) {
             AssertionError error = new AssertionError("expected no failures but got some.
see suppressed for first 10 of [" - + exceptions.size() + "] failures"); + + exceptions.size() + "] failures"); for (Exception exception : exceptions.subList(0, Math.min(10, exceptions.size()))) { error.addSuppressed(exception); } @@ -217,7 +219,7 @@ public void testHeaders() throws IOException { Response esResponse; try { esResponse = restClient.performRequest(method, "/" + statusCode, Collections.emptyMap(), requestHeaders); - } catch(ResponseException e) { + } catch (ResponseException e) { esResponse = e.getResponse(); } @@ -291,8 +293,8 @@ public void testEncodeParams() throws IOException { /** * Verify that credentials are sent on the first request with preemptive auth enabled (default when provided with credentials). */ - public void testPreemptiveAuthEnabled() throws IOException { - final String[] methods = { "POST", "PUT", "GET", "DELETE" }; + public void testPreemptiveAuthEnabled() throws IOException { + final String[] methods = {"POST", "PUT", "GET", "DELETE"}; try (RestClient restClient = createRestClient(true, true)) { for (final String method : methods) { @@ -306,8 +308,8 @@ public void testPreemptiveAuthEnabled() throws IOException { /** * Verify that credentials are not sent on the first request with preemptive auth disabled. */ - public void testPreemptiveAuthDisabled() throws IOException { - final String[] methods = { "POST", "PUT", "GET", "DELETE" }; + public void testPreemptiveAuthDisabled() throws IOException { + final String[] methods = {"POST", "PUT", "GET", "DELETE"}; try (RestClient restClient = createRestClient(true, false)) { for (final String method : methods) { @@ -318,12 +320,31 @@ public void testPreemptiveAuthDisabled() throws IOException { } } + /** + * Verify that credentials continue to be sent even if a 401 (Unauthorized) response is received + */ + public void testAuthCredentialsAreNotClearedOnAuthChallenge() throws IOException { + final String[] methods = {"POST", "PUT", "GET", "DELETE"}; + + try (RestClient restClient = createRestClient(true, true)) { + for (final String method : methods) { + Header realmHeader = new BasicHeader("WWW-Authenticate", "Basic realm=\"test\""); + final Response response401 = bodyTest(restClient, method, 401, new Header[]{realmHeader}); + assertThat(response401.getHeader("Authorization"), startsWith("Basic")); + + final Response response200 = bodyTest(restClient, method, 200, new Header[0]); + assertThat(response200.getHeader("Authorization"), startsWith("Basic")); + } + } + + } + public void testUrlWithoutLeadingSlash() throws Exception { if (pathPrefix.length() == 0) { try { restClient.performRequest("GET", "200"); fail("request should have failed"); - } catch(ResponseException e) { + } catch (ResponseException e) { assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); } } else { @@ -335,8 +356,8 @@ public void testUrlWithoutLeadingSlash() throws Exception { { //pathPrefix is not required to start with '/', will be added automatically try (RestClient restClient = RestClient.builder( - new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())) - .setPathPrefix(pathPrefix.substring(1)).build()) { + new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())) + .setPathPrefix(pathPrefix.substring(1)).build()) { Response response = restClient.performRequest("GET", "200"); //a trailing slash gets automatically added if a pathPrefix is configured assertEquals(200, response.getStatusLine().getStatusCode()); @@ -350,10 +371,15 @@ private Response 
bodyTest(final String method) throws IOException {
     }
 
     private Response bodyTest(final RestClient restClient, final String method) throws IOException {
-        String requestBody = "{ \"field\": \"value\" }";
         int statusCode = randomStatusCode(getRandom());
+        return bodyTest(restClient, method, statusCode, new Header[0]);
+    }
+
+    private Response bodyTest(RestClient restClient, String method, int statusCode, Header[] headers) throws IOException {
+        String requestBody = "{ \"field\": \"value\" }";
         Request request = new Request(method, "/" + statusCode);
         request.setJsonEntity(requestBody);
+        request.setHeaders(headers);
         Response esResponse;
         try {
             esResponse = restClient.performRequest(request);

From cfd239a31c2acef98e6aa95c2a242e2ded2bb0e6 Mon Sep 17 00:00:00 2001
From: Tim Vernum
Date: Thu, 17 May 2018 21:36:13 +1000
Subject: [PATCH 20/34] Add detailed assert message to IndexAuditUpgradeIT
 (#30669)

Print out the returned buckets if the size does not match the
expectation.
---
 .../java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java
index c155370ce0339..97a61f25d7b03 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexAuditUpgradeIT.java
@@ -123,6 +123,6 @@ private void assertNumUniqueNodeNameBuckets(Matcher<Integer> numBucketsMatcher)
         assertNotNull(nodesAgg);
         List<Map<String, Object>> buckets = (List<Map<String, Object>>) nodesAgg.get("buckets");
         assertNotNull(buckets);
-        assertThat(buckets.size(), numBucketsMatcher);
+        assertThat("Found node buckets " + buckets, buckets.size(), numBucketsMatcher);
     }
 }

From 7f48df0f481ff9e40e2baea570228f070acc0b00 Mon Sep 17 00:00:00 2001
From: Hendrik Muhs
Date: Fri, 18 May 2018 08:13:58 +0200
Subject: [PATCH 21/34] [ML] add version information in case of crash of
 native ML process (#30674)

This change adds version information in case a native ML process crashes;
the version is important for choosing the right symbol files when
analyzing the crash. Adding the version combines all necessary
information on one line.
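With this in place, a stored fatal-error entry reads along the lines of the following
(version and build hash here are illustrative placeholders, not real output):

    Fatal error: '...std::bad_alloc...', process ran out of memory, version: 6.4.0 (build abc123)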
relates elastic/ml-cpp#94 --- .../ml/job/process/NativeController.java | 17 +------- .../process/logging/CppLogMessageHandler.java | 39 ++++++++++++++++++- 2 files changed, 38 insertions(+), 18 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java index a45f44c227657..43c3f4825ddf3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeoutException; -import java.util.regex.Matcher; -import java.util.regex.Pattern; /** @@ -84,20 +82,7 @@ public long getPid() throws TimeoutException { } public Map getNativeCodeInfo() throws TimeoutException { - String copyrightMessage = cppLogHandler.getCppCopyright(CONTROLLER_CONNECT_TIMEOUT); - Matcher matcher = Pattern.compile("Version (.+) \\(Build ([^)]+)\\) Copyright ").matcher(copyrightMessage); - if (matcher.find()) { - Map info = new HashMap<>(2); - info.put("version", matcher.group(1)); - info.put("build_hash", matcher.group(2)); - return info; - } else { - // If this happens it probably means someone has changed the format in lib/ver/CBuildInfo.cc - // in the machine-learning-cpp repo without changing the pattern above to match - String msg = "Unexpected native controller process copyright format: " + copyrightMessage; - LOGGER.error(msg); - throw new ElasticsearchException(msg); - } + return cppLogHandler.getNativeCodeInfo(CONTROLLER_CONNECT_TIMEOUT); } public void startProcess(List command) throws IOException { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessageHandler.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessageHandler.java index ddafc36416b65..af0f199dd0c58 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessageHandler.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/logging/CppLogMessageHandler.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -30,10 +30,15 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.Deque; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Handle a stream of C++ log messages that arrive via a named pipe in JSON format. @@ -181,6 +186,26 @@ public String getCppCopyright(Duration timeout) throws TimeoutException { return cppCopyright; } + /** + * Extracts version information from the copyright string which assumes a certain format. 
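+     * The expected shape is along the lines of
+     * "Version 6.4.0 (Build abc123) Copyright ..." (values illustrative);
+     * the version and build hash are pulled out of that string with a regex.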
+     */
+    public Map<String, Object> getNativeCodeInfo(Duration timeout) throws TimeoutException {
+        String copyrightMessage = getCppCopyright(timeout);
+        Matcher matcher = Pattern.compile("Version (.+) \\(Build ([^)]+)\\) Copyright ").matcher(copyrightMessage);
+        if (matcher.find()) {
+            Map<String, Object> info = new HashMap<>(2);
+            info.put("version", matcher.group(1));
+            info.put("build_hash", matcher.group(2));
+            return info;
+        } else {
+            // If this happens it probably means someone has changed the format in lib/ver/CBuildInfo.cc
+            // in the ml-cpp repo without changing the pattern above to match
+            String msg = "Unexpected native process copyright format: " + copyrightMessage;
+            LOGGER.error(msg);
+            throw new ElasticsearchException(msg);
+        }
+    }
+
     /**
      * Expected to be called very infrequently.
      */
@@ -281,8 +306,18 @@ private void parseMessage(XContent xContent, BytesReference bytesRef) {
         } catch (XContentParseException e) {
             String upstreamMessage = "Fatal error: '" + bytesRef.utf8ToString() + "'";
             if (upstreamMessage.contains("bad_alloc")) {
-                upstreamMessage += ", process ran out of memory.";
+                upstreamMessage += ", process ran out of memory";
             }
+
+            // add version information, so it's conveniently next to the crash log
+            upstreamMessage += ", version: ";
+            try {
+                Map<String, Object> versionInfo = getNativeCodeInfo(Duration.ofMillis(10));
+                upstreamMessage += String.format(Locale.ROOT, "%s (build %s)", versionInfo.get("version"), versionInfo.get("build_hash"));
+            } catch (TimeoutException timeoutException) {
+                upstreamMessage += "failed to retrieve";
+            }
+
             storeError(upstreamMessage);
             seenFatalError = true;
         } catch (IOException e) {

From 70f48ee0e450c17399d9fa43108c69c88c380ec6 Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Fri, 18 May 2018 08:49:18 +0200
Subject: [PATCH 22/34] Watcher: Prevent duplicate watch triggering during
 upgrade (#30643)

If a user puts a watch while upgrading from 5.x to 6.x, this can lead to
the watch being triggered on the node receiving the put watch request.
Note that this can only happen when watcher is not running in its
distributed fashion. That is the case as long as there are still nodes
running on version 5 in a 6.x cluster.
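Reduced to its core, the fix makes the trigger registration conditional on being the
elected master. A sketch of the logic in the diff below (not a drop-in snippet):

    // register the watch with the local trigger service only when watcher is
    // not distributed and this node is the elected master; otherwise another
    // node owns triggering and the watch would execute twice
    if (localExecute(request) == false
            && clusterService.state().nodes().isLocalNodeElectedMaster()
            && watch.status().state().isActive()) {
        triggerService.add(watch);
    }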
--- .../actions/put/TransportPutWatchAction.java | 7 +- .../put/TransportPutWatchActionTests.java | 112 ++++++++++++++++-- 2 files changed, 105 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java index 7dcca20e2019e..17495bcad1ceb 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java @@ -63,6 +63,7 @@ public class TransportPutWatchAction extends WatcherTransportActionwrap(response -> { boolean created = response.getResult() == DocWriteResponse.Result.CREATED; - if (localExecute(request) == false && watch.status().state().isActive()) { + // if not yet in distributed mode (mixed 5/6 version in cluster), only trigger on the master node + if (localExecute(request) == false && + this.clusterService.state().nodes().isLocalNodeElectedMaster() && + watch.status().state().isActive()) { triggerService.add(watch); } listener.onResponse(new PutWatchResponse(response.getId(), response.getVersion(), created)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java index ce223b1c9fd15..0844f573cc041 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchActionTests.java @@ -5,13 +5,18 @@ */ package org.elasticsearch.xpack.watcher.transport.actions.put; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -23,17 +28,23 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchRequest; +import org.elasticsearch.xpack.core.watcher.transport.actions.put.PutWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; +import org.elasticsearch.xpack.core.watcher.watch.WatchStatus; import org.elasticsearch.xpack.watcher.test.WatchExecutionContextMockBuilder; import org.elasticsearch.xpack.watcher.trigger.TriggerService; import 
org.elasticsearch.xpack.watcher.watch.WatchParser; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import org.junit.Before; import org.mockito.ArgumentCaptor; import java.util.Collections; +import java.util.HashSet; import java.util.Map; +import static java.util.Arrays.asList; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -45,18 +56,20 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class TransportPutWatchActionTests extends ESTestCase { private TransportPutWatchAction action; - private Watch watch = new WatchExecutionContextMockBuilder("_id").buildMock().watch(); - private ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + private final Watch watch = new WatchExecutionContextMockBuilder("_id").buildMock().watch(); + private final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + private final ClusterService clusterService = mock(ClusterService.class); + private final TriggerService triggerService = mock(TriggerService.class); + private final ActionListener listener = ActionListener.wrap(r -> {}, e -> assertThat(e, is(nullValue()))); @Before public void setupAction() throws Exception { - TriggerService triggerService = mock(TriggerService.class); - ClusterService clusterService = mock(ClusterService.class); ThreadPool threadPool = mock(ThreadPool.class); when(threadPool.getThreadContext()).thenReturn(threadContext); @@ -64,19 +77,24 @@ public void setupAction() throws Exception { WatchParser parser = mock(WatchParser.class); when(parser.parseWithSecrets(eq("_id"), eq(false), anyObject(), anyObject(), anyObject(), anyBoolean())).thenReturn(watch); + WatchStatus status = mock(WatchStatus.class); + WatchStatus.State state = new WatchStatus.State(true, DateTime.now(DateTimeZone.UTC)); + when(status.state()).thenReturn(state); + when(watch.status()).thenReturn(status); Client client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); // mock an index response that calls the listener doAnswer(invocation -> { - IndexRequest request = (IndexRequest) invocation.getArguments()[1]; - ActionListener listener = (ActionListener) invocation.getArguments()[2]; + UpdateRequest request = (UpdateRequest) invocation.getArguments()[0]; + ActionListener listener = (ActionListener) invocation.getArguments()[1]; ShardId shardId = new ShardId(new Index(Watch.INDEX, "uuid"), 0); - listener.onResponse(new IndexResponse(shardId, request.type(), request.id(), 1, 1, 1, true)); + listener.onResponse(new UpdateResponse(shardId, request.type(), request.id(), request.version(), + DocWriteResponse.Result.UPDATED)); return null; - }).when(client).execute(any(), any(), any()); + }).when(client).update(any(), any()); action = new TransportPutWatchAction(Settings.EMPTY, transportService, threadPool, new ActionFilters(Collections.emptySet()), new IndexNameExpressionResolver(Settings.EMPTY), new ClockMock(), @@ -84,7 +102,6 @@ public void setupAction() throws Exception { } public void testHeadersAreFilteredWhenPuttingWatches() throws Exception { - ClusterState state = mock(ClusterState.class); // set up threadcontext with some arbitrary info String headerName = randomFrom(ClientHelper.SECURITY_HEADER_FILTERS); threadContext.putHeader(headerName, randomAlphaOfLength(10)); @@ -92,7 +109,17 @@ 
public void testHeadersAreFilteredWhenPuttingWatches() throws Exception { PutWatchRequest putWatchRequest = new PutWatchRequest(); putWatchRequest.setId("_id"); - action.masterOperation(putWatchRequest, state, ActionListener.wrap(r -> {}, e -> assertThat(e, is(nullValue())))); + + ClusterState state = ClusterState.builder(new ClusterName("my_cluster")) + .nodes(DiscoveryNodes.builder() + .masterNodeId("node_1") + .localNodeId(randomFrom("node_1", "node_2")) + .add(newNode("node_1", Version.CURRENT)) + .add(newNode("node_2", Version.CURRENT))) + .build(); + when(clusterService.state()).thenReturn(state); + + action.masterOperation(putWatchRequest, state, listener); ArgumentCaptor captor = ArgumentCaptor.forClass(Map.class); verify(watch.status()).setHeaders(captor.capture()); @@ -100,4 +127,63 @@ public void testHeadersAreFilteredWhenPuttingWatches() throws Exception { assertThat(capturedHeaders.keySet(), hasSize(1)); assertThat(capturedHeaders, hasKey(headerName)); } -} \ No newline at end of file + + public void testWatchesAreNeverTriggeredWhenDistributed() throws Exception { + PutWatchRequest putWatchRequest = new PutWatchRequest(); + putWatchRequest.setId("_id"); + + ClusterState clusterState = ClusterState.builder(new ClusterName("my_cluster")) + .nodes(DiscoveryNodes.builder() + .masterNodeId("node_1") + .localNodeId(randomFrom("node_1", "node_2")) + .add(newNode("node_1", Version.CURRENT)) + .add(newNode("node_2", Version.CURRENT))) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + action.masterOperation(putWatchRequest, clusterState, listener); + + verifyZeroInteractions(triggerService); + } + + public void testWatchesAreNotTriggeredOnNonMasterWhenNotDistributed() throws Exception { + PutWatchRequest putWatchRequest = new PutWatchRequest(); + putWatchRequest.setId("_id"); + + ClusterState clusterState = ClusterState.builder(new ClusterName("my_cluster")) + .nodes(DiscoveryNodes.builder() + .masterNodeId("node_2") + .localNodeId("node_1") + .add(newNode("node_1", Version.CURRENT)) + .add(newNode("node_2", Version.V_5_6_10))) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + action.masterOperation(putWatchRequest, clusterState, listener); + + verifyZeroInteractions(triggerService); + } + + public void testWatchesAreTriggeredOnMasterWhenNotDistributed() throws Exception { + PutWatchRequest putWatchRequest = new PutWatchRequest(); + putWatchRequest.setId("_id"); + + ClusterState clusterState = ClusterState.builder(new ClusterName("my_cluster")) + .nodes(DiscoveryNodes.builder() + .masterNodeId("node_1") + .localNodeId("node_1") + .add(newNode("node_1", Version.CURRENT)) + .add(newNode("node_2", Version.V_5_6_10))) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + action.masterOperation(putWatchRequest, clusterState, listener); + + verify(triggerService).add(eq(watch)); + } + + private static DiscoveryNode newNode(String nodeId, Version version) { + return new DiscoveryNode(nodeId, ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(), + new HashSet<>(asList(DiscoveryNode.Role.values())), version); + } +} From 6bbd1b808c1b5917c65314105c55128d0959226b Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Fri, 18 May 2018 08:51:43 +0200 Subject: [PATCH 23/34] Tests: Fail if test watches could not be triggered (#30392) Watcher tests now always fail hard when watches that were tried to be triggered in a test using the trigger() method, but could not because they were not found on any of the nodes in the 
cluster. --- .../AbstractWatcherIntegrationTestCase.java | 23 ++++++++++++------- .../trigger/ScheduleTriggerEngineMock.java | 15 +++++------- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 8b985cdbc848e..26b02a195f756 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.test; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; @@ -70,10 +71,12 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Locale; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; @@ -175,7 +178,7 @@ protected boolean timeWarped() { public void _setup() throws Exception { if (timeWarped()) { timeWarp = new TimeWarp(internalCluster().getInstances(ScheduleTriggerEngineMock.class), - (ClockMock)getInstanceFromMaster(Clock.class)); + (ClockMock)getInstanceFromMaster(Clock.class), logger); } if (internalCluster().size() > 0) { @@ -542,24 +545,28 @@ public EmailSent send(Email email, Authentication auth, Profile profile, String protected static class TimeWarp { - protected final Iterable schedulers; - protected final ClockMock clock; + private final List schedulers; + private final ClockMock clock; + private final Logger logger; - public TimeWarp(Iterable schedulers, ClockMock clock) { - this.schedulers = schedulers; + TimeWarp(Iterable schedulers, ClockMock clock, Logger logger) { + this.schedulers = StreamSupport.stream(schedulers.spliterator(), false).collect(Collectors.toList()); this.clock = clock; + this.logger = logger; } public void trigger(String jobName) { - schedulers.forEach(scheduler -> scheduler.trigger(jobName)); + trigger(jobName, 1, null); } public ClockMock clock() { return clock; } - public void trigger(String id, int times, TimeValue timeValue) { - schedulers.forEach(scheduler -> scheduler.trigger(id, times, timeValue)); + public void trigger(String watchId, int times, TimeValue timeValue) { + boolean isTriggered = schedulers.stream().anyMatch(scheduler -> scheduler.trigger(watchId, times, timeValue)); + String msg = String.format(Locale.ROOT, "could not find watch [%s] to trigger", watchId); + assertThat(msg, isTriggered, is(true)); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java index 63f4f95ae2161..f6c06117970dc 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java @@ -77,18 +77,13 @@ public boolean remove(String jobId) { return watches.remove(jobId) != null; } - public void trigger(String jobName) { - trigger(jobName, 1, null); + public boolean trigger(String jobName) { + return trigger(jobName, 1, null); } - public void trigger(String jobName, int times) { - trigger(jobName, times, null); - } - - public void trigger(String jobName, int times, TimeValue interval) { + public boolean trigger(String jobName, int times, TimeValue interval) { if (watches.containsKey(jobName) == false) { - logger.trace("not executing job [{}], not found", jobName); - return; + return false; } for (int i = 0; i < times; i++) { @@ -108,5 +103,7 @@ public void trigger(String jobName, int times, TimeValue interval) { } } } + + return true; } } From 5f16c92c718b2a4e3e7726040d64865c0fb910b8 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 18 May 2018 14:07:16 +0200 Subject: [PATCH 24/34] [ML] provide tmp storage for forecasting and possibly any ml native jobs #30399 This implementation lazily (on 1st forecast request) checks for available diskspace and creates a subfolder for storing data outside of Lucene indexes, but as part of the ES data paths. Details: - tmp storage is managed and does not allow allocation if disk space is below a threshold (5GB at the moment) - tmp storage is supposed to be managed by the native component but in case this fails cleanup is provided: - on job close - on process crash - after node crash, on restart - available space is re-checked for every forecast call (the native component has to check again before writing) Note: The 1st path that has enough space is chosen on job open (job close/reopen triggers a new search) --- x-pack/docs/en/ml/forecasting.asciidoc | 5 +- .../xpack/ml/MachineLearning.java | 6 +- .../ml/action/TransportForecastJobAction.java | 12 ++ .../ml/job/process/NativeStorageProvider.java | 123 ++++++++++++++++ .../autodetect/AutodetectProcessManager.java | 70 ++++++++- .../autodetect/params/ForecastParams.java | 26 +++- .../writer/ControlMsgToProcessWriter.java | 3 + .../process/NativeStorageProviderTests.java | 139 ++++++++++++++++++ x-pack/qa/ml-native-tests/build.gradle | 1 + .../xpack/ml/integration/ForecastIT.java | 47 ++++-- 10 files changed, 406 insertions(+), 26 deletions(-) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProvider.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProviderTests.java diff --git a/x-pack/docs/en/ml/forecasting.asciidoc b/x-pack/docs/en/ml/forecasting.asciidoc index 95693a1677f0a..cd01aa0fb77ca 100644 --- a/x-pack/docs/en/ml/forecasting.asciidoc +++ b/x-pack/docs/en/ml/forecasting.asciidoc @@ -59,10 +59,7 @@ For more information about any of these functions, see <>. * Forecasts run concurrently with real-time {ml} analysis. That is to say, {ml} analysis does not stop while forecasts are generated. Forecasts can have an impact on {ml} jobs, however, especially in terms of memory usage. For this -reason, forecasts run only if the model memory status is acceptable and the -snapshot models for the forecast do not require more than 20 MB. If these memory -limits are reached, consider splitting the job into multiple smaller jobs and -creating forecasts for these. +reason, forecasts run only if the model memory status is acceptable. 
* The job must be open when you create a forecast. Otherwise, an error occurs. * If there is insufficient data to generate any meaningful predictions, an error occurs. In general, forecasts that are created early in the learning phase diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index a80f6255cac5d..5e53355b9eee6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -291,7 +291,8 @@ public List> getSettings() { DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING, DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING, AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE, - AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE)); + AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE, + AutodetectProcessManager.MIN_DISK_SPACE_OFF_HEAP)); } public Settings additionalSettings() { @@ -408,6 +409,9 @@ public Collection createComponents(Client client, ClusterService cluster // This object's constructor attaches to the license state, so there's no need to retain another reference to it new InvalidLicenseEnforcer(settings, getLicenseState(), threadPool, datafeedManager, autodetectProcessManager); + // run node startup tasks + autodetectProcessManager.onNodeStartup(); + return Arrays.asList( mlLifeCycleService, jobProvider, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java index 3b09377b477b8..aaa59e7e8cac0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -28,6 +30,7 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams; import java.io.IOException; +import java.nio.file.Path; import java.util.List; import java.util.function.Consumer; @@ -36,6 +39,8 @@ public class TransportForecastJobAction extends TransportJobTaskAction { + private static final ByteSizeValue FORECAST_LOCAL_STORAGE_LIMIT = new ByteSizeValue(500, ByteSizeUnit.MB); + private final JobProvider jobProvider; @Inject public TransportForecastJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, @@ -73,6 +78,13 @@ protected void taskOperation(ForecastJobAction.Request request, TransportOpenJob paramsBuilder.expiresIn(request.getExpiresIn()); } + // tmp storage might be null, we do not log here, because it might not be + // required + Path tmpStorage = processManager.tryGetTmpStorage(task, FORECAST_LOCAL_STORAGE_LIMIT); + if (tmpStorage != null) { + paramsBuilder.tmpStorage(tmpStorage.toString()); + } + ForecastParams params = paramsBuilder.build(); processManager.forecastJob(task, params, e -> { if (e == null) { diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProvider.java new file mode 100644 index 0000000000000..8a0268a8d0793 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProvider.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.job.process; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.env.Environment; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +/** + * Provide storage for native components. + */ +public class NativeStorageProvider { + + private static final Logger LOGGER = Loggers.getLogger(NativeStorageProvider.class); + + + private static final String LOCAL_STORAGE_SUBFOLDER = "ml-local-data"; + private static final String LOCAL_STORAGE_TMP_FOLDER = "tmp"; + + private final Environment environment; + + // do not allow any usage below this threshold + private final ByteSizeValue minLocalStorageAvailable; + + public NativeStorageProvider(Environment environment, ByteSizeValue minDiskSpaceOffHeap) { + this.environment = environment; + this.minLocalStorageAvailable = minDiskSpaceOffHeap; + } + + /** + * Removes any temporary storage leftovers. + * + * Removes all temp files and folder which might be there as a result of an + * unclean node shutdown or broken clients. + * + * Do not call while there are running jobs. + * + * @throws IOException if cleanup fails + */ + public void cleanupLocalTmpStorageInCaseOfUncleanShutdown() throws IOException { + for (Path p : environment.dataFiles()) { + IOUtils.rm(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER)); + } + } + + /** + * Tries to find local storage for storing temporary data. 
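+     * The first data path with enough usable space (the requested size plus the
+     * configured minimum headroom) is picked; callers should treat a null return
+     * as "no local storage available" rather than as an error.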
+     *
+     * @param uniqueIdentifier An identifier to be used as sub folder
+     * @param requestedSize The maximum size required
+     * @return Path for temporary storage if available, null otherwise
+     */
+    public Path tryGetLocalTmpStorage(String uniqueIdentifier, ByteSizeValue requestedSize) {
+        for (Path path : environment.dataFiles()) {
+            try {
+                if (getUsableSpace(path) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes()) {
+                    Path tmpDirectory = path.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER).resolve(uniqueIdentifier);
+                    Files.createDirectories(tmpDirectory);
+                    return tmpDirectory;
+                }
+            } catch (IOException e) {
+                LOGGER.debug("Failed to obtain information about path [{}]: {}", path, e);
+            }
+
+        }
+        LOGGER.debug("Failed to find native storage for [{}], returning null", uniqueIdentifier);
+        return null;
+    }
+
+    public boolean localTmpStorageHasEnoughSpace(Path path, ByteSizeValue requestedSize) {
+        Path realPath = path.toAbsolutePath();
+        for (Path p : environment.dataFiles()) {
+            try {
+                if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) {
+                    return getUsableSpace(p) >= requestedSize.getBytes() + minLocalStorageAvailable.getBytes();
+                }
+            } catch (IOException e) {
+                LOGGER.debug("Failed to obtain information about path [{}]: {}", path, e);
+            }
+        }
+
+        LOGGER.debug("Not enough space left for path [{}]", path);
+        return false;
+    }
+
+    /**
+     * Deletes previously allocated temporary storage.
+     *
+     * @param path
+     *            Path to temporary storage
+     * @throws IOException
+     *             if the path cannot be cleaned up
+     */
+    public void cleanupLocalTmpStorage(Path path) throws IOException {
+        // do not allow breaking out of the tmp storage provided
+        Path realPath = path.toAbsolutePath();
+        for (Path p : environment.dataFiles()) {
+            if (realPath.startsWith(p.resolve(LOCAL_STORAGE_SUBFOLDER).resolve(LOCAL_STORAGE_TMP_FOLDER))) {
+                IOUtils.rm(path);
+            }
+        }
+    }
+
+    long getUsableSpace(Path path) throws IOException {
+        long freeSpaceInBytes = Environment.getFileStore(path).getUsableSpace();
+
+        /* See: https://bugs.openjdk.java.net/browse/JDK-8162520 */
+        if (freeSpaceInBytes < 0) {
+            freeSpaceInBytes = Long.MAX_VALUE;
+        }
+        return freeSpaceInBytes;
+    }
+}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java
index cca591682d850..d3a848ef3821f 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java
@@ -7,6 +7,7 @@
 import org.elasticsearch.common.xcontent.XContentElasticsearchExtension;
 import org.elasticsearch.core.internal.io.IOUtils;
+import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.client.Client;
@@ -15,11 +16,12 @@
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.AnalysisRegistry; @@ -47,6 +49,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; +import org.elasticsearch.xpack.ml.job.process.NativeStorageProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutoDetectResultProcessor; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; @@ -59,6 +62,7 @@ import java.io.IOException; import java.io.InputStream; +import java.nio.file.Path; import java.time.Duration; import java.time.ZonedDateTime; import java.util.Date; @@ -96,6 +100,10 @@ public class AutodetectProcessManager extends AbstractComponent { public static final Setting MAX_OPEN_JOBS_PER_NODE = Setting.intSetting("xpack.ml.max_open_jobs", MAX_RUNNING_JOBS_PER_NODE, 1, Property.NodeScope); + // Undocumented setting for integration test purposes + public static final Setting MIN_DISK_SPACE_OFF_HEAP = + Setting.byteSizeSetting("xpack.ml.min_disk_space_off_heap", new ByteSizeValue(5, ByteSizeUnit.GB), Property.NodeScope); + private final Client client; private final Environment environment; private final ThreadPool threadPool; @@ -107,8 +115,12 @@ public class AutodetectProcessManager extends AbstractComponent { private final JobResultsPersister jobResultsPersister; private final JobDataCountsPersister jobDataCountsPersister; + private NativeStorageProvider nativeStorageProvider; private final ConcurrentMap processByAllocation = new ConcurrentHashMap<>(); + // a map that manages the allocation of temporary space to jobs + private final ConcurrentMap nativeTmpStorage = new ConcurrentHashMap<>(); + private final int maxAllowedRunningJobs; private final NamedXContentRegistry xContentRegistry; @@ -133,6 +145,15 @@ public AutodetectProcessManager(Environment environment, Settings settings, Clie this.jobResultsPersister = jobResultsPersister; this.jobDataCountsPersister = jobDataCountsPersister; this.auditor = auditor; + this.nativeStorageProvider = new NativeStorageProvider(environment, MIN_DISK_SPACE_OFF_HEAP.get(settings)); + } + + public void onNodeStartup() { + try { + nativeStorageProvider.cleanupLocalTmpStorageInCaseOfUncleanShutdown(); + } catch (Exception e) { + logger.warn("Failed to cleanup native storage from previous invocation", e); + } } public synchronized void closeAllJobsOnThisNode(String reason) throws IOException { @@ -251,6 +272,28 @@ public void flushJob(JobTask jobTask, FlushJobParams params, ActionListener handler) { - logger.debug("Forecasting job {}", jobTask.getJobId()); + String jobId = jobTask.getJobId(); + logger.debug("Forecasting job {}", jobId); AutodetectCommunicator communicator = getOpenAutodetectCommunicator(jobTask); if (communicator == null) { - String message = String.format(Locale.ROOT, "Cannot forecast because job [%s] is not open", jobTask.getJobId()); + String message = String.format(Locale.ROOT, "Cannot forecast because job [%s] is not open", jobId); logger.debug(message); handler.accept(ExceptionsHelper.conflictStatusException(message)); return; @@ -271,7 +315,7 @@ 
public void forecastJob(JobTask jobTask, ForecastParams params, Consumer> getStatistics(JobTask jobTask return Optional.of(new Tuple<>(communicator.getDataCounts(), communicator.getModelSizeStats())); } + private void removeTmpStorage(String jobId) throws IOException { + Path path = nativeTmpStorage.get(jobId); + if (path != null) { + nativeStorageProvider.cleanupLocalTmpStorage(path); + } + } + ExecutorService createAutodetectExecutorService(ExecutorService executorService) { AutodetectWorkerExecutorService autoDetectWorkerExecutor = new AutodetectWorkerExecutorService(threadPool.getThreadContext()); executorService.submit(autoDetectWorkerExecutor::start); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java index 0afd3b8a473fb..f243195c3a785 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParams.java @@ -16,12 +16,14 @@ public class ForecastParams { private final long createTime; private final long duration; private final long expiresIn; + private final String tmpStorage; - private ForecastParams(String forecastId, long createTime, long duration, long expiresIn) { + private ForecastParams(String forecastId, long createTime, long duration, long expiresIn, String tmpStorage) { this.forecastId = forecastId; this.createTime = createTime; this.duration = duration; this.expiresIn = expiresIn; + this.tmpStorage = tmpStorage; } public String getForecastId() { @@ -52,9 +54,18 @@ public long getExpiresIn() { return expiresIn; } + /** + * Temporary storage forecast is allowed to use for persisting models. 
+ * + * @return path to tmp storage + */ + public String getTmpStorage() { + return tmpStorage; + } + @Override public int hashCode() { - return Objects.hash(forecastId, createTime, duration, expiresIn); + return Objects.hash(forecastId, createTime, duration, expiresIn, tmpStorage); } @Override @@ -69,7 +80,8 @@ public boolean equals(Object obj) { return Objects.equals(forecastId, other.forecastId) && Objects.equals(createTime, other.createTime) && Objects.equals(duration, other.duration) - && Objects.equals(expiresIn, other.expiresIn); + && Objects.equals(expiresIn, other.expiresIn) + && Objects.equals(tmpStorage, other.tmpStorage); } public static Builder builder() { @@ -81,6 +93,7 @@ public static class Builder { private final long createTimeEpochSecs; private long durationSecs; private long expiresInSecs; + private String tmpStorage; private Builder() { forecastId = UUIDs.base64UUID(); @@ -101,8 +114,13 @@ public Builder expiresIn(TimeValue expiresIn) { return this; } + public Builder tmpStorage(String tmpStorage) { + this.tmpStorage = tmpStorage; + return this; + } + public ForecastParams build() { - return new ForecastParams(forecastId, createTimeEpochSecs, durationSecs, expiresInSecs); + return new ForecastParams(forecastId, createTimeEpochSecs, durationSecs, expiresInSecs, tmpStorage); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java index 2a91797d28d75..2c026ec15506e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/ControlMsgToProcessWriter.java @@ -164,6 +164,9 @@ public void writeForecastMessage(ForecastParams params) throws IOException { if (params.getExpiresIn() != -1) { builder.field("expires_in", params.getExpiresIn()); } + if (params.getTmpStorage() != null) { + builder.field("tmp_storage", params.getTmpStorage()); + } builder.endObject(); writeMessage(FORECAST_MESSAGE_CODE + Strings.toString(builder)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProviderTests.java new file mode 100644 index 0000000000000..3103e76c82bde --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/NativeStorageProviderTests.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.job.process; + +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESTestCase; +import org.junit.Assert; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.any; + +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.doAnswer; + +public class NativeStorageProviderTests extends ESTestCase { + + public void testTmpStorage() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + + storage.put(tmpDir, new ByteSizeValue(6, ByteSizeUnit.GB).getBytes()); + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + + Assert.assertNotNull( + storageProvider.tryGetLocalTmpStorage(randomAlphaOfLengthBetween(4, 10), new ByteSizeValue(100, ByteSizeUnit.BYTES))); + Assert.assertNull(storageProvider.tryGetLocalTmpStorage(randomAlphaOfLengthBetween(4, 10), + new ByteSizeValue(1024 * 1024 * 1024 + 1, ByteSizeUnit.BYTES))); + + String id = randomAlphaOfLengthBetween(4, 10); + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.GB)); + Assert.assertNotNull(path); + + Assert.assertEquals(tmpDir.resolve("ml-local-data").resolve("tmp").resolve(id).toString(), path.toString()); + } + + public void testTmpStorageChooseDisk() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + + // low disk space + Path disk1 = tmpDir.resolve(randomAlphaOfLengthBetween(4, 10)); + storage.put(disk1, new ByteSizeValue(1, ByteSizeUnit.GB).getBytes()); + + // sufficient disk space + Path disk2 = tmpDir.resolve(randomAlphaOfLengthBetween(4, 10)); + storage.put(disk2, new ByteSizeValue(20, ByteSizeUnit.GB).getBytes()); + + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + + String id = randomAlphaOfLengthBetween(4, 10); + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.GB)); + Assert.assertNotNull(path); + + // should resolve to disk2 as disk1 is low on space + Assert.assertEquals(disk2.resolve("ml-local-data").resolve("tmp").resolve(id).toString(), path.toString()); + } + + public void testTmpStorageCleanup() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + storage.put(tmpDir, new ByteSizeValue(6, ByteSizeUnit.GB).getBytes()); + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + String id = randomAlphaOfLengthBetween(4, 10); + + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.KB)); + + Assert.assertTrue(Files.exists(path)); + Path testFile = PathUtils.get(path.toString(), "testFile"); + BufferedWriter writer = Files.newBufferedWriter(testFile, StandardCharsets.UTF_8); + writer.write("created by NativeStorageProviderTests::testTmpStorageDelete"); + + writer.close(); + Assert.assertTrue(Files.exists(testFile)); + Assert.assertTrue(Files.isRegularFile(testFile)); + + // the native component should cleanup itself, but assume it has crashed + storageProvider.cleanupLocalTmpStorage(path); + 
Assert.assertFalse(Files.exists(testFile)); + Assert.assertFalse(Files.exists(path)); + } + + public void testTmpStorageCleanupOnStart() throws IOException { + Map storage = new HashMap<>(); + Path tmpDir = createTempDir(); + storage.put(tmpDir, new ByteSizeValue(6, ByteSizeUnit.GB).getBytes()); + NativeStorageProvider storageProvider = createNativeStorageProvider(storage); + String id = randomAlphaOfLengthBetween(4, 10); + + Path path = storageProvider.tryGetLocalTmpStorage(id, new ByteSizeValue(1, ByteSizeUnit.KB)); + + Assert.assertTrue(Files.exists(path)); + Path testFile = PathUtils.get(path.toString(), "testFile"); + + BufferedWriter writer = Files.newBufferedWriter(testFile, StandardCharsets.UTF_8); + writer.write("created by NativeStorageProviderTests::testTmpStorageWipe"); + + writer.close(); + Assert.assertTrue(Files.exists(testFile)); + Assert.assertTrue(Files.isRegularFile(testFile)); + + // create a new storage provider to test the case of a crashed node + storageProvider = createNativeStorageProvider(storage); + storageProvider.cleanupLocalTmpStorageInCaseOfUncleanShutdown(); + Assert.assertFalse(Files.exists(testFile)); + Assert.assertFalse(Files.exists(path)); + } + + private NativeStorageProvider createNativeStorageProvider(Map paths) throws IOException { + Environment environment = mock(Environment.class); + + when(environment.dataFiles()).thenReturn(paths.keySet().toArray(new Path[paths.size()])); + NativeStorageProvider storageProvider = spy(new NativeStorageProvider(environment, new ByteSizeValue(5, ByteSizeUnit.GB))); + + doAnswer(invocation -> { + return paths.getOrDefault(invocation.getArguments()[0], Long.valueOf(0)).longValue(); + } + + ).when(storageProvider).getUsableSpace(any(Path.class)); + + return storageProvider; + } + +} diff --git a/x-pack/qa/ml-native-tests/build.gradle b/x-pack/qa/ml-native-tests/build.gradle index 94b7be3a44d4d..657aa7cfef68b 100644 --- a/x-pack/qa/ml-native-tests/build.gradle +++ b/x-pack/qa/ml-native-tests/build.gradle @@ -61,6 +61,7 @@ integTestCluster { setting 'xpack.security.transport.ssl.verification_mode', 'certificate' setting 'xpack.security.audit.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.ml.min_disk_space_off_heap', '200mb' keystoreSetting 'bootstrap.password', 'x-pack-test-password' keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index 14bdd533c6b38..84557798390dc 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; @@ -206,8 +207,7 @@ public void testMemoryStatus() throws Exception { assertThat(e.getMessage(), equalTo("Cannot run forecast: Forecast cannot be executed as model memory status is not OK")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/30399") - public void testMemoryLimit() throws Exception { + public void 
testOverflowToDisk() throws Exception { Detector.Builder detector = new Detector.Builder("mean", "value"); detector.setByFieldName("clientIP"); @@ -216,7 +216,9 @@ public void testMemoryLimit() throws Exception { analysisConfig.setBucketSpan(bucketSpan); DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setTimeFormat("epoch"); - Job.Builder job = new Job.Builder("forecast-it-test-memory-limit"); + Job.Builder job = new Job.Builder("forecast-it-test-overflow-to-disk"); + AnalysisLimits limits = new AnalysisLimits(2048L, null); + job.setAnalysisLimits(limits); job.setAnalysisConfig(analysisConfig); job.setDataDescription(dataDescription); @@ -224,28 +226,47 @@ public void testMemoryLimit() throws Exception { putJob(job); openJob(job.getId()); createDataWithLotsOfClientIps(bucketSpan, job); - ElasticsearchException e = expectThrows(ElasticsearchException.class, - () -> forecast(job.getId(), TimeValue.timeValueMinutes(120), null)); - assertThat(e.getMessage(), - equalTo("Cannot run forecast: Forecast cannot be executed as forecast memory usage is predicted to exceed 20MB")); + + try { + String forecastId = forecast(job.getId(), TimeValue.timeValueHours(1), null); + + waitForecastToFinish(job.getId(), forecastId); + } catch (ElasticsearchStatusException e) { + if (e.getMessage().contains("disk space")) { + throw new ElasticsearchStatusException( + "Test likely fails due to insufficient disk space on test machine, please free up space.", e.status(), e); + } + throw e; + } + + closeJob(job.getId()); + + List forecastStats = getForecastStats(); + assertThat(forecastStats.size(), equalTo(1)); + ForecastRequestStats forecastRequestStats = forecastStats.get(0); + List forecasts = getForecasts(job.getId(), forecastRequestStats); + + assertThat(forecastRequestStats.getRecordCount(), equalTo(8000L)); + assertThat(forecasts.size(), equalTo(8000)); } private void createDataWithLotsOfClientIps(TimeValue bucketSpan, Job.Builder job) throws IOException { long now = Instant.now().getEpochSecond(); - long timestamp = now - 50 * bucketSpan.seconds(); - while (timestamp < now) { - for (int i = 1; i < 256; i++) { + long timestamp = now - 15 * bucketSpan.seconds(); + + for (int h = 0; h < 15; h++) { + for (int i = 1; i < 101; i++) { List data = new ArrayList<>(); - for (int j = 1; j < 100; j++) { + for (int j = 1; j < 81; j++) { Map record = new HashMap<>(); record.put("time", timestamp); - record.put("value", 10.0); + record.put("value", 10.0 + h); record.put("clientIP", String.format(Locale.ROOT, "192.168.%d.%d", i, j)); data.add(createJsonRecord(record)); } postData(job.getId(), data.stream().collect(Collectors.joining())); - timestamp += bucketSpan.seconds(); } + timestamp += bucketSpan.seconds(); } flushJob(job.getId(), false); } From f820b48fe578d5d5939628ac8a79c80f1037750a Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 18 May 2018 14:11:11 +0200 Subject: [PATCH 25/34] [Build] Add test admin when starting gradle run with trial license and also add more documentation around gradle run task. 
(#30671) --- TESTING.asciidoc | 49 ++++++++++++++++++++++++++++++------- distribution/build.gradle | 4 +++- 2 files changed, 43 insertions(+), 10 deletions(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 267c401c20bf7..4215a49073826 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -25,6 +25,46 @@ run it using Gradle: ./gradlew run ------------------------------------- +==== Launching and debugging from an IDE + +If you want to run Elasticsearch from your IDE, the `./gradlew run` task +supports a remote debugging option: + +--------------------------------------------------------------------------- +./gradlew run --debug-jvm +--------------------------------------------------------------------------- + +==== Distribution + +By default a node is started with the zip distribution. +In order to start with a different distribution use the `-Drun.distribution` argument. + +For example, to start the open source distribution: + +------------------------------------- +./gradlew run -Drun.distribution=oss-zip +------------------------------------- + +==== License type + +By default a node is started with the `basic` license type. +In order to start with a different license type use the `-Drun.license_type` argument. + +In order to start a node with a trial license execute the following command: + +------------------------------------- +./gradlew run -Drun.license_type=trial +------------------------------------- + +This enables security and other paid features and adds a superuser with the username: `elastic-admin` and +password: `elastic-password`. + +==== Other useful arguments + +In order to start a node with a different max heap space add: `-Dtests.heap.size=4G` +In order to disable assertions add: `-Dtests.asserts=false` +In order to set an Elasticsearch setting, provide a setting with the following prefix: `-Dtests.es.` + === Test case filtering. - `tests.class` is a class-filtering shell-like glob pattern, @@ -572,15 +612,6 @@ as its build system. Since the switch to Gradle though, this is no longer possib the code currently used to build Elasticsearch does not allow JaCoCo to recognize its tests. For more information on this, see the discussion in https://github.com/elastic/elasticsearch/issues/28867[issue #28867]. 
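Taken together, the run-task options documented above compose into a single invocation; a hypothetical example (the heap size and the trailing `tests.es.` setting are illustrative values, not taken from this patch):

-------------------------------------
./gradlew run -Drun.distribution=zip -Drun.license_type=trial -Dtests.heap.size=4G -Dtests.es.logger.org.elasticsearch=DEBUG
-------------------------------------

Note that, per the `distribution/build.gradle` change below, `run.license_type` only takes effect with the default `zip` distribution.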
-== Launching and debugging from an IDE - -If you want to run Elasticsearch from your IDE, the `./gradlew run` task -supports a remote debugging option: - ---------------------------------------------------------------------------- -./gradlew run --debug-jvm ---------------------------------------------------------------------------- - == Debugging remotely from an IDE If you want to run Elasticsearch and be able to remotely attach the process diff --git a/distribution/build.gradle b/distribution/build.gradle index d2e2810bc7eec..940a4152bfd55 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -310,12 +310,14 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { task run(type: RunTask) { distribution = System.getProperty('run.distribution', 'zip') if (distribution == 'zip') { - String licenseType = System.getProperty("license_type", "basic") + String licenseType = System.getProperty("run.license_type", "basic") if (licenseType == 'trial') { setting 'xpack.ml.enabled', 'true' setting 'xpack.graph.enabled', 'true' setting 'xpack.watcher.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' + setupCommand 'setupTestAdmin', + 'bin/elasticsearch-users', 'useradd', 'elastic-admin', '-p', 'elastic-password', '-r', 'superuser' } else if (licenseType != 'basic') { throw new IllegalArgumentException("Unsupported self-generated license type: [" + licenseType + "]. Must be [basic] or [trial].") } From da6bf42c6d2d29ec098c383feac14ad884b3b60b Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Fri, 18 May 2018 17:57:20 +0300 Subject: [PATCH 26/34] [ML][TEST] Make AutodetectMemoryLimitIT less fragile (#30716) These tests aim to check the set model memory limit is respected. Additionally, they were asserting counts of partition, by, and over fields in an attempt to check that the used memory was spent meaningfully. However, this made the tests fragile, as changes in the ml-cpp could lead to CI failures. This commit removes those assertions. We are working on adding tests in ml-cpp that will compensate. 
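The assertion style the commit keeps is worth spelling out. A minimal sketch of the retained pattern, assuming the hamcrest matchers and the `getJobStats` helper from the test base class shown in the diff below:

```java
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;

// Inside an MlNativeAutodetectIntegTestCase subclass: bound the model size
// from both sides instead of pinning exact partition/by/over field counts.
GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();

// A window tolerates small ml-cpp behaviour changes; exact counts would not.
assertThat(modelSizeStats.getModelBytes(), lessThan(35000000L));
assertThat(modelSizeStats.getModelBytes(), greaterThan(30000000L));
assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
```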
--- .../ml/integration/AutodetectMemoryLimitIT.java | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java index f54f1bf54e932..4e0aa9c7e0613 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java @@ -33,7 +33,7 @@ public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase { @After - public void cleanUpTest() throws Exception { + public void cleanUpTest() { cleanUp(); } @@ -75,19 +75,10 @@ public void testTooManyPartitions() throws Exception { closeJob(job.getId()); // Assert we haven't violated the limit too much - // and a balance of partitions/by fields were created GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0); ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(35000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(30000000L)); - - // it is important to check that while we rejected partitions, we still managed - // to create some by fields; it shows we utilize memory in a meaningful way - // rather than creating empty partitions - assertThat(modelSizeStats.getTotalPartitionFieldCount(), lessThan(900L)); - assertThat(modelSizeStats.getTotalPartitionFieldCount(), greaterThan(650L)); - assertThat(modelSizeStats.getTotalByFieldCount(), lessThan(900L)); - assertThat(modelSizeStats.getTotalByFieldCount(), greaterThan(650L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -133,8 +124,6 @@ public void testTooManyByFields() throws Exception { ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(36000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(30000000L)); - assertThat(modelSizeStats.getTotalByFieldCount(), lessThan(1900L)); - assertThat(modelSizeStats.getTotalByFieldCount(), greaterThan(1500L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -184,9 +173,6 @@ public void testTooManyByAndOverFields() throws Exception { ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(36000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(24000000L)); - assertThat(modelSizeStats.getTotalByFieldCount(), equalTo(7L)); - assertThat(modelSizeStats.getTotalOverFieldCount(), greaterThan(40000L)); - assertThat(modelSizeStats.getTotalOverFieldCount(), lessThan(50000L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -237,7 +223,6 @@ public void testManyDistinctOverFields() throws Exception { ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); assertThat(modelSizeStats.getModelBytes(), lessThan(90000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(75000000L)); - assertThat(modelSizeStats.getTotalOverFieldCount(), greaterThan(140000L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.OK)); } From 4ca5e3f5e1e50c51aaa52a5af49e161704dfc583 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Fri, 18 May 2018 
17:59:01 +0300 Subject: [PATCH 27/34] [ML][TEST] Fix bucket count assertion in ModelPlotsIT (#30717) As the first record is random, there's a chance it will be aligned on a bucket start. Thus we need to check the bucket count is in [23, 24]. Closes #30715 --- .../xpack/ml/integration/ModelPlotsIT.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java index eb0c125a13ce4..81a44cd133643 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java @@ -30,7 +30,9 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ModelPlotsIT extends MlNativeAutodetectIntegTestCase { @@ -83,7 +85,11 @@ public void testPartitionFieldWithoutTerms() throws Exception { startDatafeed(datafeedId, 0, System.currentTimeMillis()); waitUntilJobIsClosed(job.getId()); - assertThat(getBuckets(job.getId()).size(), equalTo(23)); + // As the initial time is random, there's a chance the first record is + // aligned on a bucket start. Thus we check the buckets are in [23, 24] + assertThat(getBuckets(job.getId()).size(), greaterThanOrEqualTo(23)); + assertThat(getBuckets(job.getId()).size(), lessThanOrEqualTo(24)); + Set<String> modelPlotTerms = modelPlotTerms(job.getId(), "partition_field_value"); assertThat(modelPlotTerms, containsInAnyOrder("user_1", "user_2", "user_3")); } @@ -101,7 +107,11 @@ public void testPartitionFieldWithTerms() throws Exception { startDatafeed(datafeedId, 0, System.currentTimeMillis()); waitUntilJobIsClosed(job.getId()); - assertThat(getBuckets(job.getId()).size(), equalTo(23)); + // As the initial time is random, there's a chance the first record is + // aligned on a bucket start. 
Thus we check the buckets are in [23, 24] + assertThat(getBuckets(job.getId()).size(), greaterThanOrEqualTo(23)); + assertThat(getBuckets(job.getId()).size(), lessThanOrEqualTo(24)); + Set<String> modelPlotTerms = modelPlotTerms(job.getId(), "partition_field_value"); assertThat(modelPlotTerms, containsInAnyOrder("user_2", "user_3")); } From 38e72e34be9701f26297e06b2dba4eab8dcd5de8 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 18 May 2018 10:26:03 -0700 Subject: [PATCH 28/34] [DOCS] Moves X-Pack configuration pages in table of contents (#30702) --- docs/reference/index-shared1.asciidoc | 2 -- docs/reference/index-shared2.asciidoc | 2 ++ x-pack/docs/en/index.asciidoc | 12 ++++++++++ .../en/setup/bootstrap-checks-xes.asciidoc | 4 ---- x-pack/docs/en/setup/setup-xes.asciidoc | 23 ++++++++----------- 5 files changed, 24 insertions(+), 19 deletions(-) diff --git a/docs/reference/index-shared1.asciidoc b/docs/reference/index-shared1.asciidoc index ae208e290112a..9325bd6e73e29 100644 --- a/docs/reference/index-shared1.asciidoc +++ b/docs/reference/index-shared1.asciidoc @@ -2,5 +2,3 @@ include::getting-started.asciidoc[] include::setup.asciidoc[] - -include::upgrade.asciidoc[] diff --git a/docs/reference/index-shared2.asciidoc b/docs/reference/index-shared2.asciidoc index e48948079cc9f..c0c3f8a6766ec 100644 --- a/docs/reference/index-shared2.asciidoc +++ b/docs/reference/index-shared2.asciidoc @@ -1,2 +1,4 @@ +include::upgrade.asciidoc[] + include::migration/index.asciidoc[] diff --git a/x-pack/docs/en/index.asciidoc b/x-pack/docs/en/index.asciidoc index 3133053c5bd23..1a8c9120b2aa4 100644 --- a/x-pack/docs/en/index.asciidoc +++ b/x-pack/docs/en/index.asciidoc @@ -1,8 +1,20 @@ include::{es-repo-dir}/index-shared1.asciidoc[] +include::setup/installing-xes.asciidoc[] + include::setup/setup-xes.asciidoc[] +include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[] + +include::{xes-repo-dir}/security/configuring-es.asciidoc[] + +include::setup/setup-xclient.asciidoc[] + +include::{xes-repo-dir}/settings/configuring-xes.asciidoc[] + +include::setup/bootstrap-checks-xes.asciidoc[] + include::{es-repo-dir}/index-shared2.asciidoc[] include::{es-repo-dir}/index-shared3.asciidoc[] diff --git a/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc b/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc index 7b221575d83a3..7d021a04cc109 100644 --- a/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc +++ b/x-pack/docs/en/setup/bootstrap-checks-xes.asciidoc @@ -1,10 +1,6 @@ [role="xpack"] [[bootstrap-checks-xpack]] == Bootstrap Checks for {xpack} -++++ -Bootstrap Checks -++++ - In addition to the <>, there are checks that are specific to {xpack} features. diff --git a/x-pack/docs/en/setup/setup-xes.asciidoc b/x-pack/docs/en/setup/setup-xes.asciidoc index ffb7f6aa96e8b..ca42437594c83 100644 --- a/x-pack/docs/en/setup/setup-xes.asciidoc +++ b/x-pack/docs/en/setup/setup-xes.asciidoc @@ -1,11 +1,16 @@ [role="xpack"] [[setup-xpack]] -= Set up {xpack} +== Set up {xpack} -[partintro] --- -{xpack} is an Elastic Stack extension that bundles security, alerting, -monitoring, reporting, machine learning, and graph capabilities. +{xpack} is an Elastic Stack extension that provides security, alerting, +monitoring, reporting, machine learning, and many other capabilities. By default, +when you install {es}, {xpack} is installed. + +If you want to try all of the {xpack} features, you can +{xpack-ref}/license-management.html[start a 30-day trial]. 
At the end of the +trial period, you can purchase a subscription to keep using the full +functionality of the {xpack} components. For more information, see +https://www.elastic.co/subscriptions. * <> * <> @@ -14,11 +19,3 @@ monitoring, reporting, machine learning, and graph capabilities. * <> * <> --- - -include::installing-xes.asciidoc[] -include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[] -include::{xes-repo-dir}/security/configuring-es.asciidoc[] -include::setup-xclient.asciidoc[] -include::{xes-repo-dir}/settings/configuring-xes.asciidoc[] -include::bootstrap-checks-xes.asciidoc[] From 6d29952b628dfa70b2868df484a9e0520c264ea5 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 18 May 2018 11:05:40 -0700 Subject: [PATCH 29/34] [DOCS] Removes redundant index.asciidoc files (#30707) --- docs/reference/index-shared1.asciidoc | 4 -- docs/reference/index-shared2.asciidoc | 4 -- docs/reference/index-shared3.asciidoc | 26 --------- docs/reference/index-shared4.asciidoc | 10 ---- docs/reference/index-shared5.asciidoc | 2 - docs/reference/index.asciidoc | 79 +++++++++++++++++++++++++-- docs/reference/index.x.asciidoc | 13 +---- x-pack/docs/en/index.asciidoc | 34 ------------ 8 files changed, 75 insertions(+), 97 deletions(-) delete mode 100644 docs/reference/index-shared1.asciidoc delete mode 100644 docs/reference/index-shared2.asciidoc delete mode 100644 docs/reference/index-shared3.asciidoc delete mode 100644 docs/reference/index-shared4.asciidoc delete mode 100644 docs/reference/index-shared5.asciidoc delete mode 100644 x-pack/docs/en/index.asciidoc diff --git a/docs/reference/index-shared1.asciidoc b/docs/reference/index-shared1.asciidoc deleted file mode 100644 index 9325bd6e73e29..0000000000000 --- a/docs/reference/index-shared1.asciidoc +++ /dev/null @@ -1,4 +0,0 @@ - -include::getting-started.asciidoc[] - -include::setup.asciidoc[] diff --git a/docs/reference/index-shared2.asciidoc b/docs/reference/index-shared2.asciidoc deleted file mode 100644 index c0c3f8a6766ec..0000000000000 --- a/docs/reference/index-shared2.asciidoc +++ /dev/null @@ -1,4 +0,0 @@ - -include::upgrade.asciidoc[] - -include::migration/index.asciidoc[] diff --git a/docs/reference/index-shared3.asciidoc b/docs/reference/index-shared3.asciidoc deleted file mode 100644 index 4da338186b0c8..0000000000000 --- a/docs/reference/index-shared3.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ - -include::api-conventions.asciidoc[] - -include::docs.asciidoc[] - -include::search.asciidoc[] - -include::aggregations.asciidoc[] - -include::indices.asciidoc[] - -include::cat.asciidoc[] - -include::cluster.asciidoc[] - -include::query-dsl.asciidoc[] - -include::mapping.asciidoc[] - -include::analysis.asciidoc[] - -include::modules.asciidoc[] - -include::index-modules.asciidoc[] - -include::ingest.asciidoc[] diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc deleted file mode 100644 index 7b7eb0c10e9e9..0000000000000 --- a/docs/reference/index-shared4.asciidoc +++ /dev/null @@ -1,10 +0,0 @@ - -include::how-to.asciidoc[] - -include::testing.asciidoc[] - -include::glossary.asciidoc[] - -include::release-notes/highlights.asciidoc[] - -include::release-notes.asciidoc[] diff --git a/docs/reference/index-shared5.asciidoc b/docs/reference/index-shared5.asciidoc deleted file mode 100644 index 572522f6c8e74..0000000000000 --- a/docs/reference/index-shared5.asciidoc +++ /dev/null @@ -1,2 +0,0 @@ - -include::redirects.asciidoc[] diff --git a/docs/reference/index.asciidoc 
b/docs/reference/index.asciidoc index 8aa9eef32f8bf..c15ba6faf9fa3 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -1,12 +1,81 @@ [[elasticsearch-reference]] = Elasticsearch Reference +:include-xpack: true :es-test-dir: {docdir}/../src/test :plugins-examples-dir: {docdir}/../../plugins/examples +:xes-repo-dir: {docdir}/../../x-pack/docs/{lang} +:es-repo-dir: {docdir} + include::../Versions.asciidoc[] -include::index-shared1.asciidoc[] -include::index-shared2.asciidoc[] -include::index-shared3.asciidoc[] -include::index-shared4.asciidoc[] -include::index-shared5.asciidoc[] + +include::getting-started.asciidoc[] + +include::setup.asciidoc[] + +include::{xes-repo-dir}/setup/installing-xes.asciidoc[] + +include::{xes-repo-dir}/setup/setup-xes.asciidoc[] + +include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[] + +include::{xes-repo-dir}/security/configuring-es.asciidoc[] + +include::{xes-repo-dir}/setup/setup-xclient.asciidoc[] + +include::{xes-repo-dir}/settings/configuring-xes.asciidoc[] + +include::{xes-repo-dir}/setup/bootstrap-checks-xes.asciidoc[] + +include::upgrade.asciidoc[] + +include::migration/index.asciidoc[] + +include::api-conventions.asciidoc[] + +include::docs.asciidoc[] + +include::search.asciidoc[] + +include::aggregations.asciidoc[] + +include::indices.asciidoc[] + +include::cat.asciidoc[] + +include::cluster.asciidoc[] + +include::query-dsl.asciidoc[] + +include::mapping.asciidoc[] + +include::analysis.asciidoc[] + +include::modules.asciidoc[] + +include::index-modules.asciidoc[] + +include::ingest.asciidoc[] + +include::{xes-repo-dir}/sql/index.asciidoc[] + +include::{xes-repo-dir}/monitoring/index.asciidoc[] + +include::{xes-repo-dir}/rollup/index.asciidoc[] + +include::{xes-repo-dir}/rest-api/index.asciidoc[] + +include::{xes-repo-dir}/commands/index.asciidoc[] + +include::how-to.asciidoc[] + +include::testing.asciidoc[] + +include::glossary.asciidoc[] + +include::release-notes/highlights.asciidoc[] + +include::release-notes.asciidoc[] + +include::redirects.asciidoc[] diff --git a/docs/reference/index.x.asciidoc b/docs/reference/index.x.asciidoc index 5be21cb004331..35204eef5b67e 100644 --- a/docs/reference/index.x.asciidoc +++ b/docs/reference/index.x.asciidoc @@ -1,12 +1 @@ -[[elasticsearch-reference]] -= Elasticsearch Reference - -:include-xpack: true -:es-test-dir: {docdir}/../src/test -:plugins-examples-dir: {docdir}/../../plugins/examples -:xes-repo-dir: {docdir}/../../x-pack/docs/{lang} -:es-repo-dir: {docdir} - - -include::../Versions.asciidoc[] -include::{xes-repo-dir}/index.asciidoc[] +include::index.asciidoc[] diff --git a/x-pack/docs/en/index.asciidoc b/x-pack/docs/en/index.asciidoc deleted file mode 100644 index 1a8c9120b2aa4..0000000000000 --- a/x-pack/docs/en/index.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ - -include::{es-repo-dir}/index-shared1.asciidoc[] - -include::setup/installing-xes.asciidoc[] - -include::setup/setup-xes.asciidoc[] - -include::{xes-repo-dir}/monitoring/configuring-monitoring.asciidoc[] - -include::{xes-repo-dir}/security/configuring-es.asciidoc[] - -include::setup/setup-xclient.asciidoc[] - -include::{xes-repo-dir}/settings/configuring-xes.asciidoc[] - -include::setup/bootstrap-checks-xes.asciidoc[] - -include::{es-repo-dir}/index-shared2.asciidoc[] - -include::{es-repo-dir}/index-shared3.asciidoc[] - -include::sql/index.asciidoc[] - -include::monitoring/index.asciidoc[] - -include::rollup/index.asciidoc[] - -include::rest-api/index.asciidoc[] - -include::commands/index.asciidoc[] - 
-include::{es-repo-dir}/index-shared4.asciidoc[] - -include::{es-repo-dir}/index-shared5.asciidoc[] From e04440ba5bc9c94595753ebf49e7d7f107830f6f Mon Sep 17 00:00:00 2001 From: David Roberts Date: Fri, 18 May 2018 19:01:43 +0100 Subject: [PATCH 30/34] [TEST] Reduce forecast overflow to disk test memory limit (#30727) By default ML native processes are only allowed to use 30% of RAM, so the previous 2GB setting prevented the test passing on VMs with only 4GB RAM. This change reduces the limit to 1200MB, which means it can now pass on VMs with 4GB RAM (30% of 4096MB is roughly 1228MB, just above the new 1200MB limit). --- .../java/org/elasticsearch/xpack/ml/integration/ForecastIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index 84557798390dc..81c54353a2d70 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -217,7 +217,7 @@ public void testOverflowToDisk() throws Exception { DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setTimeFormat("epoch"); Job.Builder job = new Job.Builder("forecast-it-test-overflow-to-disk"); - AnalysisLimits limits = new AnalysisLimits(2048L, null); + AnalysisLimits limits = new AnalysisLimits(1200L, null); job.setAnalysisLimits(limits); job.setAnalysisConfig(analysisConfig); job.setDataDescription(dataDescription); From 72b622fcc8220405a49c3ce064e5f925894a3843 Mon Sep 17 00:00:00 2001 From: William Dearden Date: Fri, 18 May 2018 16:13:12 -0500 Subject: [PATCH 31/34] Docs: Add uptasticsearch to list of clients (#30738) It is a client for R. --- docs/community-clients/index.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc index 76312f39345e7..0306432344120 100644 --- a/docs/community-clients/index.asciidoc +++ b/docs/community-clients/index.asciidoc @@ -205,6 +205,9 @@ The following projects appear to be abandoned: * https://github.com/ropensci/elasticdsl[elasticdsl]: A high-level R DSL for Elasticsearch, wrapping the elastic R client. + +* https://github.com/UptakeOpenSource/uptasticsearch[uptasticsearch]: + An R client tailored to data science workflows. The following projects appear to be abandoned: From f7b5785552cbbe6989d6c4853aa33a5bed361c50 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 18 May 2018 18:21:39 -0400 Subject: [PATCH 32/34] Upgrade to Lucene-7.4.0-snapshot-59f2b7aec2 (#30726) This snapshot resolves issues related to ShrinkIndexIT. 
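Beyond the checksum churn below, the upgrade tracks one API rename visible in `ElasticsearchMergePolicy`: Lucene 7.4 renamed `MergePolicyWrapper` to `FilterMergePolicy`, as the import swap further down suggests. A minimal sketch of the new base class, with a hypothetical subclass name (not from the patch):

```java
import org.apache.lucene.index.FilterMergePolicy;
import org.apache.lucene.index.MergePolicy;

// FilterMergePolicy forwards every MergePolicy method to the wrapped
// instance, so a delegating policy only overrides what it changes.
public class PassThroughMergePolicy extends FilterMergePolicy {
    public PassThroughMergePolicy(MergePolicy in) {
        super(in); // delegate all merge decisions to the wrapped policy
    }
}
```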
--- buildSrc/version.properties | 2 +- ...ene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...ene-expressions-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...e-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...e-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...lyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...lyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...lyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...lyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...alyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...alyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...alyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...alyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...zers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...zers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...nalyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...nalyzers-common-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-core-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...ene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...ene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-join-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...ene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...ene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 | 1 - ...ucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + ...ucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../index/shard/ElasticsearchMergePolicy.java | 4 ++-- .../java/org/elasticsearch/index/store/Store.java | 11 ----------- .../action/admin/indices/create/ShrinkIndexIT.java | 2 -- .../indices/analysis/AnalysisFactoryTestCase.java | 3 +++ .../lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-core-7.4.0-snapshot-6705632810.jar.sha1 | 1 - .../lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 | 1 + .../lucene-core-7.4.0-snapshot-6705632810.jar.sha1 | 1 - 55 files changed, 31 insertions(+), 41 deletions(-) create mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 
plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-analyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-join-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 
100644 server/licenses/lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 server/licenses/lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 62193aae6e84f..bfe3a5ba8727c 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 6.4.0 -lucene = 7.4.0-snapshot-6705632810 +lucene = 7.4.0-snapshot-59f2b7aec2 # optional dependencies spatial4j = 0.7 diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..702782e1c5ed7 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +a3dba337d06e1f5930cb7ae638c1655b99ce0cb7 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-6705632810.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index b333863b94d64..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f72ad4b6474c2d59b0eed0ca84eddd1f99d29129 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..f99b0177de590 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +473a7f4d955f132bb498482648266653f8da85bd \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 6720beb8d8682..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b4e19c53f29fa9b40bd7ad12ff598e3f08d507a3 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..08269eed6360f --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +c5a72b9a790e2552248c8bbb36af47c4c399ba27 \ No newline at end of file diff --git 
a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 861a2110e164a..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -23dd8cb3834f3641d9b3e8bc3d38281389a597bc \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..325fe16120428 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +14f680ab9b886c7c5224ff682a7fa70b6df44a05 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c7c19feb57df5..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e8119a17448a6f5512ded0bd2a6faa7fc8e70890 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..9e88119ed1d16 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +e033c68c9ec1ba9cd8439758adf7eb5fee22acef \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 94e8c2698389b..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -336d9ac698066b8cf8a448f193e4a29ef163baa8 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..74721c857571c --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +08df0a5029f11c109b22064dec78c05dfa25f9e3 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 364facee9efe6..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e1e77951a83fc6a9deab884773314992fefa14f3 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..1c257797c08e2 --- /dev/null +++ 
b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +a9d1819b2b13f134f6a605ab5a59ce3c602c0460 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index b55d8cf04ec32..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da149a16673c6326f4898ad877756259f676f8 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..117ac05c91fe1 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +47bc91ccb0cdf0c1c404646ffe0d5fd6b020a4ab \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index dcc2249c45f2f..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ab4141b43cc6c2680d5f5a0b5086299f38ebec4d \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..14f5fcb381f1c --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +b70d03784d06a643e096fae4d959200aa246ba16 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index ccabc01378088..0000000000000 --- a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f465718b3db829e7660009aac2c1211fd5d74ca0 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..47afb59e45eb7 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +d660a63ac0f7ab2772a45ae518518472bf620620 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 68c4dcebd2ee2..0000000000000 --- a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d502441e830e1a9d30270442f8e3fd8317fe7bba \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..50392f59374a8 --- /dev/null +++ b/server/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +bf8f9e8284a54af18545574cb4a530da0deb968a \ No newline at end of file 
diff --git a/server/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c25718d0a9ee0..0000000000000 --- a/server/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5167fb0a14434cb10ec3224e9e32ca668e9f9ad4 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..27d7aaab2f589 --- /dev/null +++ b/server/licenses/lucene-grouping-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +9eaae9dcd4ec88227475cb81d3be9afa767f1b22 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 007ae9bb24eee..0000000000000 --- a/server/licenses/lucene-grouping-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -488aeecf49413b63a404989ae00b07b20951e76e \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..176c3a86afe7f --- /dev/null +++ b/server/licenses/lucene-highlighter-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +cd15f0008742c84899d678cb0cecda06d0a6d63e \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index f1733ffb6826a..0000000000000 --- a/server/licenses/lucene-highlighter-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -107755edd67cddb3fb9817de50c0bed3a10da19c \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..0bfe9cfb79aff --- /dev/null +++ b/server/licenses/lucene-join-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +5ce38b8610a7f402f2da3b0e408e508151d979c5 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 71800d6aa153f..0000000000000 --- a/server/licenses/lucene-join-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9226fab3b9c6250af52b87061f637c0f8e3114b6 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..c1a0127e2ce73 --- /dev/null +++ b/server/licenses/lucene-memory-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +53819f03a07050a4af28361d64395c86f2cea008 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 6cc24bbe98b82..0000000000000 --- a/server/licenses/lucene-memory-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2b7bf384c1933225972f04224d867ec800f5e3a7 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 
0000000000000..615a0dec0c0d4 --- /dev/null +++ b/server/licenses/lucene-misc-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +8cdc0e2b65d146ed11f4d2507109e530d59ff33d \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 03c146f5c6473..0000000000000 --- a/server/licenses/lucene-misc-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18b770c35db8757dc036b1506870a4ddaad7b1ab \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..12f5eff262e9c --- /dev/null +++ b/server/licenses/lucene-queries-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +e56090463703112ad64ad457d18bae9a5b2966b8 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 3ecdd79cafd9e..0000000000000 --- a/server/licenses/lucene-queries-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -683f6436938c67709d0c665c9e1fdef7bd893e4a \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..a787a00541a54 --- /dev/null +++ b/server/licenses/lucene-queryparser-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +9faf974b77058e44a6d35e956db4f5fb67389dfa \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index d113267f6f3d1..0000000000000 --- a/server/licenses/lucene-queryparser-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1df20ba64b9aa68f1fa9a15c9ff75f87f94dec47 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..7d95cd6b3b6e3 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +b852b1fe70ef70736b2b1a9ad57eb93cbaed0423 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 7c3391aec27e8..0000000000000 --- a/server/licenses/lucene-sandbox-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -895ca714fc62b66ba63d43931730cdc4ef56d35f \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..ac0598b3f0c49 --- /dev/null +++ b/server/licenses/lucene-spatial-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +d2fa99ec7140fcf35db16ac1feb78ef142750d39 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 35d6d5359ebd1..0000000000000 --- a/server/licenses/lucene-spatial-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-95ab7e9421bbeb8229d83ac72700b37a521fdf4f \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..a2537dbdde529 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +c9963f60d3a0924b877a6f910650c5f2384822a0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c2f2f39a1f821..0000000000000 --- a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -773ff8c8425d32609ccec6956759ad377dfb8f6b \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..6844bcd13b278 --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +3f33ba54da5e0e125f4c5ef7dd800dd6185e4f61 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index 0533067ff0d89..0000000000000 --- a/server/licenses/lucene-spatial3d-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ea711541e243ee768f950041e6e2843d0cc5e695 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..0343db2d94485 --- /dev/null +++ b/server/licenses/lucene-suggest-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +bb3c18c987395dae6fe63744f5a50fd367ea5a74 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index ce8adccc89a78..0000000000000 --- a/server/licenses/lucene-suggest-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2ca005cf25722ba3777ed93f720f40c937081fa6 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java index 9253728fd1956..e49ac868aa7a5 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java @@ -20,9 +20,9 @@ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.FilterMergePolicy; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.MergePolicy; -import org.apache.lucene.index.MergePolicyWrapper; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.elasticsearch.Version; @@ -44,7 +44,7 @@ * For now, this {@link MergePolicy} takes care of moving versions that used to * be stored as payloads to numeric doc values. 
*/ -public final class ElasticsearchMergePolicy extends MergePolicyWrapper { +public final class ElasticsearchMergePolicy extends FilterMergePolicy { private static Logger logger = Loggers.getLogger(ElasticsearchMergePolicy.class); diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index ae3762cee7725..9bcd60d676231 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -759,17 +759,6 @@ private void innerClose() throws IOException { public String toString() { return "store(" + in.toString() + ")"; } - - @Override - public boolean checkPendingDeletions() throws IOException { - if (super.checkPendingDeletions()) { - deletesLogger.warn("directory has still pending deletes"); - } - // we skip this check since our IW usage always goes forward. - // we still might run into situations where we have pending deletes ie. in shrink / split case - // and that will cause issues on windows since we open multiple IW instance one after another during the split/shrink recovery - return false; - } } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index d89a8a134ff7c..e48f151081f62 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; @@ -84,7 +83,6 @@ protected Collection> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class); } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-8318") public void testCreateShrinkIndexToN() { int[][] possibleShardSplits = new int[][] {{8,4,2}, {9, 3, 1}, {4, 2, 1}, {15,5,1}}; int[] shardSplits = randomFrom(possibleShardSplits); diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 8b31680370b91..0396b8ac78820 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -217,6 +217,9 @@ private static String toCamelCase(String s) { // should we expose it, or maybe think about higher level integration of the // fake term frequency feature (LUCENE-7854) .put("delimitedtermfrequency", Void.class) + // LUCENE-8273: ConditionalTokenFilter allows analysis chains to skip + // particular token filters based on the attributes of the current token. 
+ .put("termexclusion", Void.class) .immutableMap(); diff --git a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..50392f59374a8 --- /dev/null +++ b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +bf8f9e8284a54af18545574cb4a530da0deb968a \ No newline at end of file diff --git a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c25718d0a9ee0..0000000000000 --- a/x-pack/plugin/sql/jdbc/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5167fb0a14434cb10ec3224e9e32ca668e9f9ad4 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 new file mode 100644 index 0000000000000..50392f59374a8 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-59f2b7aec2.jar.sha1 @@ -0,0 +1 @@ +bf8f9e8284a54af18545574cb4a530da0deb968a \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 deleted file mode 100644 index c25718d0a9ee0..0000000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-6705632810.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5167fb0a14434cb10ec3224e9e32ca668e9f9ad4 \ No newline at end of file From b4e6bfaa682137a74fb161d3e06e75650375ab37 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 18 May 2018 10:56:08 -0700 Subject: [PATCH 33/34] Plugins: Remove meta plugins (#30670) Meta plugins existed only for a short time, in order to enable breaking up x-pack into multiple plugins. However, now that x-pack is no longer installed as a plugin, the need for them has disappeared. This commit removes the meta plugins infrastructure. 
--- .../plugin/MetaPluginBuildPlugin.groovy | 106 -------- .../MetaPluginPropertiesExtension.groovy | 46 ---- .../plugin/MetaPluginPropertiesTask.groovy | 68 ----- .../gradle/test/ClusterFormationTasks.groovy | 14 +- .../elasticsearch.es-meta-plugin.properties | 20 -- .../meta-plugin-descriptor.properties | 20 -- .../plugins/InstallPluginCommand.java | 99 +------- .../plugins/ListPluginsCommand.java | 20 +- .../plugins/InstallPluginCommandTests.java | 237 +----------------- .../plugins/ListPluginsCommandTests.java | 123 +-------- .../plugins/RemovePluginCommandTests.java | 23 -- docs/plugins/authors.asciidoc | 18 -- plugins/examples/meta-plugin/build.gradle | 28 --- .../meta-plugin/dummy-plugin1/build.gradle | 29 --- .../elasticsearch/example/DummyPlugin1.java | 29 --- .../meta-plugin/dummy-plugin2/build.gradle | 29 --- .../elasticsearch/example/DummyPlugin2.java | 29 --- .../meta-plugin-descriptor.properties | 4 - ...SmokeTestPluginsClientYamlTestSuiteIT.java | 39 --- .../test/smoke_test_plugins/10_basic.yml | 14 -- .../bootstrap/SpawnerNoBootstrapTests.java | 85 ------- .../elasticsearch/plugins/MetaPluginInfo.java | 149 ----------- .../elasticsearch/plugins/PluginsService.java | 125 ++------- .../plugins/MetaPluginInfoTests.java | 120 --------- .../plugins/PluginsServiceTests.java | 63 +---- .../elasticsearch/plugins/PluginTestUtil.java | 3 - x-pack/qa/smoke-test-plugins-ssl/build.gradle | 1 - x-pack/qa/vagrant/build.gradle | 5 - 28 files changed, 43 insertions(+), 1503 deletions(-) delete mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy delete mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesExtension.groovy delete mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesTask.groovy delete mode 100644 buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.es-meta-plugin.properties delete mode 100644 buildSrc/src/main/resources/meta-plugin-descriptor.properties delete mode 100644 plugins/examples/meta-plugin/build.gradle delete mode 100644 plugins/examples/meta-plugin/dummy-plugin1/build.gradle delete mode 100644 plugins/examples/meta-plugin/dummy-plugin1/src/main/java/org/elasticsearch/example/DummyPlugin1.java delete mode 100644 plugins/examples/meta-plugin/dummy-plugin2/build.gradle delete mode 100644 plugins/examples/meta-plugin/dummy-plugin2/src/main/java/org/elasticsearch/example/DummyPlugin2.java delete mode 100644 plugins/examples/meta-plugin/src/main/resources/meta-plugin-descriptor.properties delete mode 100644 plugins/examples/meta-plugin/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java delete mode 100644 plugins/examples/meta-plugin/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yml delete mode 100644 server/src/main/java/org/elasticsearch/plugins/MetaPluginInfo.java delete mode 100644 server/src/test/java/org/elasticsearch/plugins/MetaPluginInfoTests.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy deleted file mode 100644 index acb8f57d9d72c..0000000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginBuildPlugin.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle.plugin - -import org.elasticsearch.gradle.BuildPlugin -import org.elasticsearch.gradle.test.RestTestPlugin -import org.elasticsearch.gradle.test.RunTask -import org.elasticsearch.gradle.test.StandaloneRestTestPlugin -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.file.FileCopyDetails -import org.gradle.api.file.RelativePath -import org.gradle.api.tasks.bundling.Zip - -class MetaPluginBuildPlugin implements Plugin { - - @Override - void apply(Project project) { - project.plugins.apply(StandaloneRestTestPlugin) - project.plugins.apply(RestTestPlugin) - - createBundleTask(project) - boolean isModule = project.path.startsWith(':modules:') || project.path.startsWith(':x-pack:plugin') - - project.integTestCluster { - dependsOn(project.bundlePlugin) - distribution = 'integ-test-zip' - } - BuildPlugin.configurePomGeneration(project) - project.afterEvaluate { - PluginBuildPlugin.addZipPomGeneration(project) - if (isModule) { - if (project.integTestCluster.distribution == 'integ-test-zip') { - project.integTestCluster.module(project) - } - } else { - project.integTestCluster.plugin(project.path) - } - } - - RunTask run = project.tasks.create('run', RunTask) - run.dependsOn(project.bundlePlugin) - if (isModule == false) { - run.clusterConfig.plugin(project.path) - } - } - - private static void createBundleTask(Project project) { - - MetaPluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', MetaPluginPropertiesTask.class) - - // create the actual bundle task, which zips up all the files for the plugin - Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [buildProperties]) { - from(buildProperties.descriptorOutput.parentFile) { - // plugin properties file - include(buildProperties.descriptorOutput.name) - } - // due to how the renames work for each bundled plugin, we must exclude empty dirs or every subdir - // within bundled plugin zips will show up at the root as an empty dir - includeEmptyDirs = false - - } - project.assemble.dependsOn(bundle) - - // also make the zip available as a configuration (used when depending on this project) - project.configurations.create('zip') - project.artifacts.add('zip', bundle) - - // a super hacky way to inject code to run at the end of each of the bundled plugin's configuration - // to add itself back to this meta plugin zip - project.afterEvaluate { - buildProperties.extension.plugins.each { String bundledPluginProjectName -> - Project bundledPluginProject = project.project(bundledPluginProjectName) - bundledPluginProject.afterEvaluate { - String bundledPluginName = bundledPluginProject.esplugin.name - bundle.configure { - dependsOn bundledPluginProject.bundlePlugin - 
from(project.zipTree(bundledPluginProject.bundlePlugin.outputs.files.singleFile)) { - eachFile { FileCopyDetails details -> - // we want each path to have the plugin name interjected - details.relativePath = new RelativePath(true, bundledPluginName, details.relativePath.toString()) - } - } - } - } - } - } - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesExtension.groovy deleted file mode 100644 index e5d84002e533f..0000000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesExtension.groovy +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle.plugin - -import org.gradle.api.Project -import org.gradle.api.tasks.Input - -/** - * A container for meta plugin properties that will be written to the meta plugin descriptor, for easy - * manipulation in the gradle DSL. - */ -class MetaPluginPropertiesExtension { - @Input - String name - - @Input - String description - - /** - * The plugins this meta plugin wraps. - * Note this is not written to the plugin descriptor, but used to setup the final zip file task. - */ - @Input - List plugins - - MetaPluginPropertiesExtension(Project project) { - name = project.name - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesTask.groovy deleted file mode 100644 index e868cc2cc3128..0000000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/MetaPluginPropertiesTask.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.gradle.plugin - -import org.gradle.api.InvalidUserDataException -import org.gradle.api.Task -import org.gradle.api.tasks.Copy -import org.gradle.api.tasks.OutputFile - -class MetaPluginPropertiesTask extends Copy { - - MetaPluginPropertiesExtension extension - - @OutputFile - File descriptorOutput = new File(project.buildDir, 'generated-resources/meta-plugin-descriptor.properties') - - MetaPluginPropertiesTask() { - File templateFile = new File(project.buildDir, "templates/${descriptorOutput.name}") - Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') { - doLast { - InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream("/${descriptorOutput.name}") - templateFile.parentFile.mkdirs() - templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8') - } - } - - dependsOn(copyPluginPropertiesTemplate) - extension = project.extensions.create('es_meta_plugin', MetaPluginPropertiesExtension, project) - project.afterEvaluate { - // check require properties are set - if (extension.name == null) { - throw new InvalidUserDataException('name is a required setting for es_meta_plugin') - } - if (extension.description == null) { - throw new InvalidUserDataException('description is a required setting for es_meta_plugin') - } - // configure property substitution - from(templateFile.parentFile).include(descriptorOutput.name) - into(descriptorOutput.parentFile) - Map properties = generateSubstitutions() - expand(properties) - inputs.properties(properties) - } - } - - Map generateSubstitutions() { - return ['name': extension.name, - 'description': extension.description - ] - } -} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 70c1cf84ed119..b5926dcb35824 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -24,7 +24,7 @@ import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin + import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.elasticsearch.gradle.plugin.PluginPropertiesExtension import org.gradle.api.AntBuilder @@ -846,19 +846,15 @@ class ClusterFormationTasks { } static void verifyProjectHasBuildPlugin(String name, Version version, Project project, Project pluginProject) { - if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false && pluginProject.plugins.hasPlugin(MetaPluginBuildPlugin) == false) { + if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false) { throw new GradleException("Task [${name}] cannot add plugin [${pluginProject.path}] with version [${version}] to project's " + - "[${project.path}] dependencies: the plugin is not an esplugin or es_meta_plugin") + "[${project.path}] dependencies: the plugin is not an esplugin") } } - /** Find the plugin name in the given project, whether a regular plugin or meta plugin. */ + /** Find the plugin name in the given project. 
*/ static String findPluginName(Project pluginProject) { PluginPropertiesExtension extension = pluginProject.extensions.findByName('esplugin') - if (extension != null) { - return extension.name - } else { - return pluginProject.extensions.findByName('es_meta_plugin').name - } + return extension.name } } diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.es-meta-plugin.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.es-meta-plugin.properties deleted file mode 100644 index 50240e95416c7..0000000000000 --- a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.es-meta-plugin.properties +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -implementation-class=org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin diff --git a/buildSrc/src/main/resources/meta-plugin-descriptor.properties b/buildSrc/src/main/resources/meta-plugin-descriptor.properties deleted file mode 100644 index 950cb03240083..0000000000000 --- a/buildSrc/src/main/resources/meta-plugin-descriptor.properties +++ /dev/null @@ -1,20 +0,0 @@ -# Elasticsearch meta plugin descriptor file -# This file must exist as 'meta-plugin-descriptor.properties' inside a plugin. -# -### example meta plugin for "meta-foo" -# -# meta-foo.zip <-- zip file for the meta plugin, with this structure: -# |____ <-- The plugin files for bundled_plugin_1 -# |____ <-- The plugin files for bundled_plugin_2 -# |____ meta-plugin-descriptor.properties <-- example contents below: -# -# description=My meta plugin -# name=meta-foo -# -### mandatory elements for all meta plugins: -# -# 'description': simple summary of the meta plugin -description=${description} -# -# 'name': the meta plugin name -name=${name} \ No newline at end of file diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 71c57f7f10135..d6f6e36b8c48e 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -87,8 +87,8 @@ *
 * <li>A URL to a plugin zip</li>
 * </ul>
 *
- * Plugins are packaged as zip files. Each packaged plugin must contain a plugin properties file
- * or a meta plugin properties file. See {@link PluginInfo} and {@link MetaPluginInfo}, respectively.
+ * Plugins are packaged as zip files. Each packaged plugin must contain a plugin properties file.
+ * See {@link PluginInfo}.
 * <p>
 * The installation process first extracts the plugin files into a temporary
 * directory in order to verify the plugin satisfies the following requirements:
@@ -106,11 +106,6 @@
 * files specific to the plugin. The config files will be installed into a subdirectory of the
 * elasticsearch config directory, using the name of the plugin. If any files to be installed
 * already exist, they will be skipped.
- * <p>
- * If the plugin is a meta plugin, the installation process installs each plugin separately
- * inside the meta plugin directory. The {@code bin} and {@code config} directory are also moved
- * inside the meta plugin directory.
- * <p>
    */ class InstallPluginCommand extends EnvironmentAwareCommand { @@ -550,7 +545,7 @@ private Path stagingDirectoryWithoutPosixPermissions(Path pluginsDir) throws IOE } // checking for existing version of the plugin - private void verifyPluginName(Path pluginPath, String pluginName, Path candidateDir) throws UserException, IOException { + private void verifyPluginName(Path pluginPath, String pluginName) throws UserException, IOException { // don't let user install plugin conflicting with module... // they might be unavoidably in maven central and are packaged up the same way) if (MODULES.contains(pluginName)) { @@ -567,28 +562,10 @@ private void verifyPluginName(Path pluginPath, String pluginName, Path candidate pluginName); throw new UserException(PLUGIN_EXISTS, message); } - // checks meta plugins too - try (DirectoryStream stream = Files.newDirectoryStream(pluginPath)) { - for (Path plugin : stream) { - if (candidateDir.equals(plugin.resolve(pluginName))) { - continue; - } - if (MetaPluginInfo.isMetaPlugin(plugin) && Files.exists(plugin.resolve(pluginName))) { - final MetaPluginInfo info = MetaPluginInfo.readFromProperties(plugin); - final String message = String.format( - Locale.ROOT, - "plugin name [%s] already exists in a meta plugin; if you need to update the meta plugin, " + - "uninstall it first using command 'remove %s'", - plugin.resolve(pluginName).toAbsolutePath(), - info.getName()); - throw new UserException(PLUGIN_EXISTS, message); - } - } - } } /** Load information about the plugin, and verify it can be installed with no errors. */ - private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception { + private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, Environment env) throws Exception { final PluginInfo info = PluginInfo.readFromProperties(pluginRoot); if (info.hasNativeController()) { throw new IllegalStateException("plugins can not have native controllers"); @@ -596,7 +573,7 @@ private PluginInfo loadPluginInfo(Terminal terminal, Path pluginRoot, boolean is PluginsService.verifyCompatibility(info); // checking for existing version of the plugin - verifyPluginName(env.pluginsFile(), info.getName(), pluginRoot); + verifyPluginName(env.pluginsFile(), info.getName()); PluginsService.checkForFailedPluginRemovals(env.pluginsFile()); @@ -635,11 +612,7 @@ private void install(Terminal terminal, boolean isBatch, Path tmpRoot, Environme List deleteOnFailure = new ArrayList<>(); deleteOnFailure.add(tmpRoot); try { - if (MetaPluginInfo.isMetaPlugin(tmpRoot)) { - installMetaPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure); - } else { - installPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure); - } + installPlugin(terminal, isBatch, tmpRoot, env, deleteOnFailure); } catch (Exception installProblem) { try { IOUtils.rm(deleteOnFailure.toArray(new Path[0])); @@ -650,71 +623,13 @@ private void install(Terminal terminal, boolean isBatch, Path tmpRoot, Environme } } - /** - * Installs the meta plugin and all the bundled plugins from {@code tmpRoot} into the plugins dir. - * If a bundled plugin has a bin dir and/or a config dir, those are copied. 
- */ - private void installMetaPlugin(Terminal terminal, boolean isBatch, Path tmpRoot, - Environment env, List deleteOnFailure) throws Exception { - final MetaPluginInfo metaInfo = MetaPluginInfo.readFromProperties(tmpRoot); - verifyPluginName(env.pluginsFile(), metaInfo.getName(), tmpRoot); - - final Path destination = env.pluginsFile().resolve(metaInfo.getName()); - deleteOnFailure.add(destination); - terminal.println(VERBOSE, metaInfo.toString()); - - final List pluginPaths = new ArrayList<>(); - try (DirectoryStream paths = Files.newDirectoryStream(tmpRoot)) { - // Extract bundled plugins path and validate plugin names - for (Path plugin : paths) { - if (MetaPluginInfo.isPropertiesFile(plugin)) { - continue; - } - final PluginInfo info = PluginInfo.readFromProperties(plugin); - PluginsService.verifyCompatibility(info); - verifyPluginName(env.pluginsFile(), info.getName(), plugin); - pluginPaths.add(plugin); - } - } - - // read optional security policy from each bundled plugin, and confirm all exceptions one time with user - - Set permissions = new HashSet<>(); - final List pluginInfos = new ArrayList<>(); - for (Path plugin : pluginPaths) { - final PluginInfo info = loadPluginInfo(terminal, plugin, isBatch, env); - pluginInfos.add(info); - - Path policy = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY); - if (Files.exists(policy)) { - permissions.addAll(PluginSecurity.parsePermissions(policy, env.tmpFile())); - } - } - PluginSecurity.confirmPolicyExceptions(terminal, permissions, isBatch); - - // move support files and rename as needed to prepare the exploded plugin for its final location - for (int i = 0; i < pluginPaths.size(); ++i) { - Path pluginPath = pluginPaths.get(i); - PluginInfo info = pluginInfos.get(i); - installPluginSupportFiles(info, pluginPath, env.binFile().resolve(metaInfo.getName()), - env.configFile().resolve(metaInfo.getName()), deleteOnFailure); - // ensure the plugin dir within the tmpRoot has the correct name - if (pluginPath.getFileName().toString().equals(info.getName()) == false) { - Files.move(pluginPath, pluginPath.getParent().resolve(info.getName()), StandardCopyOption.ATOMIC_MOVE); - } - } - movePlugin(tmpRoot, destination); - String[] plugins = pluginInfos.stream().map(PluginInfo::getName).toArray(String[]::new); - terminal.println("-> Installed " + metaInfo.getName() + " with: " + Strings.arrayToCommaDelimitedString(plugins)); - } - /** * Installs the plugin from {@code tmpRoot} into the plugins dir. * If the plugin has a bin dir and/or a config dir, those are moved. 
*/ private void installPlugin(Terminal terminal, boolean isBatch, Path tmpRoot, Environment env, List deleteOnFailure) throws Exception { - final PluginInfo info = loadPluginInfo(terminal, tmpRoot, isBatch, env); + final PluginInfo info = loadPluginInfo(terminal, tmpRoot, env); // read optional security policy (extra permissions), if it exists, confirm or warn the user Path policy = tmpRoot.resolve(PluginInfo.ES_PLUGIN_POLICY); final Set permissions; diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index fb73554c2b19e..6015d9da14307 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -61,25 +61,7 @@ protected void execute(Terminal terminal, OptionSet options, Environment env) th } Collections.sort(plugins); for (final Path plugin : plugins) { - if (MetaPluginInfo.isMetaPlugin(plugin)) { - MetaPluginInfo metaInfo = MetaPluginInfo.readFromProperties(plugin); - List subPluginPaths = new ArrayList<>(); - try (DirectoryStream subPaths = Files.newDirectoryStream(plugin)) { - for (Path subPlugin : subPaths) { - if (MetaPluginInfo.isPropertiesFile(subPlugin)) { - continue; - } - subPluginPaths.add(subPlugin); - } - } - Collections.sort(subPluginPaths); - terminal.println(Terminal.Verbosity.SILENT, metaInfo.getName()); - for (Path subPlugin : subPluginPaths) { - printPlugin(env, terminal, subPlugin, "\t"); - } - } else { - printPlugin(env, terminal, plugin, ""); - } + printPlugin(env, terminal, plugin, ""); } } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 5931e66cb9a5d..bfeb3c0279b65 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -219,18 +219,6 @@ static String createPluginUrl(String name, Path structure, String... additionalP return createPlugin(name, structure, additionalProps).toUri().toURL().toString(); } - /** creates an meta plugin .zip and returns the url for testing */ - static String createMetaPluginUrl(String name, Path structure) throws IOException { - return createMetaPlugin(name, structure).toUri().toURL().toString(); - } - - static void writeMetaPlugin(String name, Path structure) throws IOException { - PluginTestUtil.writeMetaPluginProperties(structure, - "description", "fake desc", - "name", name - ); - } - static void writePlugin(String name, Path structure, String... additionalProps) throws IOException { String[] properties = Stream.concat(Stream.of( "description", "fake desc", @@ -261,11 +249,6 @@ static Path createPlugin(String name, Path structure, String... 
additionalProps) return writeZip(structure, null); } - static Path createMetaPlugin(String name, Path structure) throws IOException { - writeMetaPlugin(name, structure); - return writeZip(structure, null); - } - void installPlugin(String pluginUrl, Path home) throws Exception { installPlugin(pluginUrl, home, skipJarHellCommand); } @@ -275,11 +258,6 @@ void installPlugin(String pluginUrl, Path home, InstallPluginCommand command) th command.execute(terminal, pluginUrl, false, env); } - void assertMetaPlugin(String metaPlugin, String name, Path original, Environment env) throws IOException { - assertPluginInternal(name, env.pluginsFile().resolve(metaPlugin)); - assertConfigAndBin(metaPlugin, original, env); - } - void assertPlugin(String name, Path original, Environment env) throws IOException { assertPluginInternal(name, env.pluginsFile()); assertConfigAndBin(name, original, env); @@ -388,23 +366,9 @@ public void testSomethingWorks() throws Exception { assertPlugin("fake", pluginDir, env.v2()); } - public void testWithMetaPlugin() throws Exception { - Tuple env = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - Files.createDirectory(pluginDir.resolve("fake1")); - writePlugin("fake1", pluginDir.resolve("fake1")); - Files.createDirectory(pluginDir.resolve("fake2")); - writePlugin("fake2", pluginDir.resolve("fake2")); - String pluginZip = createMetaPluginUrl("my_plugins", pluginDir); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins", "fake1", pluginDir, env.v2()); - assertMetaPlugin("my_plugins", "fake2", pluginDir, env.v2()); - } - public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); + Path pluginDir = createPluginDir(temp); String pluginZip = createPluginUrl("fake", pluginDir); final Path removing = env.v2().pluginsFile().resolve(".removing-failed"); Files.createDirectory(removing); @@ -414,11 +378,6 @@ public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception { "found file [%s] from a failed attempt to remove the plugin [failed]; execute [elasticsearch-plugin remove failed]", removing); assertThat(e, hasToString(containsString(expected))); - - // test with meta plugin - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - final IllegalStateException e1 = expectThrows(IllegalStateException.class, () -> installPlugin(metaZip, env.v1())); - assertThat(e1, hasToString(containsString(expected))); } public void testSpaceInUrl() throws Exception { @@ -500,23 +459,6 @@ public void testJarHell() throws Exception { assertInstallCleaned(environment.v2()); } - public void testJarHellInMetaPlugin() throws Exception { - // jar hell test needs a real filesystem - assumeTrue("real filesystem", isReal); - Tuple environment = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - Files.createDirectory(pluginDir.resolve("fake1")); - writePlugin("fake1", pluginDir.resolve("fake1")); - Files.createDirectory(pluginDir.resolve("fake2")); - writePlugin("fake2", pluginDir.resolve("fake2")); // adds plugin.jar with Fake2Plugin - writeJar(pluginDir.resolve("fake2").resolve("other.jar"), "Fake2Plugin"); - String pluginZip = createMetaPluginUrl("my_plugins", pluginDir); - IllegalStateException e = expectThrows(IllegalStateException.class, - () -> installPlugin(pluginZip, environment.v1(), defaultCommand)); - assertTrue(e.getMessage(), e.getMessage().contains("jar hell")); - 
assertInstallCleaned(environment.v2()); - } - public void testIsolatedPlugins() throws Exception { Tuple env = createEnv(fs, temp); // these both share the same FakePlugin class @@ -540,23 +482,6 @@ public void testExistingPlugin() throws Exception { assertInstallCleaned(env.v2()); } - public void testExistingMetaPlugin() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaZip = createPluginDir(temp); - Path pluginDir = metaZip.resolve("fake"); - Files.createDirectory(pluginDir); - String pluginZip = createPluginUrl("fake", pluginDir); - installPlugin(pluginZip, env.v1()); - UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env.v2()); - - String anotherZip = createMetaPluginUrl("another_plugins", metaZip); - e = expectThrows(UserException.class, () -> installPlugin(anotherZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env.v2()); - } - public void testBin() throws Exception { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); @@ -568,43 +493,20 @@ public void testBin() throws Exception { assertPlugin("fake", pluginDir, env.v2()); } - public void testMetaBin() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); - writePlugin("fake", pluginDir); - Path binDir = pluginDir.resolve("bin"); - Files.createDirectory(binDir); - Files.createFile(binDir.resolve("somescript")); - String pluginZip = createMetaPluginUrl("my_plugins", metaDir); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins","fake", pluginDir, env.v2()); - } - public void testBinNotDir() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); + Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPluginUrl("fake", pluginDir); UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env.v2()); } public void testBinContainsDir() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); + Path pluginDir = createPluginDir(temp); Path dirInBinDir = pluginDir.resolve("bin").resolve("foo"); Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); @@ -612,11 +514,6 @@ public void testBinContainsDir() throws Exception { UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("Directories not 
allowed in bin dir for plugin")); - assertInstallCleaned(env.v2()); } public void testBinConflict() throws Exception { @@ -649,27 +546,6 @@ public void testBinPermissions() throws Exception { } } - public void testMetaBinPermissions() throws Exception { - assumeTrue("posix filesystem", isPosix); - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); - writePlugin("fake", pluginDir); - Path binDir = pluginDir.resolve("bin"); - Files.createDirectory(binDir); - Files.createFile(binDir.resolve("somescript")); - String pluginZip = createMetaPluginUrl("my_plugins", metaDir); - try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { - Set perms = binAttrs.getCopyPermissions(); - // make sure at least one execute perm is missing, so we know we forced it during installation - perms.remove(PosixFilePermission.GROUP_EXECUTE); - binAttrs.setPermissions(perms); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2()); - } - } - public void testPluginPermissions() throws Exception { assumeTrue("posix filesystem", isPosix); @@ -761,32 +637,9 @@ public void testExistingConfig() throws Exception { assertTrue(Files.exists(envConfigDir.resolve("other.yml"))); } - public void testExistingMetaConfig() throws Exception { - Tuple env = createEnv(fs, temp); - Path envConfigDir = env.v2().configFile().resolve("my_plugins"); - Files.createDirectories(envConfigDir); - Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8)); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); - writePlugin("fake", pluginDir); - Path configDir = pluginDir.resolve("config"); - Files.createDirectory(configDir); - Files.write(configDir.resolve("custom.yml"), "new config".getBytes(StandardCharsets.UTF_8)); - Files.createFile(configDir.resolve("other.yml")); - String pluginZip = createMetaPluginUrl("my_plugins", metaDir); - installPlugin(pluginZip, env.v1()); - assertMetaPlugin("my_plugins", "fake", pluginDir, env.v2()); - List configLines = Files.readAllLines(envConfigDir.resolve("custom.yml"), StandardCharsets.UTF_8); - assertEquals(1, configLines.size()); - assertEquals("existing config", configLines.get(0)); - assertTrue(Files.exists(envConfigDir.resolve("other.yml"))); - } - public void testConfigNotDir() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); + Path pluginDir = createPluginDir(temp); Files.createDirectories(pluginDir); Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); @@ -794,11 +647,6 @@ public void testConfigNotDir() throws Exception { UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(UserException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env.v2()); } public void testConfigContainsDir() throws Exception { @@ -815,19 +663,12 @@ public void testConfigContainsDir() throws Exception { public void testMissingDescriptor() throws Exception { Tuple env = createEnv(fs, temp); - Path metaDir = 
createPluginDir(temp); - Path pluginDir = metaDir.resolve("fake"); - Files.createDirectory(pluginDir); + Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve("fake.yml")); String pluginZip = writeZip(pluginDir, null).toUri().toURL().toString(); NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); assertInstallCleaned(env.v2()); - - String metaZip = createMetaPluginUrl("my_plugins", metaDir); - e = expectThrows(NoSuchFileException.class, () -> installPlugin(metaZip, env.v1())); - assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); - assertInstallCleaned(env.v2()); } public void testContainsIntermediateDirectory() throws Exception { @@ -840,16 +681,6 @@ public void testContainsIntermediateDirectory() throws Exception { assertInstallCleaned(env.v2()); } - public void testContainsIntermediateDirectoryMeta() throws Exception { - Tuple env = createEnv(fs, temp); - Path pluginDir = createPluginDir(temp); - Files.createFile(pluginDir.resolve(MetaPluginInfo.ES_META_PLUGIN_PROPERTIES)); - String pluginZip = writeZip(pluginDir, "elasticsearch").toUri().toURL().toString(); - UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); - assertThat(e.getMessage(), containsString("This plugin was built with an older plugin structure")); - assertInstallCleaned(env.v2()); - } - public void testZipRelativeOutsideEntryName() throws Exception { Tuple env = createEnv(fs, temp); Path zip = createTempDir().resolve("broken.zip"); @@ -958,29 +789,6 @@ public void testPluginAlreadyInstalled() throws Exception { "if you need to update the plugin, uninstall it first using command 'remove fake'")); } - public void testMetaPluginAlreadyInstalled() throws Exception { - Tuple env = createEnv(fs, temp); - { - // install fake plugin - Path pluginDir = createPluginDir(temp); - String pluginZip = createPluginUrl("fake", pluginDir); - installPlugin(pluginZip, env.v1()); - } - - Path pluginDir = createPluginDir(temp); - Files.createDirectory(pluginDir.resolve("fake")); - writePlugin("fake", pluginDir.resolve("fake")); - Files.createDirectory(pluginDir.resolve("other")); - writePlugin("other", pluginDir.resolve("other")); - String metaZip = createMetaPluginUrl("meta", pluginDir); - final UserException e = expectThrows(UserException.class, - () -> installPlugin(metaZip, env.v1(), randomFrom(skipJarHellCommand, defaultCommand))); - assertThat( - e.getMessage(), - equalTo("plugin directory [" + env.v2().pluginsFile().resolve("fake") + "] already exists; " + - "if you need to update the plugin, uninstall it first using command 'remove fake'")); - } - private void installPlugin(MockTerminal terminal, boolean isBatch) throws Exception { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); @@ -1224,24 +1032,6 @@ public void testPolicyConfirmation() throws Exception { assertPlugin("fake", pluginDir, env.v2()); } - public void testMetaPluginPolicyConfirmation() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path fake1Dir = metaDir.resolve("fake1"); - Files.createDirectory(fake1Dir); - writePluginSecurityPolicy(fake1Dir, "setAccessible", "setFactory"); - writePlugin("fake1", fake1Dir); - Path fake2Dir = metaDir.resolve("fake2"); - Files.createDirectory(fake2Dir); - writePluginSecurityPolicy(fake2Dir, "setAccessible", "accessDeclaredMembers"); - 
writePlugin("fake2", fake2Dir); - String pluginZip = createMetaPluginUrl("meta-plugin", metaDir); - - assertPolicyConfirmation(env, pluginZip, "plugin requires additional permissions"); - assertMetaPlugin("meta-plugin", "fake1", metaDir, env.v2()); - assertMetaPlugin("meta-plugin", "fake2", metaDir, env.v2()); - } - public void testPluginWithNativeController() throws Exception { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); @@ -1250,21 +1040,4 @@ public void testPluginWithNativeController() throws Exception { final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1())); assertThat(e, hasToString(containsString("plugins can not have native controllers"))); } - - public void testMetaPluginWithNativeController() throws Exception { - Tuple env = createEnv(fs, temp); - Path metaDir = createPluginDir(temp); - Path fake1Dir = metaDir.resolve("fake1"); - Files.createDirectory(fake1Dir); - writePluginSecurityPolicy(fake1Dir, "setAccessible", "setFactory"); - writePlugin("fake1", fake1Dir); - Path fake2Dir = metaDir.resolve("fake2"); - Files.createDirectory(fake2Dir); - writePlugin("fake2", fake2Dir, "has.native.controller", "true"); - String pluginZip = createMetaPluginUrl("meta-plugin", metaDir); - - final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1())); - assertThat(e, hasToString(containsString("plugins can not have native controllers"))); - } - } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index 42d0df75e9dc2..86ca616b50f08 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -93,16 +93,7 @@ private static void buildFakePlugin( final String description, final String name, final String classname) throws IOException { - buildFakePlugin(env, null, description, name, classname, false); - } - - private static void buildFakePlugin( - final Environment env, - final String metaPlugin, - final String description, - final String name, - final String classname) throws IOException { - buildFakePlugin(env, metaPlugin, description, name, classname, false); + buildFakePlugin(env, description, name, classname, false); } private static void buildFakePlugin( @@ -111,36 +102,15 @@ private static void buildFakePlugin( final String name, final String classname, final boolean hasNativeController) throws IOException { - buildFakePlugin(env, null, description, name, classname, hasNativeController); - } - - private static void buildFakePlugin( - final Environment env, - final String metaPlugin, - final String description, - final String name, - final String classname, - final boolean hasNativeController) throws IOException { - Path dest = metaPlugin != null ? 
env.pluginsFile().resolve(metaPlugin) : env.pluginsFile(); PluginTestUtil.writePluginProperties( - dest.resolve(name), - "description", description, - "name", name, - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", "1.8", - "classname", classname, - "has.native.controller", Boolean.toString(hasNativeController)); - } - - private static void buildFakeMetaPlugin( - final Environment env, - final String description, - final String name) throws IOException { - PluginTestUtil.writeMetaPluginProperties( env.pluginsFile().resolve(name), "description", description, - "name", name); + "name", name, + "version", "1.0", + "elasticsearch.version", Version.CURRENT.toString(), + "java.version", "1.8", + "classname", classname, + "has.native.controller", Boolean.toString(hasNativeController)); } public void testPluginsDirMissing() throws Exception { @@ -167,16 +137,6 @@ public void testTwoPlugins() throws Exception { assertEquals(buildMultiline("fake1", "fake2"), terminal.getOutput()); } - public void testMetaPlugin() throws Exception { - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - buildFakePlugin(env, "meta_plugin", "fake desc", "fake1", "org.fake1"); - buildFakePlugin(env, "meta_plugin", "fake desc 2", "fake2", "org.fake2"); - buildFakePlugin(env, "fake desc 3", "fake3", "org.fake3"); - buildFakePlugin(env, "fake desc 4", "fake4", "org.fake4"); - MockTerminal terminal = listPlugins(home); - assertEquals(buildMultiline("fake3", "fake4", "meta_plugin", "\tfake1", "\tfake2"), terminal.getOutput()); - } - public void testPluginWithVerbose() throws Exception { buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake"); String[] params = { "-v" }; @@ -248,39 +208,6 @@ public void testPluginWithVerboseMultiplePlugins() throws Exception { terminal.getOutput()); } - public void testPluginWithVerboseMetaPlugins() throws Exception { - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - buildFakePlugin(env, "meta_plugin", "fake desc 1", "fake_plugin1", "org.fake"); - buildFakePlugin(env, "meta_plugin", "fake desc 2", "fake_plugin2", "org.fake2"); - String[] params = { "-v" }; - MockTerminal terminal = listPlugins(home, params); - assertEquals( - buildMultiline( - "Plugins directory: " + env.pluginsFile(), - "meta_plugin", - "\tfake_plugin1", - "\t- Plugin information:", - "\tName: fake_plugin1", - "\tDescription: fake desc 1", - "\tVersion: 1.0", - "\tElasticsearch Version: " + Version.CURRENT.toString(), - "\tJava Version: 1.8", - "\tNative Controller: false", - "\tExtended Plugins: []", - "\t * Classname: org.fake", - "\tfake_plugin2", - "\t- Plugin information:", - "\tName: fake_plugin2", - "\tDescription: fake desc 2", - "\tVersion: 1.0", - "\tElasticsearch Version: " + Version.CURRENT.toString(), - "\tJava Version: 1.8", - "\tNative Controller: false", - "\tExtended Plugins: []", - "\t * Classname: org.fake2"), - terminal.getOutput()); - } - public void testPluginWithoutVerboseMultiplePlugins() throws Exception { buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); @@ -308,19 +235,6 @@ public void testPluginWithWrongDescriptorFile() throws Exception{ e.getMessage()); } - public void testMetaPluginWithWrongDescriptorFile() throws Exception{ - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - final Path pluginDir = env.pluginsFile().resolve("meta_plugin").resolve("fake_plugin1"); - PluginTestUtil.writePluginProperties(pluginDir, "description", "fake 
desc"); - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> listPlugins(home)); - final Path descriptorPath = pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES); - assertEquals( - "property [name] is missing in [" + descriptorPath.toString() + "]", - e.getMessage()); - } - public void testExistingIncompatiblePlugin() throws Exception { PluginTestUtil.writePluginProperties(env.pluginsFile().resolve("fake_plugin1"), "description", "fake desc 1", @@ -341,27 +255,4 @@ public void testExistingIncompatiblePlugin() throws Exception { terminal = listPlugins(home, params); assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput()); } - - public void testExistingIncompatibleMetaPlugin() throws Exception { - buildFakeMetaPlugin(env, "fake meta desc", "meta_plugin"); - PluginTestUtil.writePluginProperties(env.pluginsFile().resolve("meta_plugin").resolve("fake_plugin1"), - "description", "fake desc 1", - "name", "fake_plugin1", - "version", "1.0", - "elasticsearch.version", Version.fromString("1.0.0").toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "org.fake1"); - buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); - - MockTerminal terminal = listPlugins(home); - String message = "plugin [fake_plugin1] was built for Elasticsearch version 1.0 but version " + Version.CURRENT + " is required"; - assertEquals( - "fake_plugin2\nmeta_plugin\n\tfake_plugin1\n" + "WARNING: " + message + "\n", - terminal.getOutput()); - - String[] params = {"-s"}; - terminal = listPlugins(home, params); - assertEquals("fake_plugin2\nmeta_plugin\n\tfake_plugin1\n", terminal.getOutput()); - } - } diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index a150d22a00429..fc28ad784282c 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -103,16 +103,6 @@ void createPlugin(Path path, String name, Version version) throws IOException { "classname", "SomeClass"); } - void createMetaPlugin(String name, String... 
plugins) throws Exception {
-        PluginTestUtil.writeMetaPluginProperties(
-            env.pluginsFile().resolve(name),
-            "description", "dummy",
-            "name", name);
-        for (String plugin : plugins) {
-            createPlugin(env.pluginsFile().resolve(name), plugin);
-        }
-    }
-
     static MockTerminal removePlugin(String name, Path home, boolean purge) throws Exception {
         Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build());
         MockTerminal terminal = new MockTerminal();
@@ -159,19 +149,6 @@ public void testRemoveOldVersion() throws Exception {
         assertRemoveCleaned(env);
     }
 
-    public void testBasicMeta() throws Exception {
-        createMetaPlugin("meta", "fake1");
-        createPlugin("other");
-        removePlugin("meta", home, randomBoolean());
-        assertFalse(Files.exists(env.pluginsFile().resolve("meta")));
-        assertTrue(Files.exists(env.pluginsFile().resolve("other")));
-        assertRemoveCleaned(env);
-
-        UserException exc =
-            expectThrows(UserException.class, () -> removePlugin("fake1", home, randomBoolean()));
-        assertThat(exc.getMessage(), containsString("plugin [fake1] not found"));
-    }
-
     public void testBin() throws Exception {
         createPlugin("fake");
         Path binDir = env.binFile().resolve("fake");
diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc
index b89ac903592ca..fceeeac892c80 100644
--- a/docs/plugins/authors.asciidoc
+++ b/docs/plugins/authors.asciidoc
@@ -13,8 +13,6 @@ The Elasticsearch repository contains examples of:
   which contains a rescore plugin.
 * a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/script-expert-scoring[Java plugin]
   which contains a script plugin.
-* a https://github.com/elastic/elasticsearch/tree/master/plugins/examples/meta-plugin[Java plugin]
-  which contains a meta plugin.
 
 These examples provide the bare bones needed to get started. For more
 information about how to write a plugin, we recommend looking at the plugins
@@ -120,19 +118,3 @@ AccessController.doPrivileged(
 
 See http://www.oracle.com/technetwork/java/seccodeguide-139067.html[Secure Coding Guidelines for Java SE]
 for more information.
-
-[float]
-=== Meta Plugin
-
-It is also possible to bundle multiple plugins into a meta plugin.
-A directory for each sub-plugin must be contained in a directory called `elasticsearch`.
-The meta plugin must also contain a file called `meta-plugin-descriptor.properties` in the directory named
-`elasticsearch`.
-The format for this file is described in detail in this example:
-
-["source","properties",subs="attributes"]
---------------------------------------------------
-include::{plugin-properties-files}/meta-plugin-descriptor.properties[]
---------------------------------------------------
-
-A meta plugin can be installed/removed like a normal plugin with the `bin/elasticsearch-plugin` command.
diff --git a/plugins/examples/meta-plugin/build.gradle b/plugins/examples/meta-plugin/build.gradle
deleted file mode 100644
index db28e6378713e..0000000000000
--- a/plugins/examples/meta-plugin/build.gradle
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// A meta plugin packaging example that bundles multiple plugins in a single zip. - -apply plugin: 'elasticsearch.es-meta-plugin' - -es_meta_plugin { - name 'meta-plugin' - description 'example meta plugin' - plugins = ['dummy-plugin1', 'dummy-plugin2'] -} diff --git a/plugins/examples/meta-plugin/dummy-plugin1/build.gradle b/plugins/examples/meta-plugin/dummy-plugin1/build.gradle deleted file mode 100644 index 5a02e993f8c25..0000000000000 --- a/plugins/examples/meta-plugin/dummy-plugin1/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -apply plugin: 'elasticsearch.esplugin' - -esplugin { - name 'dummy-plugin1' - description 'A dummy plugin' - classname 'org.elasticsearch.example.DummyPlugin1' -} - -test.enabled = false -integTestRunner.enabled = false \ No newline at end of file diff --git a/plugins/examples/meta-plugin/dummy-plugin1/src/main/java/org/elasticsearch/example/DummyPlugin1.java b/plugins/examples/meta-plugin/dummy-plugin1/src/main/java/org/elasticsearch/example/DummyPlugin1.java deleted file mode 100644 index 65102dbc2e337..0000000000000 --- a/plugins/examples/meta-plugin/dummy-plugin1/src/main/java/org/elasticsearch/example/DummyPlugin1.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.example; - -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.SearchPlugin; - -import java.util.List; - -import static java.util.Collections.singletonList; - -public class DummyPlugin1 extends Plugin {} diff --git a/plugins/examples/meta-plugin/dummy-plugin2/build.gradle b/plugins/examples/meta-plugin/dummy-plugin2/build.gradle deleted file mode 100644 index d90983adfed0c..0000000000000 --- a/plugins/examples/meta-plugin/dummy-plugin2/build.gradle +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -apply plugin: 'elasticsearch.esplugin' - -esplugin { - name 'dummy-plugin2' - description 'Another dummy plugin' - classname 'org.elasticsearch.example.DummyPlugin2' -} - -test.enabled = false -integTestRunner.enabled = false \ No newline at end of file diff --git a/plugins/examples/meta-plugin/dummy-plugin2/src/main/java/org/elasticsearch/example/DummyPlugin2.java b/plugins/examples/meta-plugin/dummy-plugin2/src/main/java/org/elasticsearch/example/DummyPlugin2.java deleted file mode 100644 index 2d74d7603d15f..0000000000000 --- a/plugins/examples/meta-plugin/dummy-plugin2/src/main/java/org/elasticsearch/example/DummyPlugin2.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.example; - -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.SearchPlugin; - -import java.util.List; - -import static java.util.Collections.singletonList; - -public class DummyPlugin2 extends Plugin {} diff --git a/plugins/examples/meta-plugin/src/main/resources/meta-plugin-descriptor.properties b/plugins/examples/meta-plugin/src/main/resources/meta-plugin-descriptor.properties deleted file mode 100644 index 1fd5a86b95a54..0000000000000 --- a/plugins/examples/meta-plugin/src/main/resources/meta-plugin-descriptor.properties +++ /dev/null @@ -1,4 +0,0 @@ -# The name of the meta plugin -name=my_meta_plugin -# The description of the meta plugin -description=A meta plugin example \ No newline at end of file diff --git a/plugins/examples/meta-plugin/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java b/plugins/examples/meta-plugin/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java deleted file mode 100644 index d1f9e6b73703e..0000000000000 --- a/plugins/examples/meta-plugin/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */
-
-package org.elasticsearch.smoketest;
-
-import com.carrotsearch.randomizedtesting.annotations.Name;
-import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
-import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
-import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
-
-public class SmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
-
-    public SmokeTestPluginsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
-        super(testCandidate);
-    }
-
-    @ParametersFactory
-    public static Iterable<Object[]> parameters() throws Exception {
-        return ESClientYamlSuiteTestCase.createParameters();
-    }
-}
-
diff --git a/plugins/examples/meta-plugin/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yml b/plugins/examples/meta-plugin/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yml
deleted file mode 100644
index 011a278ed8949..0000000000000
--- a/plugins/examples/meta-plugin/src/test/resources/rest-api-spec/test/smoke_test_plugins/10_basic.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-# Integration tests for testing meta plugins
-#
-"Check meta plugin install":
-    - do:
-        cluster.state: {}
-
-    # Get master node id
-    - set: { master_node: master }
-
-    - do:
-        nodes.info: {}
-
-    - match: { nodes.$master.plugins.0.name: dummy-plugin1 }
-    - match: { nodes.$master.plugins.1.name: dummy-plugin2 }
diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java
index 1afda01130ba7..edc10bdec3a86 100644
--- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java
+++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java
@@ -170,91 +170,6 @@ private void assertControllerSpawns(final Function<Environment, Path> pluginsDir
         }
     }
 
-    /**
-     * Two plugins in a meta module - one with a controller daemon and one without.
-     */
-    public void testControllerSpawnMeta() throws Exception {
-        runTestControllerSpawnMeta(Environment::pluginsFile, false);
-        runTestControllerSpawnMeta(Environment::modulesFile, true);
-    }
-
-
-    private void runTestControllerSpawnMeta(
-            final Function<Environment, Path> pluginsDirFinder, final boolean expectSpawn) throws Exception {
-        /*
-         * On Windows you can not directly run a batch file - you have to run cmd.exe with the batch
-         * file as an argument and that's out of the remit of the controller daemon process spawner.
-         */
-        assumeFalse("This test does not work on Windows", Constants.WINDOWS);
-
-        Path esHome = createTempDir().resolve("esHome");
-        Settings.Builder settingsBuilder = Settings.builder();
-        settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString());
-        Settings settings = settingsBuilder.build();
-
-        Environment environment = TestEnvironment.newEnvironment(settings);
-
-        Path metaModule = pluginsDirFinder.apply(environment).resolve("meta_module");
-        Files.createDirectories(environment.modulesFile());
-        Files.createDirectories(metaModule);
-        PluginTestUtil.writeMetaPluginProperties(
-            metaModule,
-            "description", "test_plugin",
-            "name", "meta_plugin",
-            "plugins", "test_plugin,other_plugin");
-
-        // this plugin will have a controller daemon
-        Path plugin = metaModule.resolve("test_plugin");
-
-        Files.createDirectories(plugin);
-        PluginTestUtil.writePluginProperties(
-            plugin,
-            "description", "test_plugin",
-            "version", Version.CURRENT.toString(),
-            "elasticsearch.version", Version.CURRENT.toString(),
-            "name", "test_plugin",
-            "java.version", "1.8",
-            "classname", "TestPlugin",
-            "has.native.controller", "true");
-        Path controllerProgram = Platforms.nativeControllerPath(plugin);
-        createControllerProgram(controllerProgram);
-
-        // this plugin will not have a controller daemon
-        Path otherPlugin = metaModule.resolve("other_plugin");
-        Files.createDirectories(otherPlugin);
-        PluginTestUtil.writePluginProperties(
-            otherPlugin,
-            "description", "other_plugin",
-            "version", Version.CURRENT.toString(),
-            "elasticsearch.version", Version.CURRENT.toString(),
-            "name", "other_plugin",
-            "java.version", "1.8",
-            "classname", "OtherPlugin",
-            "has.native.controller", "false");
-
-        Spawner spawner = new Spawner();
-        spawner.spawnNativeControllers(environment);
-
-        List<Process> processes = spawner.getProcesses();
-
-        if (expectSpawn) {
-            // as there should only be a reference in the list for the plugin that had the controller daemon, we expect one here
-            assertThat(processes, hasSize(1));
-            Process process = processes.get(0);
-            final InputStreamReader in =
-                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8);
-            try (BufferedReader stdoutReader = new BufferedReader(in)) {
-                String line = stdoutReader.readLine();
-                assertEquals("I am alive", line);
-                spawner.close();
-                // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling
-                assertTrue(process.waitFor(1, TimeUnit.SECONDS));
-            }
-        } else {
-            assertThat(processes, hasSize(0));
-        }
-    }
-
     public void testControllerSpawnWithIncorrectDescriptor() throws IOException {
         // this plugin will have a controller daemon
         Path esHome = createTempDir().resolve("esHome");
diff --git a/server/src/main/java/org/elasticsearch/plugins/MetaPluginInfo.java b/server/src/main/java/org/elasticsearch/plugins/MetaPluginInfo.java
deleted file mode 100644
index d8bb176273ce2..0000000000000
--- a/server/src/main/java/org/elasticsearch/plugins/MetaPluginInfo.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.plugins;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.Properties;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
-/**
- * An in-memory representation of the meta plugin descriptor.
- */
-public class MetaPluginInfo {
-    static final String ES_META_PLUGIN_PROPERTIES = "meta-plugin-descriptor.properties";
-
-    private final String name;
-    private final String description;
-
-    /**
-     * Construct plugin info.
-     *
-     * @param name the name of the plugin
-     * @param description a description of the plugin
-     */
-    private MetaPluginInfo(String name, String description) {
-        this.name = name;
-        this.description = description;
-    }
-
-    /**
-     * @return Whether the provided {@code path} is a meta plugin.
-     */
-    public static boolean isMetaPlugin(final Path path) {
-        return Files.exists(path.resolve(ES_META_PLUGIN_PROPERTIES));
-    }
-
-    /**
-     * @return Whether the provided {@code path} is a meta properties file.
-     */
-    public static boolean isPropertiesFile(final Path path) {
-        return ES_META_PLUGIN_PROPERTIES.equals(path.getFileName().toString());
-    }
-
-    /** reads (and validates) meta plugin metadata descriptor file */
-
-    /**
-     * Reads and validates the meta plugin descriptor file.
-     *
-     * @param path the path to the root directory for the meta plugin
-     * @return the meta plugin info
-     * @throws IOException if an I/O exception occurred reading the meta plugin descriptor
-     */
-    public static MetaPluginInfo readFromProperties(final Path path) throws IOException {
-        final Path descriptor = path.resolve(ES_META_PLUGIN_PROPERTIES);
-
-        final Map<String, String> propsMap;
-        {
-            final Properties props = new Properties();
-            try (InputStream stream = Files.newInputStream(descriptor)) {
-                props.load(stream);
-            }
-            propsMap = props.stringPropertyNames().stream().collect(Collectors.toMap(Function.identity(), props::getProperty));
-        }
-
-        final String name = propsMap.remove("name");
-        if (name == null || name.isEmpty()) {
-            throw new IllegalArgumentException(
-                    "property [name] is missing for meta plugin in [" + descriptor + "]");
-        }
-        final String description = propsMap.remove("description");
-        if (description == null) {
-            throw new IllegalArgumentException(
-                    "property [description] is missing for meta plugin [" + name + "]");
-        }
-
-        if (propsMap.isEmpty() == false) {
-            throw new IllegalArgumentException("Unknown properties in meta plugin descriptor: " + propsMap.keySet());
-        }
-
-        return new MetaPluginInfo(name, description);
-    }
-
-    /**
-     * The name of the meta plugin.
-     *
-     * @return the meta plugin name
-     */
-    public String getName() {
-        return name;
-    }
-
-    /**
-     * The description of the meta plugin.
-     *
-     * @return the meta plugin description
-     */
-    public String getDescription() {
-        return description;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        MetaPluginInfo that = (MetaPluginInfo) o;
-
-        if (!name.equals(that.name)) return false;
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        return name.hashCode();
-    }
-
-    @Override
-    public String toString() {
-        final StringBuilder information = new StringBuilder()
-                .append("- Plugin information:\n")
-                .append("Name: ").append(name).append("\n")
-                .append("Description: ").append(description);
-        return information.toString();
-    }
-
-}
diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java
index 4514691e4bec4..3bb2c3a1868b1 100644
--- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java
+++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java
@@ -140,16 +140,12 @@ public PluginsService(Settings settings, Path configPath, Path modulesDirectory,
             // TODO: remove this leniency, but tests bogusly rely on it
             if (isAccessibleDirectory(pluginsDirectory, logger)) {
                 checkForFailedPluginRemovals(pluginsDirectory);
-                // call findBundles directly to get the meta plugin names
-                List<BundleCollection> plugins = findBundles(pluginsDirectory, "plugin");
-                for (final BundleCollection plugin : plugins) {
-                    final Collection<Bundle> bundles = plugin.bundles();
-                    for (final Bundle bundle : bundles) {
-                        pluginsList.add(bundle.plugin);
-                    }
-                    seenBundles.addAll(bundles);
-                    pluginsNames.add(plugin.name());
+                Set<Bundle> plugins = getPluginBundles(pluginsDirectory);
+                for (final Bundle bundle : plugins) {
+                    pluginsList.add(bundle.plugin);
+                    pluginsNames.add(bundle.plugin.getName());
                 }
+                seenBundles.addAll(plugins);
             }
         } catch (IOException ex) {
             throw new IllegalStateException("Unable to initialize plugins", ex);
@@ -253,17 +249,8 @@ public PluginsAndModules info() {
         return info;
     }
 
-    /**
-     * An abstraction over a single plugin and meta-plugins.
-     */
-    interface BundleCollection {
-        String name();
-        Collection<Bundle> bundles();
-    }
-
-    // a "bundle" is a group of plugins in a single classloader
-    // really should be 1-1, but we are not so fortunate
-    static class Bundle implements BundleCollection {
+    // a "bundle" is a group of jars in a single classloader
+    static class Bundle {
         final PluginInfo plugin;
         final Set<URL> urls;
@@ -283,16 +270,6 @@ static class Bundle {
             this.urls = Objects.requireNonNull(urls);
         }
 
-        @Override
-        public String name() {
-            return plugin.getName();
-        }
-
-        @Override
-        public Collection<Bundle> bundles() {
-            return Collections.singletonList(this);
-        }
-
         @Override
         public boolean equals(Object o) {
             if (this == o) return true;
@@ -308,87 +285,30 @@ public int hashCode() {
     }
 
     /**
-     * Represents a meta-plugin and the {@link Bundle}s corresponding to its constituents.
-     */
-    static class MetaBundle implements BundleCollection {
-        private final String name;
-        private final List<Bundle> bundles;
-
-        MetaBundle(final String name, final List<Bundle> bundles) {
-            this.name = name;
-            this.bundles = bundles;
-        }
-
-        @Override
-        public String name() {
-            return name;
-        }
-
-        @Override
-        public Collection<Bundle> bundles() {
-            return bundles;
-        }
-
-    }
-
-    /**
-     * Extracts all installed plugin directories from the provided {@code rootPath} expanding meta-plugins if needed.
+     * Extracts all installed plugin directories from the provided {@code rootPath}.
     *
     * @param rootPath the path where the plugins are installed
     * @return a list of all plugin paths installed in the {@code rootPath}
    * @throws IOException if an I/O exception occurred reading the directories
    */
    public static List<Path> findPluginDirs(final Path rootPath) throws IOException {
-        final Tuple<List<Path>, Map<String, List<Path>>> groupedPluginDirs = findGroupedPluginDirs(rootPath);
-        return Stream.concat(
-                groupedPluginDirs.v1().stream(),
-                groupedPluginDirs.v2().values().stream().flatMap(Collection::stream))
-                .collect(Collectors.toList());
-    }
-
-    /**
-     * Extracts all installed plugin directories from the provided {@code rootPath} expanding meta-plugins if needed. The plugins are
-     * grouped into plugins and meta-plugins. The meta-plugins are keyed by the meta-plugin name.
-     *
-     * @param rootPath the path where the plugins are installed
-     * @return a tuple of plugins as the first component and meta-plugins keyed by meta-plugin name as the second component
-     * @throws IOException if an I/O exception occurred reading the directories
-     */
-    private static Tuple<List<Path>, Map<String, List<Path>>> findGroupedPluginDirs(final Path rootPath) throws IOException {
        final List<Path> plugins = new ArrayList<>();
-        final Map<String, List<Path>> metaPlugins = new LinkedHashMap<>();
        final Set<String> seen = new HashSet<>();
        if (Files.exists(rootPath)) {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(rootPath)) {
                for (Path plugin : stream) {
                    if (FileSystemUtils.isDesktopServicesStore(plugin) ||
-                            plugin.getFileName().toString().startsWith(".removing-")) {
+                        plugin.getFileName().toString().startsWith(".removing-")) {
                        continue;
                    }
                    if (seen.add(plugin.getFileName().toString()) == false) {
                        throw new IllegalStateException("duplicate plugin: " + plugin);
                    }
-                    if (MetaPluginInfo.isMetaPlugin(plugin)) {
-                        final String name = plugin.getFileName().toString();
-                        try (DirectoryStream<Path> subStream = Files.newDirectoryStream(plugin)) {
-                            for (Path subPlugin : subStream) {
-                                if (MetaPluginInfo.isPropertiesFile(subPlugin) ||
-                                    FileSystemUtils.isDesktopServicesStore(subPlugin)) {
-                                    continue;
-                                }
-                                if (seen.add(subPlugin.getFileName().toString()) == false) {
-                                    throw new IllegalStateException("duplicate plugin: " + subPlugin);
-                                }
-                                metaPlugins.computeIfAbsent(name, n -> new ArrayList<>()).add(subPlugin);
-                            }
-                        }
-                    } else {
-                        plugins.add(plugin);
-                    }
+                    plugins.add(plugin);
                }
            }
        }
-        return Tuple.tuple(plugins, metaPlugins);
+        return plugins;
    }

    /**
@@ -425,32 +345,21 @@ static void checkForFailedPluginRemovals(final Path pluginsDirectory) throws IOE
    /** Get bundles for plugins installed in the given modules directory. */
    static Set<Bundle> getModuleBundles(Path modulesDirectory) throws IOException {
-        return findBundles(modulesDirectory, "module").stream().flatMap(b -> b.bundles().stream()).collect(Collectors.toSet());
+        return findBundles(modulesDirectory, "module");
    }

    /** Get bundles for plugins installed in the given plugins directory. */
    static Set<Bundle> getPluginBundles(final Path pluginsDirectory) throws IOException {
-        return findBundles(pluginsDirectory, "plugin").stream().flatMap(b -> b.bundles().stream()).collect(Collectors.toSet());
+        return findBundles(pluginsDirectory, "plugin");
    }

    // searches subdirectories under the given directory for plugin directories
-    private static List<BundleCollection> findBundles(final Path directory, String type) throws IOException {
-        final List<BundleCollection> bundles = new ArrayList<>();
-        final Set<Bundle> seenBundles = new HashSet<>();
-        final Tuple<List<Path>, Map<String, List<Path>>> groupedPluginDirs = findGroupedPluginDirs(directory);
-        for (final Path plugin : groupedPluginDirs.v1()) {
-            final Bundle bundle = readPluginBundle(seenBundles, plugin, type);
+    private static Set<Bundle> findBundles(final Path directory, String type) throws IOException {
+        final Set<Bundle> bundles = new HashSet<>();
+        for (final Path plugin : findPluginDirs(directory)) {
+            final Bundle bundle = readPluginBundle(bundles, plugin, type);
            bundles.add(bundle);
        }
-        for (final Map.Entry<String, List<Path>> metaPlugin : groupedPluginDirs.v2().entrySet()) {
-            final List<Bundle> metaPluginBundles = new ArrayList<>();
-            for (final Path metaPluginPlugin : metaPlugin.getValue()) {
-                final Bundle bundle = readPluginBundle(seenBundles, metaPluginPlugin, type);
-                metaPluginBundles.add(bundle);
-            }
-            final MetaBundle metaBundle = new MetaBundle(metaPlugin.getKey(), metaPluginBundles);
-            bundles.add(metaBundle);
-        }
        return bundles;
    }
diff --git a/server/src/test/java/org/elasticsearch/plugins/MetaPluginInfoTests.java b/server/src/test/java/org/elasticsearch/plugins/MetaPluginInfoTests.java
deleted file mode 100644
index c54a13bd30267..0000000000000
--- a/server/src/test/java/org/elasticsearch/plugins/MetaPluginInfoTests.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.elasticsearch.plugins; - -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.Version; -import org.elasticsearch.test.ESTestCase; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.endsWith; - -@LuceneTestCase.SuppressFileSystems(value = "ExtrasFS") -public class MetaPluginInfoTests extends ESTestCase { - - public void testReadFromProperties() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir, - "description", "fake desc", - "name", "my_meta_plugin"); - MetaPluginInfo info = MetaPluginInfo.readFromProperties(pluginDir); - assertEquals("my_meta_plugin", info.getName()); - assertEquals("fake desc", info.getDescription()); - } - - public void testReadFromPropertiesNameMissing() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("property [name] is missing for")); - - PluginTestUtil.writeMetaPluginProperties(pluginDir, "name", ""); - e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("property [name] is missing for")); - } - - public void testReadFromPropertiesDescriptionMissing() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir, "name", "fake-meta-plugin"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("[description] is missing")); - } - - public void testUnknownProperties() throws Exception { - Path pluginDir = createTempDir().resolve("fake-meta-plugin"); - PluginTestUtil.writeMetaPluginProperties(pluginDir, - "extra", "property", - "unknown", "property", - "description", "fake desc", - "name", "my_meta_plugin"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> MetaPluginInfo.readFromProperties(pluginDir)); - assertThat(e.getMessage(), containsString("Unknown properties in meta plugin descriptor")); - } - - public void testExtractAllPluginsWithDuplicates() throws Exception { - Path pluginDir = createTempDir().resolve("plugins"); - // Simple plugin - Path plugin1 = pluginDir.resolve("plugin1"); - Files.createDirectories(plugin1); - PluginTestUtil.writePluginProperties(plugin1, - "description", "fake desc", - "name", "plugin1", - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "FakePlugin"); - - // Meta plugin - Path metaPlugin = pluginDir.resolve("meta_plugin"); - Files.createDirectory(metaPlugin); - PluginTestUtil.writeMetaPluginProperties(metaPlugin, - "description", "fake desc", - "name", "meta_plugin"); - Path plugin2 = metaPlugin.resolve("plugin1"); - Files.createDirectory(plugin2); - PluginTestUtil.writePluginProperties(plugin2, - "description", "fake desc", - "name", "plugin1", - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", 
System.getProperty("java.specification.version"), - "classname", "FakePlugin"); - Path plugin3 = metaPlugin.resolve("plugin2"); - Files.createDirectory(plugin3); - PluginTestUtil.writePluginProperties(plugin3, - "description", "fake desc", - "name", "plugin2", - "version", "1.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "FakePlugin"); - - IllegalStateException exc = - expectThrows(IllegalStateException.class, () -> PluginsService.findPluginDirs(pluginDir)); - assertThat(exc.getMessage(), containsString("duplicate plugin")); - assertThat(exc.getMessage(), endsWith("plugin1")); - } -} diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 4d2eb6f2f36f3..ffecaca452599 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -620,34 +620,7 @@ public void testFindPluginDirs() throws IOException { Files.copy(jar, fake.resolve("plugin.jar")); } - final Path fakeMeta = plugins.resolve("fake-meta"); - - PluginTestUtil.writeMetaPluginProperties(fakeMeta, "description", "description", "name", "fake-meta"); - - final Path fakeMetaCore = fakeMeta.resolve("fake-meta-core"); - PluginTestUtil.writePluginProperties( - fakeMetaCore, - "description", "description", - "name", "fake-meta-core", - "version", "1.0.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "test.DummyPlugin"); - try (InputStream jar = PluginsServiceTests.class.getResourceAsStream("dummy-plugin.jar")) { - Files.copy(jar, fakeMetaCore.resolve("plugin.jar")); - } - - assertThat(PluginsService.findPluginDirs(plugins), containsInAnyOrder(fake, fakeMetaCore)); - } - - public void testMissingMandatoryPlugin() { - final Settings settings = - Settings.builder() - .put("path.home", createTempDir()) - .put("plugin.mandatory", "fake") - .build(); - final IllegalStateException e = expectThrows(IllegalStateException.class, () -> newPluginsService(settings)); - assertThat(e, hasToString(containsString("missing mandatory plugins [fake]"))); + assertThat(PluginsService.findPluginDirs(plugins), containsInAnyOrder(fake)); } public void testExistingMandatoryClasspathPlugin() { @@ -696,38 +669,4 @@ public void testExistingMandatoryInstalledPlugin() throws IOException { .build(); newPluginsService(settings); } - - public void testExistingMandatoryMetaPlugin() throws IOException { - // This test opens a child classloader, reading a jar under the test temp - // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown - // occurs the jar is deleted while the classloader is still open. However, on - // windows, files cannot be deleted when they are still open by a process. 
- assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - final Path pathHome = createTempDir(); - final Path plugins = pathHome.resolve("plugins"); - final Path fakeMeta = plugins.resolve("fake-meta"); - - PluginTestUtil.writeMetaPluginProperties(fakeMeta, "description", "description", "name", "fake-meta"); - - final Path fake = fakeMeta.resolve("fake"); - PluginTestUtil.writePluginProperties( - fake, - "description", "description", - "name", "fake", - "version", "1.0.0", - "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "classname", "test.DummyPlugin"); - try (InputStream jar = PluginsServiceTests.class.getResourceAsStream("dummy-plugin.jar")) { - Files.copy(jar, fake.resolve("plugin.jar")); - } - - final Settings settings = - Settings.builder() - .put("path.home", pathHome) - .put("plugin.mandatory", "fake-meta") - .build(); - newPluginsService(settings); - } - } diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java b/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java index 5a92c99d61870..ff996c800b59e 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/PluginTestUtil.java @@ -27,9 +27,6 @@ /** Utility methods for testing plugins */ public class PluginTestUtil { - public static void writeMetaPluginProperties(Path pluginDir, String... stringProps) throws IOException { - writeProperties(pluginDir.resolve(MetaPluginInfo.ES_META_PLUGIN_PROPERTIES), stringProps); - } public static void writePluginProperties(Path pluginDir, String... stringProps) throws IOException { writeProperties(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES), stringProps); diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index bf26831fae82c..595c562af3707 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -1,6 +1,5 @@ import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.MavenFilteringHack -import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.elasticsearch.gradle.test.NodeInfo diff --git a/x-pack/qa/vagrant/build.gradle b/x-pack/qa/vagrant/build.gradle index c69214578fd16..411b8d90c6d41 100644 --- a/x-pack/qa/vagrant/build.gradle +++ b/x-pack/qa/vagrant/build.gradle @@ -1,8 +1,3 @@ -import org.elasticsearch.gradle.plugin.MetaPluginBuildPlugin -import org.elasticsearch.gradle.plugin.MetaPluginPropertiesExtension -import org.elasticsearch.gradle.plugin.PluginBuildPlugin -import org.elasticsearch.gradle.plugin.PluginPropertiesExtension - apply plugin: 'elasticsearch.vagrantsupport' apply plugin: 'elasticsearch.vagrant' From 661fd65ff3f2f716146ee79c76ab5a641fca482c Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sat, 19 May 2018 08:27:33 -0400 Subject: [PATCH 34/34] Mute testCorruptFileThenSnapshotAndRestore Tracked at #30577 --- .../test/java/org/elasticsearch/index/store/CorruptedFileIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index bbfa56a0e55fe..f1ad1ecd27745 100644 --- a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ 
b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -470,6 +470,7 @@ protected void sendRequest(Connection connection, long requestId, String action, * TODO once checksum verification on snapshotting is implemented this test needs to be fixed or split into several * parts... We should also corrupt files on the actual snapshot and check that we don't restore the corrupted shard. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30577") @TestLogging("org.elasticsearch.monitor.fs:DEBUG") public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException { int numDocs = scaledRandomIntBetween(100, 1000);