From 63d531c6e7306d049b6bb6d0a7e55fc8d53b4939 Mon Sep 17 00:00:00 2001
From: "opensearch-trigger-bot[bot]"
 <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com>
Date: Fri, 13 Oct 2023 15:46:54 +0530
Subject: [PATCH 1/4] Validate checksum of each segment file post download
 from remote store (#10119) (#10590)

(cherry picked from commit 6c022612769e6bc7ed18b9ec0888d6cd0dd415cc)

Signed-off-by: Sachin Kale
---
 .../remotestore/RemoteStoreRestoreIT.java     | 33 ++++++++++++-
 .../store/RemoteSegmentStoreDirectory.java    |  4 ++
 .../org/opensearch/index/store/Store.java     | 47 ++++++++++++++++++-
 3 files changed, 81 insertions(+), 3 deletions(-)

diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
index 7626e3dba6424..212f797180077 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
@@ -10,8 +10,11 @@
 
 import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest;
 import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreResponse;
+import org.opensearch.action.admin.indices.get.GetIndexRequest;
+import org.opensearch.action.admin.indices.get.GetIndexResponse;
 import org.opensearch.action.support.PlainActionFuture;
 import org.opensearch.cluster.health.ClusterHealthStatus;
+import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.cluster.service.ClusterService;
 import org.opensearch.common.settings.Settings;
@@ -19,10 +22,12 @@
 import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.repositories.RepositoriesService;
 import org.opensearch.repositories.Repository;
+import org.opensearch.test.CorruptionUtils;
 import org.opensearch.test.InternalTestCluster;
 import org.opensearch.test.OpenSearchIntegTestCase;
 
 import java.io.IOException;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.HashMap;
 import java.util.Locale;
@@ -30,13 +35,14 @@
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_REPOSITORY_SETTINGS_ATTRIBUTE_KEY_PREFIX;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.greaterThan;
 
-@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 0)
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
 public class RemoteStoreRestoreIT extends BaseRemoteStoreRestoreIT {
 
     /**
@@ -461,5 +467,30 @@ public void testRateLimitedRemoteDownloads() throws Exception {
         }
     }
 
+    public void testRestoreCorruptSegmentShouldFail() throws IOException, ExecutionException, InterruptedException {
+        prepareCluster(1, 3, INDEX_NAME, 0, 1);
+        indexData(randomIntBetween(3, 4), true, INDEX_NAME);
+
+        GetIndexResponse getIndexResponse = client().admin().indices().getIndex(new GetIndexRequest()).get();
+        String indexUUID = getIndexResponse.getSettings().get(INDEX_NAME).get(IndexMetadata.SETTING_INDEX_UUID);
+
+        logger.info("--> Corrupting segment files in remote segment store");
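+        // Editorial note (assumption, derived from the path built below): the remote segment store keeps
+        // shard 0's segment data files under <segmentRepoPath>/<indexUUID>/0/segments/data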
+        Path path = segmentRepoPath.resolve(indexUUID).resolve("0").resolve("segments").resolve("data");
+        try (Stream<Path> dataPath = Files.list(path)) {
+            CorruptionUtils.corruptFile(random(), dataPath.toArray(Path[]::new));
+        }
+
+        logger.info("--> Stop primary");
+        internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME)));
+
+        logger.info("--> Close and restore the index");
+        client().admin()
+            .cluster()
+            .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME).waitForCompletion(true), PlainActionFuture.newFuture());
+
+        logger.info("--> Check for index status, should be red due to corruption");
+        ensureRed(INDEX_NAME);
+    }
+
     // TODO: Restore flow - index aliases
 }
diff --git a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java
index be1f2341236ab..6b43fed3d8930 100644
--- a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java
+++ b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java
@@ -290,6 +290,10 @@ public void setWrittenByMajor(int writtenByMajor) {
                 );
             }
         }
+
+        public int getWrittenByMajor() {
+            return writtenByMajor;
+        }
     }
 
     /**
diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java
index b822742de6e97..d0cd2635ba672 100644
--- a/server/src/main/java/org/opensearch/index/store/Store.java
+++ b/server/src/main/java/org/opensearch/index/store/Store.java
@@ -105,6 +105,7 @@
 import java.io.UncheckedIOException;
 import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
+import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -120,6 +121,7 @@
 import java.util.zip.CRC32;
 import java.util.zip.Checksum;
 
+import static java.lang.Character.MAX_RADIX;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.unmodifiableMap;
 import static org.opensearch.index.seqno.SequenceNumbers.LOCAL_CHECKPOINT_KEY;
@@ -975,7 +977,11 @@ public void copyFrom(Directory from, String src, String dest, IOContext context)
             boolean success = false;
             long startTime = System.currentTimeMillis();
             try {
-                super.copyFrom(from, src, dest, context);
+                if (from instanceof RemoteSegmentStoreDirectory) {
+                    copyFileAndValidateChecksum(from, src, dest, context, fileSize);
+                } else {
+                    super.copyFrom(from, src, dest, context);
+                }
                 success = true;
                 afterDownload(fileSize, startTime);
             } finally {
@@ -985,6 +991,43 @@ public void copyFrom(Directory from, String src, String dest, IOContext context)
             }
         }
 
+        private void copyFileAndValidateChecksum(Directory from, String src, String dest, IOContext context, long fileSize)
+            throws IOException {
+            RemoteSegmentStoreDirectory.UploadedSegmentMetadata metadata = ((RemoteSegmentStoreDirectory) from)
+                .getSegmentsUploadedToRemoteStore()
+                .get(dest);
+            boolean success = false;
+            try (IndexInput is = from.openInput(src, context); IndexOutput os = createOutput(dest, context)) {
+                // Here, we don't need the exact version as LuceneVerifyingIndexOutput does not verify version
+                // It is just used to emit logs when the entire metadata object is provided as parameter. Also,
+                // we can't provide null version as StoreFileMetadata has non-null check on writtenBy field.
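+                // The checksum is re-encoded below in base 36 (MAX_RADIX) so that it matches the
+                // digestToString() representation that StoreFileMetadata checksum strings use.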
+                Version luceneMajorVersion = Version.parse(metadata.getWrittenByMajor() + ".0.0");
+                long checksum = Long.parseLong(metadata.getChecksum());
+                StoreFileMetadata storeFileMetadata = new StoreFileMetadata(
+                    dest,
+                    fileSize,
+                    Long.toString(checksum, MAX_RADIX),
+                    luceneMajorVersion
+                );
+                VerifyingIndexOutput verifyingIndexOutput = new LuceneVerifyingIndexOutput(storeFileMetadata, os);
+                verifyingIndexOutput.copyBytes(is, is.length());
+                verifyingIndexOutput.verify();
+                success = true;
+            } catch (ParseException e) {
+                throw new IOException("Exception while reading version info for segment file from remote store: " + dest, e);
+            } finally {
+                if (success == false) {
+                    // If the exception is thrown after the file is created, we clean up the file.
+                    // We ignore the exception as the deletion is best-effort and can fail if the file does not exist.
+                    try {
+                        deleteFile("Quietly deleting", dest);
+                    } catch (Exception e) {
+                        // Ignore
+                    }
+                }
+            }
+        }
+
         /**
          * Updates the amount of bytes attempted for download
          */
@@ -1476,7 +1519,7 @@ public static boolean isAutogenerated(String name) {
      * Produces a string representation of the given digest value.
      */
     public static String digestToString(long digest) {
-        return Long.toString(digest, Character.MAX_RADIX);
+        return Long.toString(digest, MAX_RADIX);
     }
 
     /**

From 28f6788d6f008733a902f4ed2d05a8131e2581ea Mon Sep 17 00:00:00 2001
From: Sorabh
Date: Fri, 13 Oct 2023 06:47:55 -0700
Subject: [PATCH 2/4] Backport PR 9107 to make search.concurrent.max_slice_count
 setting dynamic with lucene-9.8 (#10606)

Signed-off-by: Sorabh Hamirwasia
---
 CHANGELOG.md                                  |   1 +
 .../breaker/CircuitBreakerServiceIT.java      |   2 +-
 .../search/stats/ConcurrentSearchStatsIT.java |   2 +-
 .../common/settings/ClusterSettings.java      |   3 +-
 .../main/java/org/opensearch/node/Node.java   |   2 -
 .../search/DefaultSearchContext.java          |   9 +
 .../search/SearchBootstrapSettings.java       |  47 ------
 .../org/opensearch/search/SearchService.java  |  14 ++
 .../search/internal/ContextIndexSearcher.java |   5 +-
 .../internal/FilteredSearchContext.java       |   5 +
 .../internal/MaxTargetSliceSupplier.java      |   2 +-
 .../search/internal/SearchContext.java        |   2 +
 .../common/settings/SettingsModuleTests.java  |  15 +-
 .../internal/ContextIndexSearcherTests.java   | 155 +++++++++++++-----
 .../search/internal/IndexReaderUtils.java     |  34 ++--
 .../search/query/QueryPhaseTests.java         |  12 ++
 .../test/OpenSearchIntegTestCase.java         |  11 +-
 .../test/OpenSearchSingleNodeTestCase.java    |  11 +-
 .../opensearch/test/TestSearchContext.java    |  18 ++
 19 files changed, 212 insertions(+), 138 deletions(-)
 delete mode 100644 server/src/main/java/org/opensearch/search/SearchBootstrapSettings.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 110f247c4f6c0..3a8fd83b9e45b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -57,6 +57,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - [Metrics Framework] Add Metrics framework. ([#10241](https://github.com/opensearch-project/OpenSearch/pull/10241))
 - Updating the separator for RemoteStoreLockManager since underscore is allowed in base64UUID url charset ([#10379](https://github.com/opensearch-project/OpenSearch/pull/10379))
 - Add the means to extract the contextual properties from HttpChannel, TcpCChannel and TrasportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562))
+- Backport the PR #9107 for updating CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY setting to a dynamic setting ([#10606](https://github.com/opensearch-project/OpenSearch/pull/10606))
 
 ### Deprecated
 
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
index dfe6889df2319..76f391bdcbb76 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
@@ -77,8 +77,8 @@
 import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.BREAKER;
 import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
 import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
-import static org.opensearch.search.SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY;
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
+import static org.opensearch.search.SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY;
 import static org.opensearch.search.aggregations.AggregationBuilders.cardinality;
 import static org.opensearch.search.aggregations.AggregationBuilders.terms;
 import static org.opensearch.test.OpenSearchIntegTestCase.Scope.TEST;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java
index abfd39793f1a4..7f819450896b1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java
@@ -38,7 +38,7 @@
 import java.util.function.Function;
 
 import static org.opensearch.index.query.QueryBuilders.scriptQuery;
-import static org.opensearch.search.SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY;
+import static org.opensearch.search.SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.lessThan;
 
diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java
index 1f12971fe4771..08158d05e676b 100644
--- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java
+++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java
@@ -137,7 +137,6 @@
 import org.opensearch.repositories.fs.FsRepository;
 import org.opensearch.rest.BaseRestHandler;
 import org.opensearch.script.ScriptService;
-import org.opensearch.search.SearchBootstrapSettings;
 import org.opensearch.search.SearchModule;
 import org.opensearch.search.SearchService;
 import org.opensearch.search.aggregations.MultiBucketConsumerService;
@@ -694,7 +693,7 @@ public void apply(Settings value, Settings current, Settings previous) {
                 List.of(FeatureFlags.CONCURRENT_SEGMENT_SEARCH),
                 List.of(
                     SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING,
-                    SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING
+                    SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING
                 ),
                 List.of(FeatureFlags.TELEMETRY),
                 List.of(TelemetrySettings.TRACER_ENABLED_SETTING, TelemetrySettings.TRACER_SAMPLER_PROBABILITY)
diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java
index 63033ff9cc072..ea31eadf24d44 100644
--- a/server/src/main/java/org/opensearch/node/Node.java
+++ b/server/src/main/java/org/opensearch/node/Node.java
@@ -202,7 +202,6 @@
 import org.opensearch.script.ScriptEngine;
 import org.opensearch.script.ScriptModule;
 import org.opensearch.script.ScriptService;
-import org.opensearch.search.SearchBootstrapSettings;
 import org.opensearch.search.SearchModule;
 import org.opensearch.search.SearchService;
 import org.opensearch.search.aggregations.support.AggregationUsageService;
@@ -485,7 +484,6 @@ protected Node(
 
             // Ensure to initialize Feature Flags via the settings from opensearch.yml
             FeatureFlags.initializeFeatureFlags(settings);
-            SearchBootstrapSettings.initialize(settings);
 
             final List<IdentityPlugin> identityPlugins = new ArrayList<>();
             if (FeatureFlags.isEnabled(FeatureFlags.IDENTITY)) {
diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java
index 59108b70a09af..c229c3924688d 100644
--- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java
+++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java
@@ -971,4 +971,13 @@ public boolean shouldUseTimeSeriesDescSortOptimization() {
             && sort.isSortOnTimeSeriesField()
             && sort.sort.getSort()[0].getReverse() == false;
     }
+
+    @Override
+    public int getTargetMaxSliceCount() {
+        if (shouldUseConcurrentSearch() == false) {
+            throw new IllegalStateException("Target slice count should not be used when concurrent search is disabled");
+        }
+        return clusterService.getClusterSettings().get(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING);
+    }
+
 }
diff --git a/server/src/main/java/org/opensearch/search/SearchBootstrapSettings.java b/server/src/main/java/org/opensearch/search/SearchBootstrapSettings.java
deleted file mode 100644
index 7d167838a77c3..0000000000000
--- a/server/src/main/java/org/opensearch/search/SearchBootstrapSettings.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.search;
-
-import org.opensearch.common.settings.Setting;
-import org.opensearch.common.settings.Settings;
-
-/**
- * Keeps track of all the search related node level settings which can be accessed via static methods
- *
- * @opensearch.internal
- */
-public class SearchBootstrapSettings {
-    // settings to configure maximum slice created per search request using OS custom slice computation mechanism. Default lucene
-    // mechanism will not be used if this setting is set with value > 0
-    public static final String CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY = "search.concurrent.max_slice_count";
-    public static final int CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE = 0;
-
-    // value == 0 means lucene slice computation will be used
-    // this setting will be updated to dynamic setting as part of https://github.com/opensearch-project/OpenSearch/issues/8870
-    public static final Setting<Integer> CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING = Setting.intSetting(
-        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY,
-        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE,
-        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE,
-        Setting.Property.NodeScope
-    );
-    private static Settings settings;
-
-    public static void initialize(Settings openSearchSettings) {
-        settings = openSearchSettings;
-    }
-
-    public static int getTargetMaxSlice() {
-        return (settings != null)
-            ? settings.getAsInt(
-                CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY,
-                CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE
-            )
-            : CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE;
-    }
-}
diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java
index a9e7dcdbc8a7c..b6573b29f8e75 100644
--- a/server/src/main/java/org/opensearch/search/SearchService.java
+++ b/server/src/main/java/org/opensearch/search/SearchService.java
@@ -256,6 +256,20 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
         Property.NodeScope
     );
 
+    // settings to configure maximum slice created per search request using OS custom slice computation mechanism. Default lucene
+    // mechanism will not be used if this setting is set with value > 0
+    public static final String CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY = "search.concurrent.max_slice_count";
+    public static final int CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE = 0;
+
+    // value == 0 means lucene slice computation will be used
+    public static final Setting<Integer> CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING = Setting.intSetting(
+        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY,
+        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE,
+        CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE,
+        Property.Dynamic,
+        Property.NodeScope
+    );
+
     public static final int DEFAULT_SIZE = 10;
     public static final int DEFAULT_FROM = 0;
 
diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java
index 1b5d62dc56de9..188a45a1e2870 100644
--- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java
+++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java
@@ -67,7 +67,6 @@
 import org.opensearch.common.lease.Releasable;
 import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
 import org.opensearch.search.DocValueFormat;
-import org.opensearch.search.SearchBootstrapSettings;
 import org.opensearch.search.SearchService;
 import org.opensearch.search.dfs.AggregatedDfs;
 import org.opensearch.search.profile.ContextualProfileBreakdown;
@@ -451,9 +450,7 @@ public CollectionStatistics collectionStatistics(String field) throws IOExceptio
      */
     @Override
     protected LeafSlice[] slices(List<LeafReaderContext> leaves) {
-        // For now using the static setting to get the targetMaxSlice value. It will be updated to dynamic mechanism as part of
-        // https://github.com/opensearch-project/OpenSearch/issues/8870 when lucene changes are available
-        return slicesInternal(leaves, SearchBootstrapSettings.getTargetMaxSlice());
+        return slicesInternal(leaves, searchContext.getTargetMaxSliceCount());
     }
 
     public DirectoryReader getDirectoryReader() {
diff --git a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java
index 55b1eb30a8f27..327552cbfccdb 100644
--- a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java
+++ b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java
@@ -569,4 +569,9 @@ public boolean shouldUseConcurrentSearch() {
     public boolean shouldUseTimeSeriesDescSortOptimization() {
         return in.shouldUseTimeSeriesDescSortOptimization();
     }
+
+    @Override
+    public int getTargetMaxSliceCount() {
+        return in.getTargetMaxSliceCount();
+    }
 }
diff --git a/server/src/main/java/org/opensearch/search/internal/MaxTargetSliceSupplier.java b/server/src/main/java/org/opensearch/search/internal/MaxTargetSliceSupplier.java
index 4b20ae6e771ea..64984585f3ab6 100644
--- a/server/src/main/java/org/opensearch/search/internal/MaxTargetSliceSupplier.java
+++ b/server/src/main/java/org/opensearch/search/internal/MaxTargetSliceSupplier.java
@@ -40,7 +40,7 @@ static IndexSearcher.LeafSlice[] getSlices(List<LeafReaderContext> leaves, int t
         // Sort by maxDoc, descending:
         sortedLeaves.sort(Collections.reverseOrder(Comparator.comparingInt(l -> l.reader().maxDoc())));
 
-        final List<List<LeafReaderContext>> groupedLeaves = new ArrayList<>();
+        final List<List<LeafReaderContext>> groupedLeaves = new ArrayList<>(targetSliceCount);
         for (int i = 0; i < targetSliceCount; ++i) {
             groupedLeaves.add(new ArrayList<>());
         }
diff --git a/server/src/main/java/org/opensearch/search/internal/SearchContext.java b/server/src/main/java/org/opensearch/search/internal/SearchContext.java
index 5f891e012c676..e1b527b057a6c 100644
--- a/server/src/main/java/org/opensearch/search/internal/SearchContext.java
+++ b/server/src/main/java/org/opensearch/search/internal/SearchContext.java
@@ -487,4 +487,6 @@ public String toString() {
     public abstract BucketCollectorProcessor bucketCollectorProcessor();
 
     public abstract boolean shouldUseTimeSeriesDescSortOptimization();
+
+    public abstract int getTargetMaxSliceCount();
 }
diff --git a/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java b/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java
index df34c19dbab8e..36ae21b4936ff 100644
--- a/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java
+++ b/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java
@@ -36,7 +36,6 @@
 import org.opensearch.common.settings.Setting.Property;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.IndexSettings;
-import org.opensearch.search.SearchBootstrapSettings;
 import org.opensearch.search.SearchService;
 import org.opensearch.test.FeatureFlagSetter;
 import org.hamcrest.Matchers;
@@ -340,32 +339,32 @@ public void testConcurrentSegmentSearchIndexSettings() {
     public void testMaxSliceCountClusterSettingsForConcurrentSearch() {
         // Test that we throw an exception without the feature flag
         Settings settings = Settings.builder()
-            .put(SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), true)
+            .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), 2)
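+            // Any valid int works here; the assertion below only checks for the "unknown setting" error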
             .build();
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settings));
         assertTrue(
             ex.getMessage()
-                .contains("unknown setting [" + SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey())
+                .contains("unknown setting [" + SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey() + "]")
         );
 
         // Test that the settings updates correctly with the feature flag
         FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH);
         int settingValue = randomIntBetween(0, 10);
         Settings settingsWithFeatureFlag = Settings.builder()
-            .put(SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue)
+            .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue)
             .build();
         SettingsModule settingsModule = new SettingsModule(settingsWithFeatureFlag);
         assertEquals(
             settingValue,
-            (int) SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.get(settingsModule.getSettings())
+            (int) SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.get(settingsModule.getSettings())
         );
 
         // Test that negative value is not allowed
         settingValue = -1;
         final Settings settingsWithFeatureFlag_2 = Settings.builder()
-            .put(SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue)
+            .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue)
             .build();
-        ex = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settingsWithFeatureFlag_2));
-        assertTrue(ex.getMessage().contains(SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey()));
+        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settingsWithFeatureFlag_2));
+        assertTrue(iae.getMessage().contains(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey()));
     }
 }
diff --git a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
index 5b6e668175748..b1f70dfce176c 100644
--- a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
+++ b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
@@ -81,7 +81,7 @@
 import org.opensearch.index.IndexSettings;
 import org.opensearch.index.cache.bitset.BitsetFilterCache;
 import org.opensearch.index.shard.IndexShard;
-import org.opensearch.search.SearchBootstrapSettings;
+import org.opensearch.search.SearchService;
 import org.opensearch.search.aggregations.LeafBucketCollector;
 import org.opensearch.test.IndexSettingsModule;
 import org.opensearch.test.OpenSearchTestCase;
@@ -92,6 +92,7 @@
 import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.ExecutorService;
 
 import static org.opensearch.search.internal.ContextIndexSearcher.intersectScorerAndBitSet;
 import static org.opensearch.search.internal.ExitableDirectoryReader.ExitableLeafReader;
@@ -308,55 +309,119 @@ public void onRemoval(ShardId shardId, Accountable accountable) {
 
     public void testSlicesInternal() throws Exception {
         final List<LeafReaderContext> leaves = getLeaves(10);
+        try (
+            final Directory directory = newDirectory();
+            IndexWriter iw = new IndexWriter(
+                directory,
+                new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE)
+            )
+        ) {
+            Document document = new Document();
+            document.add(new StringField("field1", "value1", Field.Store.NO));
+            document.add(new StringField("field2", "value1", Field.Store.NO));
+            iw.addDocument(document);
+            iw.commit();
+            try (DirectoryReader directoryReader = DirectoryReader.open(directory)) {
+                SearchContext searchContext = mock(SearchContext.class);
+                IndexShard indexShard = mock(IndexShard.class);
+                when(searchContext.indexShard()).thenReturn(indexShard);
+                when(searchContext.bucketCollectorProcessor()).thenReturn(SearchContext.NO_OP_BUCKET_COLLECTOR_PROCESSOR);
+                ContextIndexSearcher searcher = new ContextIndexSearcher(
+                    directoryReader,
+                    IndexSearcher.getDefaultSimilarity(),
+                    IndexSearcher.getDefaultQueryCache(),
+                    IndexSearcher.getDefaultQueryCachingPolicy(),
+                    true,
+                    null,
+                    searchContext
+                );
+                // Case 1: Verify the slice count when lucene default slice computation is used
+                IndexSearcher.LeafSlice[] slices = searcher.slicesInternal(
+                    leaves,
+                    SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE
+                );
+                int expectedSliceCount = 2;
+                // 2 slices will be created since max segment per slice of 5 will be reached
+                assertEquals(expectedSliceCount, slices.length);
+                for (int i = 0; i < expectedSliceCount; ++i) {
+                    assertEquals(5, slices[i].leaves.length);
+                }
 
-        final Directory directory = newDirectory();
-        IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE));
-        Document document = new Document();
-        document.add(new StringField("field1", "value1", Field.Store.NO));
-        document.add(new StringField("field2", "value1", Field.Store.NO));
-        iw.addDocument(document);
-        iw.commit();
-        DirectoryReader directoryReader = DirectoryReader.open(directory);
-
-        SearchContext searchContext = mock(SearchContext.class);
-        IndexShard indexShard = mock(IndexShard.class);
-        when(searchContext.indexShard()).thenReturn(indexShard);
-        when(searchContext.bucketCollectorProcessor()).thenReturn(SearchContext.NO_OP_BUCKET_COLLECTOR_PROCESSOR);
-        ContextIndexSearcher searcher = new ContextIndexSearcher(
-            directoryReader,
-            IndexSearcher.getDefaultSimilarity(),
-            IndexSearcher.getDefaultQueryCache(),
-            IndexSearcher.getDefaultQueryCachingPolicy(),
-            true,
-            null,
-            searchContext
-        );
-        // Case 1: Verify the slice count when lucene default slice computation is used
-        IndexSearcher.LeafSlice[] slices = searcher.slicesInternal(
-            leaves,
-            SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE
-        );
-        int expectedSliceCount = 2;
-        // 2 slices will be created since max segment per slice of 5 will be reached
-        assertEquals(expectedSliceCount, slices.length);
-        for (int i = 0; i < expectedSliceCount; ++i) {
-            assertEquals(5, slices[i].leaves.length);
+                // Case 2: Verify the slice count when custom max slice computation is used
+                expectedSliceCount = 4;
+                slices = searcher.slicesInternal(leaves, expectedSliceCount);
+
+                // 4 slices will be created with 3 leaves in first 2 slices and 2 leaves in other slices
+                assertEquals(expectedSliceCount, slices.length);
+                for (int i = 0; i < expectedSliceCount; ++i) {
+                    if (i < 2) {
+                        assertEquals(3, slices[i].leaves.length);
+                    } else {
+                        assertEquals(2, slices[i].leaves.length);
+                    }
+                }
+            }
         }
+    }
 
-        // Case 2: Verify the slice count when custom max slice computation is used
-        expectedSliceCount = 4;
-        slices = searcher.slicesInternal(leaves, expectedSliceCount);
-
-        // 4 slices will be created with 3 leaves in first 2 slices and 2 leaves in other slices
-        assertEquals(expectedSliceCount, slices.length);
-        for (int i = 0; i < expectedSliceCount; ++i) {
-            if (i < 2) {
-                assertEquals(3, slices[i].leaves.length);
-            } else {
-                assertEquals(2, slices[i].leaves.length);
+    public void testGetSlicesWithNonNullExecutorButCSDisabled() throws Exception {
+        final List<LeafReaderContext> leaves = getLeaves(10);
+        try (
+            final Directory directory = newDirectory();
+            IndexWriter iw = new IndexWriter(
+                directory,
+                new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE)
+            )
+        ) {
+            Document document = new Document();
+            document.add(new StringField("field1", "value1", Field.Store.NO));
+            document.add(new StringField("field2", "value1", Field.Store.NO));
+            iw.addDocument(document);
+            iw.commit();
+            try (DirectoryReader directoryReader = DirectoryReader.open(directory)) {
+                SearchContext searchContext = mock(SearchContext.class);
+                IndexShard indexShard = mock(IndexShard.class);
+                when(searchContext.indexShard()).thenReturn(indexShard);
+                when(searchContext.bucketCollectorProcessor()).thenReturn(SearchContext.NO_OP_BUCKET_COLLECTOR_PROCESSOR);
+                when(searchContext.shouldUseConcurrentSearch()).thenReturn(false);
+                ContextIndexSearcher searcher = new ContextIndexSearcher(
+                    directoryReader,
+                    IndexSearcher.getDefaultSimilarity(),
+                    IndexSearcher.getDefaultQueryCache(),
+                    IndexSearcher.getDefaultQueryCachingPolicy(),
+                    true,
+                    null,
+                    searchContext
+                );
+                // Case 1: Verify getSlices returns null when concurrent segment search is disabled
+                assertNull(searcher.getSlices());
+
+                // Case 2: Verify the slice count when custom max slice computation is used
+                searcher = new ContextIndexSearcher(
+                    directoryReader,
+                    IndexSearcher.getDefaultSimilarity(),
+                    IndexSearcher.getDefaultQueryCache(),
+                    IndexSearcher.getDefaultQueryCachingPolicy(),
+                    true,
+                    mock(ExecutorService.class),
+                    searchContext
+                );
+                when(searchContext.shouldUseConcurrentSearch()).thenReturn(true);
+                when(searchContext.getTargetMaxSliceCount()).thenReturn(4);
+                int expectedSliceCount = 4;
+                IndexSearcher.LeafSlice[] slices = searcher.slices(leaves);
+
+                // 4 slices will be created with 3 leaves in first 2 slices and 2 leaves in other slices
+                assertEquals(expectedSliceCount, slices.length);
+                for (int i = 0; i < expectedSliceCount; ++i) {
+                    if (i < 2) {
+                        assertEquals(3, slices[i].leaves.length);
+                    } else {
+                        assertEquals(2, slices[i].leaves.length);
+                    }
+                }
+            }
         }
-        IOUtils.close(directoryReader, iw, directory);
     }
 
     private SparseFixedBitSet query(LeafReaderContext leaf, String field, String value) throws IOException {
diff --git a/server/src/test/java/org/opensearch/search/internal/IndexReaderUtils.java b/server/src/test/java/org/opensearch/search/internal/IndexReaderUtils.java
index a87bb8a52cdd0..16958da77f1a3 100644
--- a/server/src/test/java/org/opensearch/search/internal/IndexReaderUtils.java
+++ b/server/src/test/java/org/opensearch/search/internal/IndexReaderUtils.java
@@ -31,21 +31,25 @@ public class IndexReaderUtils {
      * @return created leaves
      */
     public static List<LeafReaderContext> getLeaves(int leafCount) throws Exception {
-        final Directory directory = newDirectory();
-        IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE));
-        for (int i = 0; i < leafCount; ++i) {
-            Document document = new Document();
-            final String fieldValue = "value" + i;
-            document.add(new StringField("field1", fieldValue, Field.Store.NO));
-            document.add(new StringField("field2", fieldValue, Field.Store.NO));
-            iw.addDocument(document);
-            iw.commit();
+        try (
+            final Directory directory = newDirectory();
+            final IndexWriter iw = new IndexWriter(
+                directory,
+                new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE)
+            )
+        ) {
+            for (int i = 0; i < leafCount; ++i) {
+                Document document = new Document();
+                final String fieldValue = "value" + i;
+                document.add(new StringField("field1", fieldValue, Field.Store.NO));
+                document.add(new StringField("field2", fieldValue, Field.Store.NO));
+                iw.addDocument(document);
+                iw.commit();
+            }
+            try (DirectoryReader directoryReader = DirectoryReader.open(directory)) {
+                List<LeafReaderContext> leaves = directoryReader.leaves();
+                return leaves;
+            }
         }
-        iw.close();
-        DirectoryReader directoryReader = DirectoryReader.open(directory);
-        List<LeafReaderContext> leaves = directoryReader.leaves();
-        directoryReader.close();
-        directory.close();
-        return leaves;
     }
 }
diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
index 7d350847b50e5..39126a607f968 100644
--- a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
+++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
@@ -1209,6 +1209,12 @@ private static ContextIndexSearcher newContextSearcher(IndexReader reader, Execu
         IndexShard indexShard = mock(IndexShard.class);
         when(searchContext.indexShard()).thenReturn(indexShard);
         when(searchContext.bucketCollectorProcessor()).thenReturn(SearchContext.NO_OP_BUCKET_COLLECTOR_PROCESSOR);
+        when(searchContext.shouldUseConcurrentSearch()).thenReturn(executor != null);
+        if (executor != null) {
+            when(searchContext.getTargetMaxSliceCount()).thenReturn(randomIntBetween(0, 2));
+        } else {
+            when(searchContext.getTargetMaxSliceCount()).thenThrow(IllegalStateException.class);
+        }
         return new ContextIndexSearcher(
             reader,
             IndexSearcher.getDefaultSimilarity(),
@@ -1226,6 +1232,12 @@ private static ContextIndexSearcher newEarlyTerminationContextSearcher(IndexRead
         IndexShard indexShard = mock(IndexShard.class);
         when(searchContext.indexShard()).thenReturn(indexShard);
         when(searchContext.bucketCollectorProcessor()).thenReturn(SearchContext.NO_OP_BUCKET_COLLECTOR_PROCESSOR);
+        when(searchContext.shouldUseConcurrentSearch()).thenReturn(executor != null);
+        if (executor != null) {
+            when(searchContext.getTargetMaxSliceCount()).thenReturn(randomIntBetween(0, 2));
+        } else {
+            when(searchContext.getTargetMaxSliceCount()).thenThrow(IllegalStateException.class);
+        }
         return new ContextIndexSearcher(
             reader,
             IndexSearcher.getDefaultSimilarity(),
diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
index 23392072342f2..3310c76bdaa93 100644
--- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
@@ -156,7 +156,6 @@
 import org.opensearch.script.MockScriptService;
 import org.opensearch.script.ScriptMetadata;
 import org.opensearch.search.MockSearchService;
-import org.opensearch.search.SearchBootstrapSettings;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchService;
 import org.opensearch.telemetry.TelemetrySettings;
@@ -1971,15 +1970,15 @@ protected Settings nodeSettings(int nodeOrdinal) {
             // fixed thread pool
             builder.put("thread_pool.search.min_queue_size", 100);
         }
-        if (FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING.get(featureFlagSettings)) {
-            // By default, for tests we will put the target slice count of 2. This will increase the probability of having multiple slices
-            // when tests are run with concurrent segment search enabled
-            builder.put(SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2);
-        }
         // Enable tracer only when Telemetry Setting is enabled
         if (featureFlagSettings().getAsBoolean(FeatureFlags.TELEMETRY_SETTING.getKey(), false)) {
             builder.put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), true);
         }
+        if (FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING.get(featureFlagSettings)) {
+            // By default, for tests we will put the target slice count of 2. This will increase the probability of having multiple slices
+            // when tests are run with concurrent segment search enabled
+            builder.put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2);
+        }
         return builder.build();
     }
 
diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java
index 8977f081b5512..a623c3bd84117 100644
--- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java
@@ -69,7 +69,7 @@
 import org.opensearch.node.NodeValidationException;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.script.MockScriptService;
-import org.opensearch.search.SearchBootstrapSettings;
+import org.opensearch.search.SearchService;
 import org.opensearch.search.internal.SearchContext;
 import org.opensearch.telemetry.TelemetrySettings;
 import org.opensearch.test.telemetry.MockTelemetryPlugin;
@@ -255,11 +255,10 @@ private Node newNode() {
             .put(nodeSettings()) // allow test cases to provide their own settings or override these
             .put(featureFlagSettings());
-        if (Boolean.parseBoolean(settingsBuilder.get(FeatureFlags.CONCURRENT_SEGMENT_SEARCH))
-            && (settingsBuilder.get(SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY) == null)) {
-            // By default, for tests we will put the target slice count of 2 if not explicitly set. This will increase the probability of
-            // having multiple slices when tests are run with concurrent segment search enabled
-            settingsBuilder.put(SearchBootstrapSettings.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2);
+        if (Boolean.parseBoolean(settingsBuilder.get(FeatureFlags.CONCURRENT_SEGMENT_SEARCH))) {
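+            // Note: unlike the removed null check, this now sets the slice count even if a test supplied its own value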
+            // By default, for tests we will put the target slice count of 2. This will increase the probability of having multiple slices
+            // when tests are run with concurrent segment search enabled
+            settingsBuilder.put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2);
         }
 
         Collection<Class<? extends Plugin>> plugins = getPlugins();
diff --git a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java
index 937dc71d55485..bbb3c4a070800 100644
--- a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java
+++ b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java
@@ -83,6 +83,8 @@
 import java.util.List;
 import java.util.Map;
 
+import static org.opensearch.test.OpenSearchTestCase.randomIntBetween;
+
 public class TestSearchContext extends SearchContext {
     public static final SearchShardTarget SHARD_TARGET = new SearchShardTarget(
         "test",
@@ -118,6 +120,7 @@ public class TestSearchContext extends SearchContext {
     private CollapseContext collapse;
     protected boolean concurrentSegmentSearchEnabled;
     private BucketCollectorProcessor bucketCollectorProcessor = NO_OP_BUCKET_COLLECTOR_PROCESSOR;
+    private int maxSliceCount;
 
     /**
      * Sets the concurrent segment search enabled field
@@ -126,6 +129,14 @@ public void setConcurrentSegmentSearchEnabled(boolean concurrentSegmentSearchEna
         this.concurrentSegmentSearchEnabled = concurrentSegmentSearchEnabled;
     }
 
+    /**
+     * Sets the maxSliceCount for concurrent search
+     * @param sliceCount maxSliceCount
+     */
+    public void setMaxSliceCount(int sliceCount) {
+        this.maxSliceCount = sliceCount;
+    }
+
     private final Map<String, SearchExtBuilder> searchExtBuilders = new HashMap<>();
 
     private ShardSearchRequest request;
@@ -163,6 +174,7 @@ public TestSearchContext(
         this.queryShardContext = queryShardContext;
         this.searcher = searcher;
         this.concurrentSegmentSearchEnabled = searcher != null && (searcher.getExecutor() != null);
+        this.maxSliceCount = randomIntBetween(0, 2);
         this.scrollContext = scrollContext;
     }
 
@@ -689,6 +701,12 @@ public boolean shouldUseTimeSeriesDescSortOptimization() {
             && sort.sort.getSort()[0].getReverse() == false;
     }
 
+    @Override
+    public int getTargetMaxSliceCount() {
+        assert concurrentSegmentSearchEnabled == true : "Please use concurrent search before fetching maxSliceCount";
+        return maxSliceCount;
+    }
+
     /**
      * Clean the query results by consuming all of it
      */

From d4de421d319b2ba44acaa0b58c0e02071d2ca420 Mon Sep 17 00:00:00 2001
From: Andriy Redko
Date: Fri, 13 Oct 2023 14:31:11 -0400
Subject: [PATCH 3/4] Bump netty from 4.1.99.Final to 4.1.100.Final (#10564)
 (#10571)

Signed-off-by: Andriy Redko
(cherry picked from commit a54108c6ef378dbbe432e6248240d4f1afd74047)

Signed-off-by: Andriy Redko
---
 CHANGELOG.md                                                      | 1 +
 buildSrc/version.properties                                       | 2 +-
 .../licenses/netty-buffer-4.1.100.Final.jar.sha1                  | 1 +
 .../licenses/netty-buffer-4.1.99.Final.jar.sha1                   | 1 -
 .../licenses/netty-codec-4.1.100.Final.jar.sha1                   | 1 +
 .../transport-netty4/licenses/netty-codec-4.1.99.Final.jar.sha1   | 1 -
 .../licenses/netty-codec-http-4.1.100.Final.jar.sha1              | 1 +
 .../licenses/netty-codec-http-4.1.99.Final.jar.sha1               | 1 -
 .../licenses/netty-common-4.1.100.Final.jar.sha1                  | 1 +
 .../licenses/netty-common-4.1.99.Final.jar.sha1                   | 1 -
 .../licenses/netty-handler-4.1.100.Final.jar.sha1                 | 1 +
 .../licenses/netty-handler-4.1.99.Final.jar.sha1                  | 1 -
 .../licenses/netty-resolver-4.1.100.Final.jar.sha1                | 1 +
 .../licenses/netty-resolver-4.1.99.Final.jar.sha1                 | 1 -
 .../licenses/netty-transport-4.1.100.Final.jar.sha1               | 1 +
 .../licenses/netty-transport-4.1.99.Final.jar.sha1                | 1 -
 .../netty-transport-native-unix-common-4.1.100.Final.jar.sha1     | 1 +
 .../netty-transport-native-unix-common-4.1.99.Final.jar.sha1      | 1 -
 .../licenses/netty-codec-dns-4.1.100.Final.jar.sha1               | 1 +
 .../licenses/netty-codec-dns-4.1.99.Final.jar.sha1                | 1 -
 .../licenses/netty-codec-http2-4.1.100.Final.jar.sha1             | 1 +
 .../licenses/netty-codec-http2-4.1.99.Final.jar.sha1              | 1 -
 .../licenses/netty-codec-socks-4.1.100.Final.jar.sha1             | 1 +
 .../licenses/netty-codec-socks-4.1.99.Final.jar.sha1              | 1 -
 .../licenses/netty-handler-proxy-4.1.100.Final.jar.sha1           | 1 +
 .../licenses/netty-handler-proxy-4.1.99.Final.jar.sha1            | 1 -
 .../licenses/netty-resolver-dns-4.1.100.Final.jar.sha1            | 1 +
 .../licenses/netty-resolver-dns-4.1.99.Final.jar.sha1             | 1 -
 .../netty-transport-native-unix-common-4.1.100.Final.jar.sha1     | 1 +
 .../netty-transport-native-unix-common-4.1.99.Final.jar.sha1      | 1 -
 .../repository-hdfs/licenses/netty-all-4.1.100.Final.jar.sha1     | 1 +
 .../repository-hdfs/licenses/netty-all-4.1.99.Final.jar.sha1      | 1 -
 .../repository-s3/licenses/netty-buffer-4.1.100.Final.jar.sha1    | 1 +
 .../repository-s3/licenses/netty-buffer-4.1.99.Final.jar.sha1     | 1 -
 .../repository-s3/licenses/netty-codec-4.1.100.Final.jar.sha1     | 1 +
 .../repository-s3/licenses/netty-codec-4.1.99.Final.jar.sha1      | 1 -
 .../licenses/netty-codec-http-4.1.100.Final.jar.sha1              | 1 +
 .../licenses/netty-codec-http-4.1.99.Final.jar.sha1               | 1 -
 .../licenses/netty-codec-http2-4.1.100.Final.jar.sha1             | 1 +
 .../licenses/netty-codec-http2-4.1.99.Final.jar.sha1              | 1 -
 .../repository-s3/licenses/netty-common-4.1.100.Final.jar.sha1    | 1 +
 .../repository-s3/licenses/netty-common-4.1.99.Final.jar.sha1     | 1 -
 .../repository-s3/licenses/netty-handler-4.1.100.Final.jar.sha1   | 1 +
 .../repository-s3/licenses/netty-handler-4.1.99.Final.jar.sha1    | 1 -
 .../licenses/netty-resolver-4.1.100.Final.jar.sha1                | 1 +
 .../repository-s3/licenses/netty-resolver-4.1.99.Final.jar.sha1   | 1 -
 .../licenses/netty-transport-4.1.100.Final.jar.sha1               | 1 +
 .../licenses/netty-transport-4.1.99.Final.jar.sha1                | 1 -
 .../netty-transport-classes-epoll-4.1.100.Final.jar.sha1          | 1 +
 .../netty-transport-classes-epoll-4.1.99.Final.jar.sha1           | 1 -
 .../netty-transport-native-unix-common-4.1.100.Final.jar.sha1     | 1 +
 .../netty-transport-native-unix-common-4.1.99.Final.jar.sha1      | 1 -
 .../transport-nio/licenses/netty-buffer-4.1.100.Final.jar.sha1    | 1 +
 .../transport-nio/licenses/netty-buffer-4.1.99.Final.jar.sha1     | 1 -
 .../transport-nio/licenses/netty-codec-4.1.100.Final.jar.sha1     | 1 +
 .../transport-nio/licenses/netty-codec-4.1.99.Final.jar.sha1      | 1 -
 .../licenses/netty-codec-http-4.1.100.Final.jar.sha1              | 1 +
 .../licenses/netty-codec-http-4.1.99.Final.jar.sha1               | 1 -
 .../transport-nio/licenses/netty-common-4.1.100.Final.jar.sha1    | 1 +
 .../transport-nio/licenses/netty-common-4.1.99.Final.jar.sha1     | 1 -
 .../transport-nio/licenses/netty-handler-4.1.100.Final.jar.sha1   | 1 +
 .../transport-nio/licenses/netty-handler-4.1.99.Final.jar.sha1    | 1 -
 .../licenses/netty-resolver-4.1.100.Final.jar.sha1                | 1 +
 .../transport-nio/licenses/netty-resolver-4.1.99.Final.jar.sha1   | 1 -
 .../licenses/netty-transport-4.1.100.Final.jar.sha1               | 1 +
 .../licenses/netty-transport-4.1.99.Final.jar.sha1                | 1 -
 66 files changed, 34 insertions(+), 33 deletions(-)
 create mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.99.Final.jar.sha1
 create mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.99.Final.jar.sha1
 create mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.99.Final.jar.sha1
 create mode 100644 modules/transport-netty4/licenses/netty-common-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-common-4.1.99.Final.jar.sha1
 create mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.99.Final.jar.sha1
 create mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.99.Final.jar.sha1
 create mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.99.Final.jar.sha1
 create mode 100644 modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1
 delete mode 100644 modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-common-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-common-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-handler-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-handler-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-transport-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-transport-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.99.Final.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1
 create mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.99.Final.jar.sha1
 create mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.99.Final.jar.sha1
 create mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.99.Final.jar.sha1
 create mode 100644 plugins/transport-nio/licenses/netty-common-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/transport-nio/licenses/netty-common-4.1.99.Final.jar.sha1
 create mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.99.Final.jar.sha1
 create mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.99.Final.jar.sha1
 create mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.100.Final.jar.sha1
 delete mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.99.Final.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3a8fd83b9e45b..169a101daf114 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -42,6 +42,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `peter-evans/create-pull-request` from 3 to 5 ([#10301](https://github.com/opensearch-project/OpenSearch/pull/10301))
 - Bump `org.apache.avro:avro` from 1.11.2 to 1.11.3 ([#10210](https://github.com/opensearch-project/OpenSearch/pull/10210))
 - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
+- Bump `netty` from 4.1.99.Final to 4.1.100.Final ([#10564](https://github.com/opensearch-project/OpenSearch/pull/10564))
 - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))
 
 ### Changed
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index fc221b373e7cf..fb2c5de4c3c1a 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -26,7 +26,7 @@ jakarta_annotation = 1.3.5
 
 # when updating the JNA version, also update the version in buildSrc/build.gradle
 jna = 5.13.0
 
-netty = 4.1.99.Final
+netty = 4.1.100.Final
 joda = 2.12.2
 
 # client dependencies
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.100.Final.jar.sha1
new file mode 100644
index 0000000000000..aaf2e35302d77
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.100.Final.jar.sha1
@@ -0,0 +1 @@
+39b05d2d4027971bf99111a9be1d7035a116bb55
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.99.Final.jar.sha1
deleted file mode 100644
index 5b393be40e945..0000000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.99.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9f02dcb9b15a647a56af210dffdc294a57922fb0
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.100.Final.jar.sha1
new file mode 100644
index 0000000000000..a77333ea8ae47
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-4.1.100.Final.jar.sha1
@@ -0,0 +1 @@
+9c3c71e7cf3b8ce3bfc9fa52a524b9ca7ddf259c
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.99.Final.jar.sha1
deleted file mode 100644
index 45ea27d29a183..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-4.1.99.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9984cbd6e5d55c768f198e975d8aaf7fd42a4602
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.100.Final.jar.sha1
new file mode 100644
index 0000000000000..6f26bf4e6a9b5
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.100.Final.jar.sha1
@@ -0,0 +1 @@
+992623e7d8f2d96e41faf1687bb963f5433e3517
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.99.Final.jar.sha1
deleted file mode 100644
index 6bb7fcd68b272..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http-4.1.99.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7142095066eaebd5f29b88c41af7b383b6a953f6
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.100.Final.jar.sha1
new file mode 100644
index 0000000000000..d2ff72db60d1f
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-common-4.1.100.Final.jar.sha1
@@ -0,0 +1 @@
+847f942381145de23f21c836d05b0677474271d3
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.99.Final.jar.sha1
deleted file mode 100644
index d53adfa649f5f..0000000000000
--- a/modules/transport-netty4/licenses/netty-common-4.1.99.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-278f6dfa49d6bd75c40ae1470eb165716f87dce0
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.100.Final.jar.sha1
new file mode 100644
index 0000000000000..f12a6046e96d0
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-handler-4.1.100.Final.jar.sha1
@@ -0,0 +1 @@
+4c0acdb8bb73647ebb3847ac2d503d53d72c02b4
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.99.Final.jar.sha1
deleted file mode 100644
index 258f7c957dda0..0000000000000
--- a/modules/transport-netty4/licenses/netty-handler-4.1.99.Final.jar.sha1
+++ /dev/null @@ -1 +0,0 @@ -742693761d7ea4c038bccfda96bb38194720b80d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..8e4179ba15942 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +fe62f9ccd41b8660d07639dbbab8ae1edd6f2720 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.99.Final.jar.sha1 deleted file mode 100644 index b8bc0a4370f58..0000000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -080e45397d9d5b134477de3ffd0f94283b908621 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..ab2819da570fd --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +6620fbfb47667a5eb6050e35c7b4c88000bcd77f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.99.Final.jar.sha1 deleted file mode 100644 index 247975e0a64c7..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ca2e3ae19a6713b749df154622115f480b6716c \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..30d7758302e37 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +648ff5571022dbfa6789122e3872477bbf67fa7b \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 deleted file mode 100644 index 6b7b66ea768e3..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cb0fc6c31c387404212949c57950b5d72ce908b9 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.100.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..dfa4a0fbea94c --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +663b1b7bf3ff0f12fde4df20c72d9e94584ebffa \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.99.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.99.Final.jar.sha1 deleted file mode 100644 index 6c1112ed49775..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -21c76a42a468faafac6c84f8aca775073fc8e345 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.100.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.100.Final.jar.sha1 new file mode 100644 index 
0000000000000..bf5605151406e --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-http2-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +cbf1a430ea44dbdedbcde16b185cbb95f28d72c7 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.99.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.99.Final.jar.sha1 deleted file mode 100644 index f9bdefc6dd965..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5a3481c4bb9732a3a94fb63cf916141a1a14669 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.100.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..8e9bc8c96aec7 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +a9fbf4d64b08abed542eefd5f7aed4807edca56f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.99.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.99.Final.jar.sha1 deleted file mode 100644 index 717703c36e1ab..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -259bf1c5178c3e23bb89a2fab59b6d22846e3fa6 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.100.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..35d9d82202274 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +af3cf676eed30184215426ecf0f0dde15555ea9c \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.99.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.99.Final.jar.sha1 deleted file mode 100644 index 3f69ae54c5d4a..0000000000000 --- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8c8a89ea89b06e120c57bdb3db14b9a47ca30bb3 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.100.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..0948daa05fff6 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +62dbdf5f25eda75ea8456be1ed72b3fcb0d18774 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.99.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.99.Final.jar.sha1 deleted file mode 100644 index adef44a4e7da7..0000000000000 --- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -804d8b752847923d3bb81f24de604597047c9b2e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..30d7758302e37 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +648ff5571022dbfa6789122e3872477bbf67fa7b \ No newline at end of file diff --git 
a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 deleted file mode 100644 index 6b7b66ea768e3..0000000000000 --- a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cb0fc6c31c387404212949c57950b5d72ce908b9 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.100.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..a9aa34392903e --- /dev/null +++ b/plugins/repository-hdfs/licenses/netty-all-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +5ef15a3ce29a792b7ad17438e5f84c617b3f2993 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.99.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.99.Final.jar.sha1 deleted file mode 100644 index 0756635018837..0000000000000 --- a/plugins/repository-hdfs/licenses/netty-all-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a45aa70bc50d0500da5cdcd595cc838d87ada987 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..aaf2e35302d77 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-buffer-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +39b05d2d4027971bf99111a9be1d7035a116bb55 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.99.Final.jar.sha1 deleted file mode 100644 index 5b393be40e945..0000000000000 --- a/plugins/repository-s3/licenses/netty-buffer-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f02dcb9b15a647a56af210dffdc294a57922fb0 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..a77333ea8ae47 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +9c3c71e7cf3b8ce3bfc9fa52a524b9ca7ddf259c \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.99.Final.jar.sha1 deleted file mode 100644 index 45ea27d29a183..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9984cbd6e5d55c768f198e975d8aaf7fd42a4602 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..6f26bf4e6a9b5 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +992623e7d8f2d96e41faf1687bb963f5433e3517 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.99.Final.jar.sha1 deleted file mode 100644 index 6bb7fcd68b272..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7142095066eaebd5f29b88c41af7b383b6a953f6 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.100.Final.jar.sha1 
b/plugins/repository-s3/licenses/netty-codec-http2-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..bf5605151406e --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http2-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +cbf1a430ea44dbdedbcde16b185cbb95f28d72c7 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.99.Final.jar.sha1 deleted file mode 100644 index f9bdefc6dd965..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http2-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5a3481c4bb9732a3a94fb63cf916141a1a14669 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..d2ff72db60d1f --- /dev/null +++ b/plugins/repository-s3/licenses/netty-common-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +847f942381145de23f21c836d05b0677474271d3 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.99.Final.jar.sha1 deleted file mode 100644 index d53adfa649f5f..0000000000000 --- a/plugins/repository-s3/licenses/netty-common-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -278f6dfa49d6bd75c40ae1470eb165716f87dce0 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..f12a6046e96d0 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-handler-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +4c0acdb8bb73647ebb3847ac2d503d53d72c02b4 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.99.Final.jar.sha1 deleted file mode 100644 index 258f7c957dda0..0000000000000 --- a/plugins/repository-s3/licenses/netty-handler-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -742693761d7ea4c038bccfda96bb38194720b80d \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..8e4179ba15942 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-resolver-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +fe62f9ccd41b8660d07639dbbab8ae1edd6f2720 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.99.Final.jar.sha1 deleted file mode 100644 index b8bc0a4370f58..0000000000000 --- a/plugins/repository-s3/licenses/netty-resolver-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -080e45397d9d5b134477de3ffd0f94283b908621 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..ab2819da570fd --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +6620fbfb47667a5eb6050e35c7b4c88000bcd77f \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.99.Final.jar.sha1 deleted file mode 100644 index 
247975e0a64c7..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ca2e3ae19a6713b749df154622115f480b6716c \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..5805fdaf411d1 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +78489936ca1d91483e34a31d04a3b0812386eb39 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.99.Final.jar.sha1 deleted file mode 100644 index 75b64ad4197d8..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -961bd5b8d97ea6a07168176462f398089a24b5c8 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..30d7758302e37 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +648ff5571022dbfa6789122e3872477bbf67fa7b \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 deleted file mode 100644 index 6b7b66ea768e3..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cb0fc6c31c387404212949c57950b5d72ce908b9 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.100.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..aaf2e35302d77 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +39b05d2d4027971bf99111a9be1d7035a116bb55 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.99.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.99.Final.jar.sha1 deleted file mode 100644 index 5b393be40e945..0000000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f02dcb9b15a647a56af210dffdc294a57922fb0 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.100.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..a77333ea8ae47 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +9c3c71e7cf3b8ce3bfc9fa52a524b9ca7ddf259c \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.99.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.99.Final.jar.sha1 deleted file mode 100644 index 45ea27d29a183..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9984cbd6e5d55c768f198e975d8aaf7fd42a4602 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.100.Final.jar.sha1 
b/plugins/transport-nio/licenses/netty-codec-http-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..6f26bf4e6a9b5 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +992623e7d8f2d96e41faf1687bb963f5433e3517 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.99.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.99.Final.jar.sha1 deleted file mode 100644 index 6bb7fcd68b272..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7142095066eaebd5f29b88c41af7b383b6a953f6 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.100.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..d2ff72db60d1f --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +847f942381145de23f21c836d05b0677474271d3 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.99.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.99.Final.jar.sha1 deleted file mode 100644 index d53adfa649f5f..0000000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -278f6dfa49d6bd75c40ae1470eb165716f87dce0 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.100.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..f12a6046e96d0 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +4c0acdb8bb73647ebb3847ac2d503d53d72c02b4 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.99.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.99.Final.jar.sha1 deleted file mode 100644 index 258f7c957dda0..0000000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -742693761d7ea4c038bccfda96bb38194720b80d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.100.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..8e4179ba15942 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +fe62f9ccd41b8660d07639dbbab8ae1edd6f2720 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.99.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.99.Final.jar.sha1 deleted file mode 100644 index b8bc0a4370f58..0000000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.99.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -080e45397d9d5b134477de3ffd0f94283b908621 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.100.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.100.Final.jar.sha1 new file mode 100644 index 0000000000000..ab2819da570fd --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.100.Final.jar.sha1 @@ -0,0 +1 @@ +6620fbfb47667a5eb6050e35c7b4c88000bcd77f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.99.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.99.Final.jar.sha1 deleted file mode 100644 index 
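A note on the mechanics of the bump above: OpenSearch pins every bundled third-party jar with a checked-in <artifact>.jar.sha1 license file, and the build's dependency-license checks verify each jar against its recorded digest, which is why a one-line change to buildSrc/version.properties drags in all the create/delete pairs listed here. The checksum files are normally regenerated by the build itself (recent trees expose a Gradle helper for this, named `updateSHAs` at the time of writing; the developer guide is the authority). The sketch below, with an example jar name taken from this patch and an invented class name, shows how such a digest can be reproduced by hand; it is illustrative only, not part of the build.

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.MessageDigest;

    // Hypothetical helper: prints the lowercase hex SHA-1 of a jar, which is
    // the exact format stored in the *.jar.sha1 license files above.
    public class JarShaCheck {
        public static void main(String[] args) throws Exception {
            byte[] jar = Files.readAllBytes(Paths.get("netty-buffer-4.1.100.Final.jar"));
            byte[] digest = MessageDigest.getInstance("SHA-1").digest(jar);
            StringBuilder hex = new StringBuilder();
            for (byte b : digest) {
                hex.append(String.format("%02x", b)); // bytes render as two hex digits
            }
            // Expect this to match the checked-in netty-buffer-4.1.100.Final.jar.sha1:
            // 39b05d2d4027971bf99111a9be1d7035a116bb55
            System.out.println(hex);
        }
    }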
From e6cb386585a6ecafcc4c33bfd059cf1bf234daa8 Mon Sep 17 00:00:00 2001
From: gaobinlong
Date: Sat, 14 Oct 2023 02:45:03 +0800
Subject: [PATCH 4/4] Fix class_cast_exception when passing int to _version
 and other metadata fields in ingest simulate API (#10101) (#10478)

* Fix class_cast_exception when passing int to _version and other metadata fields in ingest simulate API

Signed-off-by: Gao Binlong

* modify change log

Signed-off-by: Gao Binlong

* Add more tests

Signed-off-by: Gao Binlong

---------

Signed-off-by: Gao Binlong
Signed-off-by: Daniel (dB.) Doubrovkine
Co-authored-by: Daniel (dB.) Doubrovkine
(cherry picked from commit fdaa438ee03db285417bc0bd4204a0fbd4e699c9)
---
 CHANGELOG.md                                  |   1 +
 .../rest-api-spec/test/ingest/90_simulate.yml | 137 ++++++++++++++++++
 .../ingest/SimulatePipelineRequest.java       |  28 +++-
 .../SimulatePipelineRequestParsingTests.java  |  60 +++++++-
 4 files changed, 215 insertions(+), 11 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 169a101daf114..1a66011a81e6a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -73,6 +73,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Fix registration and initialization of multiple extensions ([10256](https://github.com/opensearch-project/OpenSearch/pull/10256))
 - Fix circular dependency in Settings initialization ([10194](https://github.com/opensearch-project/OpenSearch/pull/10194))
 - Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([10370](https://github.com/opensearch-project/OpenSearch/pull/10370))
+- Fix class_cast_exception when passing int to _version and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101))
 
 ### Security
 
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
index e012a82b15927..7c073739f6a1f 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml
@@ -976,3 +976,140 @@ teardown:
       }
   - match: { error.root_cause.0.type: "illegal_argument_exception" }
   - match: { error.root_cause.0.reason: "Pipeline processor configured for non-existent pipeline [____pipeline_doesnot_exist___]" }
+
+---
+"Test simulate with docs containing metadata fields":
+  - do:
+      ingest.simulate:
+        body: >
+          {
+            "pipeline": {
+              "description": "_description",
+              "processors": [
+                {
+                  "set" : {
+                    "field": "field2",
+                    "value": "foo"
+                  }
+                }
+              ]
+            },
+            "docs": [
+              {
+                "_index": "index",
+                "_id": "id",
+                "_routing": "foo",
+                "_version": 100,
+                "_if_seq_no": 12333333333333333,
+                "_if_primary_term": 1,
+                "_source": {
+                  "foo": "bar"
+                }
+              }
+            ]
+          }
+
+  - length: { docs: 1 }
+  - match: { docs.0.doc._index: "index" }
+  - match: { docs.0.doc._id: "id" }
+  - match: { docs.0.doc._routing: "foo" }
+  - match: { docs.0.doc._version: "100" }
+  - match: { docs.0.doc._if_seq_no: "12333333333333333" }
+  - match: { docs.0.doc._if_primary_term: "1" }
+  - match: { docs.0.doc._source.foo: "bar" }
+
+  - do:
+      catch: bad_request
+      ingest.simulate:
+        body: >
+          {
+            "pipeline": {
+              "description": "_description",
+              "processors": [
+                {
+                  "set" : {
+                    "field" : "field2",
+                    "value": "foo"
+                  }
+                }
+              ]
+            },
+            "docs": [
+              {
+                "_index": "index",
+                "_id": "id",
+                "_routing": "foo",
+                "_version": "bar",
+                "_source": {
+                  "foo": "bar"
+                }
+              }
+            ]
+          }
+  - match: { status: 400 }
+  - match: { error.root_cause.0.type: "illegal_argument_exception" }
+  - match: { error.root_cause.0.reason: "Failed to parse parameter [_version], only int or long is accepted" }
+
+  - do:
+      catch: bad_request
+      ingest.simulate:
+        body: >
+          {
+            "pipeline": {
+              "description": "_description",
+              "processors": [
+                {
+                  "set" : {
+                    "field" : "field2",
+                    "value": "foo"
+                  }
+                }
+              ]
+            },
+            "docs": [
+              {
+                "_index": "index",
+                "_id": "id",
+                "_routing": "foo",
+                "_if_seq_no": "123",
+                "_source": {
+                  "foo": "bar"
+                }
+              }
+            ]
+          }
+  - match: { status: 400 }
+  - match: { error.root_cause.0.type: "illegal_argument_exception" }
+  - match: { error.root_cause.0.reason: "Failed to parse parameter [_if_seq_no], only int or long is accepted" }
+
+  - do:
+      catch: bad_request
+      ingest.simulate:
+        body: >
+          {
+            "pipeline": {
+              "description": "_description",
+              "processors": [
+                {
+                  "set" : {
+                    "field" : "field2",
+                    "value": "foo"
+                  }
+                }
+              ]
+            },
+            "docs": [
+              {
+                "_index": "index",
+                "_id": "id",
+                "_routing": "foo",
+                "_if_primary_term": "1",
+                "_source": {
+                  "foo": "bar"
+                }
+              }
+            ]
+          }
+  - match: { status: 400 }
+  - match: { error.root_cause.0.type: "illegal_argument_exception" }
+  - match: { error.root_cause.0.reason: "Failed to parse parameter [_if_primary_term], only int or long is accepted" }
diff --git a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java
index 2234934499609..ec3ee981b646f 100644
--- a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java
+++ b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java
@@ -218,7 +218,12 @@ private static List<IngestDocument> parseDocs(Map<String, Object> config) {
             String routing = ConfigurationUtils.readOptionalStringOrIntProperty(null, null, dataMap, Metadata.ROUTING.getFieldName());
             Long version = null;
             if (dataMap.containsKey(Metadata.VERSION.getFieldName())) {
-                version = (Long) ConfigurationUtils.readObject(null, null, dataMap, Metadata.VERSION.getFieldName());
+                Object versionFieldValue = ConfigurationUtils.readObject(null, null, dataMap, Metadata.VERSION.getFieldName());
+                if (versionFieldValue instanceof Integer || versionFieldValue instanceof Long) {
+                    version = ((Number) versionFieldValue).longValue();
+                } else {
+                    throw new IllegalArgumentException("Failed to parse parameter [_version], only int or long is accepted");
+                }
             }
             VersionType versionType = null;
             if (dataMap.containsKey(Metadata.VERSION_TYPE.getFieldName())) {
@@ -228,12 +233,25 @@ private static List<IngestDocument> parseDocs(Map<String, Object> config) {
             }
             IngestDocument ingestDocument = new IngestDocument(index, id, routing, version, versionType, document);
             if (dataMap.containsKey(Metadata.IF_SEQ_NO.getFieldName())) {
-                Long ifSeqNo = (Long) ConfigurationUtils.readObject(null, null, dataMap, Metadata.IF_SEQ_NO.getFieldName());
-                ingestDocument.setFieldValue(Metadata.IF_SEQ_NO.getFieldName(), ifSeqNo);
+                Object ifSeqNoFieldValue = ConfigurationUtils.readObject(null, null, dataMap, Metadata.IF_SEQ_NO.getFieldName());
+                if (ifSeqNoFieldValue instanceof Integer || ifSeqNoFieldValue instanceof Long) {
+                    ingestDocument.setFieldValue(Metadata.IF_SEQ_NO.getFieldName(), ((Number) ifSeqNoFieldValue).longValue());
+                } else {
+                    throw new IllegalArgumentException("Failed to parse parameter [_if_seq_no], only int or long is accepted");
+                }
             }
             if (dataMap.containsKey(Metadata.IF_PRIMARY_TERM.getFieldName())) {
-                Long ifPrimaryTerm = (Long) ConfigurationUtils.readObject(null, null, dataMap, Metadata.IF_PRIMARY_TERM.getFieldName());
-                ingestDocument.setFieldValue(Metadata.IF_PRIMARY_TERM.getFieldName(), ifPrimaryTerm);
+                Object ifPrimaryTermFieldValue = ConfigurationUtils.readObject(
+                    null,
+                    null,
+                    dataMap,
+                    Metadata.IF_PRIMARY_TERM.getFieldName()
+                );
+                if (ifPrimaryTermFieldValue instanceof Integer || ifPrimaryTermFieldValue instanceof Long) {
+                    ingestDocument.setFieldValue(Metadata.IF_PRIMARY_TERM.getFieldName(), ((Number) ifPrimaryTermFieldValue).longValue());
+                } else {
+                    throw new IllegalArgumentException("Failed to parse parameter [_if_primary_term], only int or long is accepted");
+                }
             }
             ingestDocumentList.add(ingestDocument);
         }
diff --git a/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java b/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java
index 705fb546a2fed..04a9d08bb22bc 100644
--- a/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java
+++ b/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java
@@ -144,17 +144,29 @@ public void innerTestParseWithProvidedPipeline() throws Exception {
             List<IngestDocument.Metadata> fields = Arrays.asList(INDEX, ID, ROUTING, VERSION, VERSION_TYPE, IF_SEQ_NO, IF_PRIMARY_TERM);
             for (IngestDocument.Metadata field : fields) {
                 if (field == VERSION) {
-                    Long value = randomLong();
-                    doc.put(field.getFieldName(), value);
-                    expectedDoc.put(field.getFieldName(), value);
+                    if (randomBoolean()) {
+                        Long value = randomLong();
+                        doc.put(field.getFieldName(), value);
+                        expectedDoc.put(field.getFieldName(), value);
+                    } else {
+                        Integer value = randomIntBetween(1, 1000000);
+                        doc.put(field.getFieldName(), value);
+                        expectedDoc.put(field.getFieldName(), value);
+                    }
                 } else if (field == VERSION_TYPE) {
                     String value = VersionType.toString(randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE));
                     doc.put(field.getFieldName(), value);
                     expectedDoc.put(field.getFieldName(), value);
                 } else if (field == IF_SEQ_NO || field == IF_PRIMARY_TERM) {
-                    Long value = randomNonNegativeLong();
-                    doc.put(field.getFieldName(), value);
-                    expectedDoc.put(field.getFieldName(), value);
+                    if (randomBoolean()) {
+                        Long value = randomNonNegativeLong();
+                        doc.put(field.getFieldName(), value);
+                        expectedDoc.put(field.getFieldName(), value);
+                    } else {
+                        Integer value = randomIntBetween(1, 1000000);
+                        doc.put(field.getFieldName(), value);
+                        expectedDoc.put(field.getFieldName(), value);
+                    }
                 } else {
                     if (randomBoolean()) {
                         String value = randomAlphaOfLengthBetween(1, 10);
@@ -282,4 +294,40 @@ public void testNotValidDocs() {
         );
         assertThat(e3.getMessage(), containsString("required property is missing"));
     }
+
+    public void testNotValidMetadataFields() {
+        List<IngestDocument.Metadata> fields = Arrays.asList(VERSION, IF_SEQ_NO, IF_PRIMARY_TERM);
+        for (IngestDocument.Metadata field : fields) {
+            String metadataFieldName = field.getFieldName();
+            Map<String, Object> requestContent = new HashMap<>();
+            List<Map<String, Object>> docs = new ArrayList<>();
+            requestContent.put(Fields.DOCS, docs);
+            Map<String, Object> doc = new HashMap<>();
+            doc.put(metadataFieldName, randomAlphaOfLengthBetween(1, 10));
+            doc.put(Fields.SOURCE, Collections.singletonMap(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
+            docs.add(doc);
+
+            Map<String, Object> pipelineConfig = new HashMap<>();
+            List<Map<String, Object>> processors = new ArrayList<>();
+            Map<String, Object> processorConfig = new HashMap<>();
+            List<Map<String, Object>> onFailureProcessors = new ArrayList<>();
+            int numOnFailureProcessors = randomIntBetween(0, 1);
+            for (int j = 0; j < numOnFailureProcessors; j++) {
+                onFailureProcessors.add(Collections.singletonMap("mock_processor", Collections.emptyMap()));
+            }
+            if (numOnFailureProcessors > 0) {
+                processorConfig.put("on_failure", onFailureProcessors);
+            }
+            processors.add(Collections.singletonMap("mock_processor", processorConfig));
+            pipelineConfig.put("processors", processors);
+
+            requestContent.put(Fields.PIPELINE, pipelineConfig);
+
+            assertThrows(
+                "Failed to parse parameter [" + metadataFieldName + "], only int or long is accepted",
+                IllegalArgumentException.class,
+                () -> SimulatePipelineRequest.parse(requestContent, false, ingestService)
+            );
+        }
+    }
 }
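Two notes on this patch. First, the reason the old parseDocs code failed: JSON/XContent parsing hands back the narrowest boxed type a number fits, so "_version": 100 arrives as an Integer while "_if_seq_no": 12333333333333333 arrives as a Long, and an unconditional (Long) cast on a boxed Integer throws ClassCastException, because reference casts do not widen the way primitive int-to-long conversion does. The fix accepts either boxed type, widens through Number.longValue(), and rejects everything else with the IllegalArgumentException that surfaces as the 400 in the yaml tests. A minimal standalone sketch of the same pattern (class and method names are invented for illustration; this is not the project's code):

    // Mirrors the widening-or-reject pattern applied to _version,
    // _if_seq_no and _if_primary_term in the patch above.
    public class MetadataFieldSketch {
        static long toLongMetadata(String field, Object value) {
            if (value instanceof Integer || value instanceof Long) {
                return ((Number) value).longValue(); // safe widening for both boxed types
            }
            throw new IllegalArgumentException("Failed to parse parameter [" + field + "], only int or long is accepted");
        }

        public static void main(String[] args) {
            System.out.println(toLongMetadata("_version", 100));                  // Integer case; previously a ClassCastException
            System.out.println(toLongMetadata("_if_seq_no", 12333333333333333L)); // Long case; unchanged behavior
            try {
                toLongMetadata("_if_primary_term", "1"); // mirrors the bad_request yaml tests
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());      // the 400's root-cause reason
            }
        }
    }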
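Second, the randomBoolean() branches added to innerTestParseWithProvidedPipeline matter because they exercise both boxed number shapes across runs instead of only Long. Those helpers come from the OpenSearch randomized test framework; for readers outside it, a rough standalone equivalent using nothing beyond the JDK (class name invented) looks like this:

    import java.util.Random;

    public class RandomizedInputSketch {
        public static void main(String[] args) {
            Random random = new Random();
            // Flip between the two number shapes a JSON parser can produce,
            // as the updated test does for _version, _if_seq_no and _if_primary_term.
            Object value = random.nextBoolean()
                ? (Object) Integer.valueOf(1 + random.nextInt(1_000_000))
                : (Object) Long.valueOf(random.nextLong() & Long.MAX_VALUE); // mask keeps it non-negative
            System.out.println(value.getClass().getSimpleName() + " widens to " + ((Number) value).longValue());
        }
    }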