Commit be7561b
1 parent 36e7450
Signed-off-by: Sarthak Aggarwal <sarthagg@amazon.com>
Showing 2 changed files with 346 additions and 0 deletions.
modules/reindex/src/internalClusterTest/java/org/opensearch/index/codec/ReindexCodecIT.java (179 additions, 0 deletions)
@@ -0,0 +1,179 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.index.codec;

import org.opensearch.action.admin.indices.flush.FlushResponse;
import org.opensearch.action.admin.indices.refresh.RefreshResponse;
import org.opensearch.action.admin.indices.segments.IndicesSegmentsRequest;
import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.opensearch.action.support.ActiveShardCount;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.engine.Segment;
import org.opensearch.index.reindex.BulkByScrollResponse;
import org.opensearch.index.reindex.ReindexAction;
import org.opensearch.index.reindex.ReindexRequestBuilder;
import org.opensearch.index.reindex.ReindexTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.stream.IntStream;

import static java.util.stream.Collectors.toList;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_METADATA;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_READ;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_WRITE;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;

public class ReindexCodecIT extends ReindexTestCase {

    public void testReindexingMultipleCodecs() throws InterruptedException, ExecutionException {
        internalCluster().ensureAtLeastNumDataNodes(2);
        Map<String, String> codecMap = Map.of(
            "best_compression",
            "BEST_COMPRESSION",
            "zstd_no_dict",
            "ZSTD_NO_DICT",
            "zstd",
            "ZSTD",
            "default",
            "BEST_SPEED"
        );

        for (Map.Entry<String, String> codec : codecMap.entrySet()) {
            assertReindexingWithMultipleCodecs(codec.getKey(), codec.getValue(), codecMap);
        }
    }

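    // Reindexes documents written under all four codecs into a destination index
    // configured with a single codec, then verifies that every destination segment
    // reports the expected codec mode attribute.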
    private void assertReindexingWithMultipleCodecs(String destCodec, String destCodecMode, Map<String, String> codecMap)
        throws ExecutionException, InterruptedException {

        final String index = "test-index" + destCodec;
        final String destIndex = "dest-index" + destCodec;

        // creating source index
        createIndex(
            index,
            Settings.builder()
                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                .put("index.codec", "default")
                .build()
        );
        ensureGreen(index);

        final int nbDocs = randomIntBetween(2, 5);

        // indexing with all 4 codecs
        for (Map.Entry<String, String> codec : codecMap.entrySet()) {
            indexWithDifferentCodecs(index, codec.getKey(), codec.getValue(), nbDocs);
        }

        // creating destination index with destination codec
        createIndex(
            destIndex,
            Settings.builder()
                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                .put("index.codec", destCodec)
                .build()
        );

        BulkByScrollResponse bulkResponse = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source(index)
            .destination(destIndex)
            .refresh(true)
            .waitForActiveShards(ActiveShardCount.ONE)
            .get();

        assertEquals(4 * nbDocs, bulkResponse.getCreated());
        assertEquals(4 * nbDocs, bulkResponse.getTotal());
        assertEquals(0, bulkResponse.getDeleted());
        assertEquals(0, bulkResponse.getNoops());
        assertEquals(0, bulkResponse.getVersionConflicts());
        assertEquals(1, bulkResponse.getBatches());
        assertTrue(bulkResponse.getTook().getMillis() > 0);
        assertEquals(0, bulkResponse.getBulkFailures().size());
        assertEquals(0, bulkResponse.getSearchFailures().size());
        assertTrue(getSegments(destIndex).stream().allMatch(segment -> segment.attributes.containsValue(destCodecMode)));
    }

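    // Force-flushes and refreshes the index with each index block setting enabled
    // in turn, asserting that neither request fails while the block is in place.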
    private void flushAndRefreshIndex(String index) {

        // Request is not blocked
        for (String blockSetting : Arrays.asList(
            SETTING_BLOCKS_READ,
            SETTING_BLOCKS_WRITE,
            SETTING_READ_ONLY,
            SETTING_BLOCKS_METADATA,
            SETTING_READ_ONLY_ALLOW_DELETE
        )) {
            try {
                enableIndexBlock(index, blockSetting);
                // flush
                FlushResponse flushResponse = client().admin().indices().prepareFlush(index).setForce(true).execute().actionGet();
                assertNoFailures(flushResponse);

                // refresh
                RefreshResponse refreshResponse = client().admin().indices().prepareRefresh(index).execute().actionGet();
                assertNoFailures(refreshResponse);
            } finally {
                disableIndexBlock(index, blockSetting);
            }
        }
    }

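    // Closes the index, switches index.codec, reopens it, indexes nbDocs random
    // documents, and asserts that at least one segment reports the codec mode.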
    private void indexWithDifferentCodecs(String index, String codec, String codecMode, int nbDocs) throws InterruptedException,
        ExecutionException {

        assertAcked(client().admin().indices().prepareClose(index));

        assertAcked(
            client().admin()
                .indices()
                .updateSettings(new UpdateSettingsRequest(index).settings(Settings.builder().put("index.codec", codec)))
                .get()
        );

        assertAcked(client().admin().indices().prepareOpen(index));

        indexRandom(
            randomBoolean(),
            false,
            randomBoolean(),
            IntStream.range(0, nbDocs)
                .mapToObj(i -> client().prepareIndex(index).setId(UUID.randomUUID().toString()).setSource("num", i))
                .collect(toList())
        );
        flushAndRefreshIndex(index);
        assertTrue(getSegments(index).stream().anyMatch(segment -> segment.attributes.containsValue(codecMode)));
    }

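    // Returns the segments of the first shard of the given index.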
    private ArrayList<Segment> getSegments(String index) {

        return new ArrayList<>(
            client().admin()
                .indices()
                .segments(new IndicesSegmentsRequest(index))
                .actionGet()
                .getIndices()
                .get(index)
                .getShards()
                .get(0)
                .getShards()[0].getSegments()
        );
    }

}

server/src/internalClusterTest/java/org/opensearch/index/codec/CodecIT.java (167 additions, 0 deletions)
@@ -0,0 +1,167 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.index.codec;

import org.opensearch.action.admin.indices.flush.FlushResponse;
import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.opensearch.action.admin.indices.refresh.RefreshResponse;
import org.opensearch.action.admin.indices.segments.IndicesSegmentsRequest;
import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.index.engine.Segment;
import org.opensearch.test.OpenSearchIntegTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static java.util.stream.Collectors.toList;
import static org.hamcrest.Matchers.is;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_METADATA;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_READ;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_WRITE;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY;
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;

@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST)
public class CodecIT extends OpenSearchIntegTestCase {

    public void testForceMergeMultipleCodecs() throws ExecutionException, InterruptedException {

        Map<String, String> codecMap = Map.of(
            "best_compression",
            "BEST_COMPRESSION",
            "zstd_no_dict",
            "ZSTD_NO_DICT",
            "zstd",
            "ZSTD",
            "default",
            "BEST_SPEED"
        );

        for (Map.Entry<String, String> codec : codecMap.entrySet()) {
            forceMergeMultipleCodecs(codec.getKey(), codec.getValue(), codecMap);
        }
    }

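    // Writes segments under all four codecs, force-merges down to a single
    // segment under the final codec, and verifies that the merged segment
    // reports the expected codec mode attribute.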
    private void forceMergeMultipleCodecs(String finalCodec, String finalCodecMode, Map<String, String> codecMap) throws ExecutionException,
        InterruptedException {

        internalCluster().ensureAtLeastNumDataNodes(2);
        final String index = "test-index" + finalCodec;

        // creating index
        createIndex(
            index,
            Settings.builder()
                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
                .put("index.codec", "default")
                .build()
        );
        ensureGreen(index);

        // ingesting and asserting segment codec mode for all four codecs
        for (Map.Entry<String, String> codec : codecMap.entrySet()) {
            assertSegmentCodec(index, codec.getKey(), codec.getValue());
        }

        // force merge into final codec
        assertSegmentCodec(index, finalCodec, finalCodecMode);
        final ForceMergeResponse forceMergeResponse = client().admin().indices().prepareForceMerge(index).setMaxNumSegments(1).get();

        assertThat(forceMergeResponse.getFailedShards(), is(0));
        assertThat(forceMergeResponse.getSuccessfulShards(), is(2));

        flushAndRefreshIndex(index);

        List<Segment> segments = getSegments(index).stream().filter(Segment::isSearch).collect(Collectors.toList());
        assertEquals(1, segments.size());
        assertTrue(segments.stream().findFirst().get().attributes.containsValue(finalCodecMode));
    }

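    // Closes the index, applies the codec setting, reopens it, ingests random
    // documents, and asserts that at least one segment reports the codec mode.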
    private void assertSegmentCodec(String index, String codec, String codecMode) throws InterruptedException, ExecutionException {

        assertAcked(client().admin().indices().prepareClose(index));

        assertAcked(
            client().admin()
                .indices()
                .updateSettings(new UpdateSettingsRequest(index).settings(Settings.builder().put("index.codec", codec)))
                .get()
        );

        assertAcked(client().admin().indices().prepareOpen(index));

        ingest(index);
        flushAndRefreshIndex(index);

        assertTrue(getSegments(index).stream().anyMatch(segment -> segment.attributes.containsValue(codecMode)));
    }

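    // Returns the segments of the first shard of the given index.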
    private ArrayList<Segment> getSegments(String index) {

        return new ArrayList<>(
            client().admin()
                .indices()
                .segments(new IndicesSegmentsRequest(index))
                .actionGet()
                .getIndices()
                .get(index)
                .getShards()
                .get(0)
                .getShards()[0].getSegments()
        );
    }

    private void ingest(String index) throws InterruptedException {

        final int nbDocs = randomIntBetween(1, 5);
        indexRandom(
            randomBoolean(),
            false,
            randomBoolean(),
            IntStream.range(0, nbDocs)
                .mapToObj(i -> client().prepareIndex(index).setId(UUID.randomUUID().toString()).setSource("num", i))
                .collect(toList())
        );
    }

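    // Force-flushes and refreshes the index with each index block setting enabled
    // in turn, asserting that neither request fails while the block is in place.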
    private void flushAndRefreshIndex(String index) {

        // Request is not blocked
        for (String blockSetting : Arrays.asList(
            SETTING_BLOCKS_READ,
            SETTING_BLOCKS_WRITE,
            SETTING_READ_ONLY,
            SETTING_BLOCKS_METADATA,
            SETTING_READ_ONLY_ALLOW_DELETE
        )) {
            try {
                enableIndexBlock(index, blockSetting);
                FlushResponse flushResponse = client().admin().indices().prepareFlush(index).setForce(true).execute().actionGet();
                assertNoFailures(flushResponse);
                RefreshResponse response = client().admin().indices().prepareRefresh(index).execute().actionGet();
                assertNoFailures(response);
            } finally {
                disableIndexBlock(index, blockSetting);
            }
        }
    }

}
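
Both suites live under src/internalClusterTest source sets; a rough sketch of running them with Gradle test filtering follows (the task names and filter flag are assumptions about the standard OpenSearch build, not part of this commit):

./gradlew :modules:reindex:internalClusterTest --tests "org.opensearch.index.codec.ReindexCodecIT"
./gradlew :server:internalClusterTest --tests "org.opensearch.index.codec.CodecIT"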