Merge branch 'main' into cancellation-1
aasom143 committed Sep 3, 2024
2 parents 24d16a2 + cfcfe21 commit e062a97
Showing 247 changed files with 12,367 additions and 3,255 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -37,6 +37,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- [Workload Management] Add rejection logic for co-ordinator and shard level requests ([#15428](https://github.com/opensearch-project/OpenSearch/pull/15428))
- Adding translog durability validation in index templates ([#15494](https://github.com/opensearch-project/OpenSearch/pull/15494))
- Add index creation using the context field ([#15290](https://github.com/opensearch-project/OpenSearch/pull/15290))
- Add fieldType to AbstractQueryBuilder and FieldSortBuilder ([#15328](https://github.com/opensearch-project/OpenSearch/pull/15328))
- [Reader Writer Separation] Add searchOnly replica routing configuration ([#15410](https://github.com/opensearch-project/OpenSearch/pull/15410))
- [Range Queries] Add new approximateable query framework to short-circuit range queries ([#13788](https://github.com/opensearch-project/OpenSearch/pull/13788))
- [Workload Management] Add query group level failure tracking ([#15227](https://github.com/opensearch-project/OpenSearch/pull/15527))
- Add support to upload snapshot shard blobs with hashed prefix ([#15426](https://github.com/opensearch-project/OpenSearch/pull/15426))
- [Remote Publication] Add remote download stats ([#15291](https://github.com/opensearch-project/OpenSearch/pull/15291))

### Dependencies
- Bump `netty` from 4.1.111.Final to 4.1.112.Final ([#15081](https://github.com/opensearch-project/OpenSearch/pull/15081))
@@ -400,6 +400,11 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
final MappedFieldType ft = context.fieldMapper(field);
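
The override above recurs in each of the query-builder hunks that follow: every builder now routes its target field through a common fieldName() accessor. A minimal, self-contained sketch of that pattern, purely illustrative; the interface and class names below are assumptions, not code from this commit:

    interface WithFieldName {
        String fieldName();
    }

    class ExampleTermQueryBuilder implements WithFieldName {
        private final String field;

        ExampleTermQueryBuilder(String field) {
            this.field = field;
        }

        // Stands in for getDefaultFieldName() in the builders shown in this diff.
        String getDefaultFieldName() {
            return field;
        }

        @Override
        public final String fieldName() {
            return getDefaultFieldName();
        }
    }

Callers that only need the target field of a query can then read it through the shared accessor instead of casting to each concrete builder.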
@@ -264,6 +264,11 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

public static HasChildQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String childType = null;
@@ -233,6 +233,11 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

public static HasParentQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String parentType = null;
@@ -130,6 +130,11 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

public static ParentIdQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String type = null;
@@ -356,6 +356,11 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

private static final ConstructingObjectParser<PercolateQueryBuilder, Void> PARSER = new ConstructingObjectParser<>(NAME, args -> {
String field = (String) args[0];
BytesReference document = (BytesReference) args[1];
@@ -67,19 +67,11 @@ public void testUrlRepository() throws Exception {

logger.info("--> creating repository");
Path repositoryLocation = randomRepoPath();
assertAcked(
client.admin()
.cluster()
.preparePutRepository("test-repo")
.setType(FsRepository.TYPE)
.setSettings(
Settings.builder()
.put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation)
.put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
.put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES)
)
);

Settings.Builder settings = Settings.builder()
.put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation)
.put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
.put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES);
createRepository("test-repo", FsRepository.TYPE, settings);
createIndex("test-idx");
ensureGreen();

@@ -115,17 +107,10 @@ public void testUrlRepository() throws Exception {
cluster().wipeIndices("test-idx");

logger.info("--> create read-only URL repository");
assertAcked(
client.admin()
.cluster()
.preparePutRepository("url-repo")
.setType(URLRepository.TYPE)
.setSettings(
Settings.builder()
.put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL().toString())
.put("list_directories", randomBoolean())
)
);
Settings.Builder settingsBuilder = Settings.builder()
.put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL().toString())
.put("list_directories", randomBoolean());
createRepository("url-repo", URLRepository.TYPE, settingsBuilder);
logger.info("--> restore index after deletion");
RestoreSnapshotResponse restoreSnapshotResponse = client.admin()
.cluster()
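
The same consolidation runs through the repository tests in the rest of this commit: the preparePutRepository(...).setType(...).setSettings(...).get() chains followed by an acknowledgement assertion give way to shared createRepository/putRepository helpers. Below is a sketch of a test using the static helper; the helper signature is inferred from the call sites in this diff, and the class, method, and repository names are placeholders, not part of the change:

    import org.opensearch.common.settings.Settings;
    import org.opensearch.repositories.fs.FsRepository;
    import org.opensearch.test.OpenSearchIntegTestCase;

    public class ExampleRepositoryIT extends OpenSearchIntegTestCase {

        public void testRegisterFsRepository() throws Exception {
            // Build the repository settings once, then hand the builder to the helper.
            Settings.Builder settings = Settings.builder()
                .put(FsRepository.LOCATION_SETTING.getKey(), randomRepoPath())
                .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean());

            // Static helper used throughout this commit to register a repository.
            OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "example-repo", FsRepository.TYPE, settings);
        }
    }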
@@ -38,7 +38,6 @@
import com.azure.storage.blob.models.BlobStorageException;
import org.opensearch.action.ActionRunnable;
import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.SecureSettings;
@@ -47,6 +46,7 @@
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.AbstractThirdPartyRepositoryTestCase;
import org.opensearch.repositories.blobstore.BlobStoreRepository;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.junit.AfterClass;

import java.net.HttpURLConnection;
@@ -56,7 +56,6 @@
import reactor.core.scheduler.Schedulers;

import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyRepositoryTestCase {
@@ -103,17 +102,11 @@ protected SecureSettings credentials() {

@Override
protected void createRepository(String repoName) {
AcknowledgedResponse putRepositoryResponse = client().admin()
.cluster()
.preparePutRepository(repoName)
.setType("azure")
.setSettings(
Settings.builder()
.put("container", System.getProperty("test.azure.container"))
.put("base_path", System.getProperty("test.azure.base"))
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("container", System.getProperty("test.azure.container"))
.put("base_path", System.getProperty("test.azure.base"));

OpenSearchIntegTestCase.putRepository(client().admin().cluster(), repoName, "azure", settings);
if (Strings.hasText(System.getProperty("test.azure.sas_token"))) {
ensureSasTokenPermissions();
}
@@ -32,19 +32,18 @@

package org.opensearch.repositories.gcs;

import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.SecureSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.common.Strings;
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.AbstractThirdPartyRepositoryTestCase;
import org.opensearch.test.OpenSearchIntegTestCase;

import java.util.Base64;
import java.util.Collection;

import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class GoogleCloudStorageThirdPartyTests extends AbstractThirdPartyRepositoryTestCase {
@@ -84,16 +83,9 @@ protected SecureSettings credentials() {

@Override
protected void createRepository(final String repoName) {
AcknowledgedResponse putRepositoryResponse = client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("gcs")
.setSettings(
Settings.builder()
.put("bucket", System.getProperty("test.google.bucket"))
.put("base_path", System.getProperty("test.google.base", "/"))
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("bucket", System.getProperty("test.google.bucket"))
.put("base_path", System.getProperty("test.google.base", "/"));
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "gcs", settings);
}
}
@@ -34,12 +34,12 @@
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.SecureSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.AbstractThirdPartyRepositoryTestCase;
import org.opensearch.test.OpenSearchIntegTestCase;

import java.util.Collection;

@@ -61,20 +61,13 @@ protected SecureSettings credentials() {

@Override
protected void createRepository(String repoName) {
AcknowledgedResponse putRepositoryResponse = client().admin()
.cluster()
.preparePutRepository(repoName)
.setType("hdfs")
.setSettings(
Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean())
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean());
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), repoName, "hdfs", settings);
}

// HDFS repository doesn't have precise cleanup stats so we only check whether or not any blobs were removed
@@ -35,7 +35,6 @@

import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.Client;
import org.opensearch.cluster.ClusterState;
import org.opensearch.common.settings.Settings;
@@ -45,6 +44,7 @@
import org.opensearch.repositories.blobstore.BlobStoreRepository;
import org.opensearch.repositories.blobstore.BlobStoreTestUtil;
import org.opensearch.snapshots.SnapshotState;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.threadpool.ThreadPool;

@@ -63,21 +63,13 @@ protected Collection<Class<? extends Plugin>> getPlugins() {

public void testSimpleWorkflow() {
Client client = client();

AcknowledgedResponse putRepositoryResponse = client.admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(
Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean())
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean());
OpenSearchIntegTestCase.putRepository(client.admin().cluster(), "test-repo", "hdfs", settings);

createIndex("test-idx-1");
createIndex("test-idx-2");
@@ -168,7 +160,7 @@ public void testSimpleWorkflow() {

public void testMissingUri() {
try {
client().admin().cluster().preparePutRepository("test-repo").setType("hdfs").setSettings(Settings.EMPTY).get();
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", Settings.builder());
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -178,12 +170,8 @@ public void testSimpleWorkflow() {

public void testEmptyUri() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "/path").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "/path");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -193,12 +181,8 @@ public void testEmptyUri() {

public void testNonHdfsUri() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "file:///").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "file:///");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -208,12 +192,8 @@ public void testNonHdfsUri() {

public void testPathSpecifiedInHdfs() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "hdfs:///some/path").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "hdfs:///some/path");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -223,12 +203,8 @@ public void testPathSpecifiedInHdfs() {

public void testMissingPath() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "hdfs:///").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "hdfs:///");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);