Integration test with external ES cluster (opendistro-for-elasticsearch#374)

* Fix build issue by migrating to RestTestCase

* Fix query IT

* Fix subquery IT

* Fix csv formatter and SQL functions IT

* Fix aggregation and delete IT

* Fix date IT

* Fix jdbc IT

* Fix show and metadata IT

* Fix subquery IT

* Fix date functions UT

* Fix test data setup and cleanup issue

* Fix correctness IT

* Fix doctest IT

* Add JavaDoc on base class

* Address PR comments

* Address PR comments
abbashus authored Mar 5, 2020
1 parent 7c57d72 commit 56a2f44
Showing 63 changed files with 12,788 additions and 12,614 deletions.
17 changes: 16 additions & 1 deletion build.gradle
@@ -63,7 +63,9 @@ version = "${opendistroVersion}.0"

apply plugin: 'elasticsearch.esplugin'
apply plugin: 'jacoco'
apply from: 'build-tools/sqlplugin-coverage.gradle'
if (!System.properties.containsKey('tests.rest.cluster') && !System.properties.containsKey('tests.cluster')){
apply from: 'build-tools/sqlplugin-coverage.gradle'
}
apply plugin: 'antlr'

jacoco {
@@ -134,6 +136,15 @@ integTestRunner {
// allows integration test classes to access test resource from project root path
systemProperty('project.root', project.rootDir.absolutePath)

// Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for
// requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
doFirst { systemProperty 'cluster.debug', integTestCluster.debug }

// The --debug-jvm command-line option makes the cluster debuggable; this makes the tests debuggable
if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

// Run different task based on test type. "exclude" is required for each task.
def testType = System.getProperty("testType")
if (testType == 'doctest') { // Doctest to generate documentation
@@ -167,6 +178,10 @@ integTestCluster {
distribution = "oss-zip"
}

run {
distribution = "oss-zip"
}

generateGrammarSource {
arguments += ['-visitor', '-package', 'com.amazon.opendistroforelasticsearch.sql.antlr.parser']
source = sourceSets.main.antlr
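The doFirst block above hands the cluster's debug state to the test JVM through the cluster.debug system property, and the test.debug property turns on JDWP for the test JVM itself. A minimal sketch of how a test could read cluster.debug to stretch its request timeouts; the class name and timeout values are assumptions for illustration only:

public class DebugAwareTimeouts {
    // Reads the 'cluster.debug' property set by the Gradle doFirst block above.
    static long requestTimeoutMillis() {
        boolean clusterUnderDebugger = Boolean.parseBoolean(System.getProperty("cluster.debug", "false"));
        // Allow much longer requests while someone is stepping through the cluster code (values are hypothetical).
        return clusterUnderDebugger ? 10 * 60 * 1000L : 30 * 1000L;
    }
}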
4 changes: 2 additions & 2 deletions docs/user/admin/settings.rst
@@ -347,7 +347,7 @@ Result set::
"hits" : [
{
"_index" : "accounts",
"_type" : "account",
"_type" : "_doc",
"_source" : {
"firstname" : "Nanette",
"age" : 28,
@@ -361,7 +361,7 @@
},
{
"_index" : "accounts",
"_type" : "account",
"_type" : "_doc",
"_source" : {
"firstname" : "Amber",
"age" : 32,
4 changes: 2 additions & 2 deletions docs/user/interfaces/protocol.rst
@@ -238,7 +238,7 @@ Result set::
"hits" : [
{
"_index" : "accounts",
"_type" : "account",
"_type" : "_doc",
"_source" : {
"firstname" : "Nanette",
"age" : 28,
@@ -252,7 +252,7 @@
},
{
"_index" : "accounts",
"_type" : "account",
"_type" : "_doc",
"_source" : {
"firstname" : "Amber",
"age" : 32,
@@ -555,6 +555,9 @@ private DateHistogramAggregationBuilder dateHistogram(MethodField field) throws
case "interval":
dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString()));
break;
case "fixed_interval":
dateHistogram.fixedInterval(new DateHistogramInterval(kv.value.toString()));
break;
case "field":
dateHistogram.field(value);
break;
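The new fixed_interval case forwards to the fixed-interval API on DateHistogramAggregationBuilder, which replaces the older interval setter deprecated in Elasticsearch 7.x. A minimal sketch of the equivalent builder call, assuming an aggregation named over_time on an insert_time field with a one-day interval:

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class FixedIntervalSketch {
    public static void main(String[] args) {
        // What the added "fixed_interval" case amounts to for a value of "1d";
        // aggregation name and field are assumed for illustration.
        DateHistogramAggregationBuilder agg = AggregationBuilders.dateHistogram("over_time")
                .field("insert_time")
                .fixedInterval(new DateHistogramInterval("1d"));
        System.out.println(agg.getName());
    }
}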
@@ -183,13 +183,15 @@ public Tuple<String, String> function(String methodName, List<KVValue> paramers,
break;
case "month_of_year":
case "month":
functionStr = dateFunctionTemplate("monthOfYear", (SQLExpr) paramers.get(0).value);
functionStr = dateFunctionTemplate("monthValue", (SQLExpr) paramers.get(0).value);
break;
case "monthname":
functionStr = dateFunctionTemplate("month", (SQLExpr) paramers.get(0).value);
break;
case "week_of_year":
functionStr = dateFunctionTemplate("weekOfWeekyear", (SQLExpr) paramers.get(0).value);
functionStr = dateFunctionTemplate("weekOfWeekyear",
"get(WeekFields.ISO.weekOfWeekBasedYear())",
(SQLExpr) paramers.get(0).value);
break;
case "day_of_year":
functionStr = dateFunctionTemplate("dayOfYear", (SQLExpr) paramers.get(0).value);
@@ -199,22 +201,26 @@
functionStr = dateFunctionTemplate("dayOfMonth", (SQLExpr) paramers.get(0).value);
break;
case "day_of_week":
functionStr = dateFunctionTemplate("dayOfWeek", (SQLExpr) paramers.get(0).value);
functionStr = dateFunctionTemplate("dayOfWeek",
"getDayOfWeekEnum().getValue()",
(SQLExpr) paramers.get(0).value);
break;
case "date":
functionStr = date((SQLExpr) paramers.get(0).value);
break;
case "hour_of_day":
functionStr = dateFunctionTemplate("hourOfDay", (SQLExpr) paramers.get(0).value);
functionStr = dateFunctionTemplate("hour", (SQLExpr) paramers.get(0).value);
break;
case "minute_of_day":
functionStr = dateFunctionTemplate("minuteOfDay", (SQLExpr) paramers.get(0).value);
functionStr = dateFunctionTemplate("minuteOfDay",
"get(ChronoField.MINUTE_OF_DAY)",
(SQLExpr) paramers.get(0).value);
break;
case "minute_of_hour":
functionStr = dateFunctionTemplate("minuteOfHour", (SQLExpr) paramers.get(0).value);
functionStr = dateFunctionTemplate("minute", (SQLExpr) paramers.get(0).value);
break;
case "second_of_minute":
functionStr = dateFunctionTemplate("secondOfMinute", (SQLExpr) paramers.get(0).value);
functionStr = dateFunctionTemplate("second", (SQLExpr) paramers.get(0).value);
break;
case "timestamp":
functionStr = timestamp((SQLExpr) paramers.get(0).value);
@@ -530,9 +536,26 @@ private Tuple<String, String> date_format(SQLExpr field, String pattern, String
}
}

/**
* Explicitly pass in name used to generate variable ID because methodName is not always valid
*
* For example,
* <code>
* functionStr = dateFunctionTemplate("weekOfWeekyear",
* "get(WeekFields.ISO.weekOfWeekBasedYear())",
* (SQLExpr) paramers.get(0).value);
* </code>
*
* The old dateFunctionTemplate(methodName, field) passes string "get(WeekFields.ISO.weekOfWeekBasedYear())"
* to nextId() which generates an invalid variable name in painless script.
*/
private Tuple<String, String> dateFunctionTemplate(String name, String methodName, SQLExpr field) {
String id = nextId(name);
return new Tuple<>(id, def(id, doc(field) + ".value." + methodName));
}

private Tuple<String, String> dateFunctionTemplate(String methodName, SQLExpr field) {
String name = nextId(methodName);
return new Tuple<>(name, def(name, doc(field) + ".value." + methodName));
return dateFunctionTemplate(methodName, methodName, field);
}

public Tuple<String, String> add(SQLExpr a, SQLExpr b) {
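The two-argument overload introduced above keeps the variable id separate from the Painless call being generated, because an expression like get(WeekFields.ISO.weekOfWeekBasedYear()) is not a legal seed for an identifier. A standalone sketch of the script fragment the template builds for week_of_year, with a hypothetical field and counter value:

public class DateFunctionTemplateSketch {
    public static void main(String[] args) {
        String doc = "doc['insert_time']";                                // hypothetical field accessor
        String methodName = "get(WeekFields.ISO.weekOfWeekBasedYear())";  // Painless call from the case above
        String id = "weekOfWeekyear_1";                                   // nextId("weekOfWeekyear"), counter assumed to be 1
        // Seeding the id from methodName instead would produce something like
        // "get(WeekFields.ISO.weekOfWeekBasedYear())_1", which is not a valid Painless variable name.
        String script = "def " + id + " = " + doc + ".value." + methodName;
        System.out.println(script);
        // Prints: def weekOfWeekyear_1 = doc['insert_time'].value.get(WeekFields.ISO.weekOfWeekBasedYear())
    }
}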
@@ -21,15 +21,24 @@
import com.amazon.opendistroforelasticsearch.sql.correctness.runner.connection.ESConnection;
import com.amazon.opendistroforelasticsearch.sql.correctness.runner.connection.JDBCConnection;
import com.amazon.opendistroforelasticsearch.sql.correctness.testset.TestDataSet;
import com.amazon.opendistroforelasticsearch.sql.esintgtest.SQLIntegTestCase;
import com.amazon.opendistroforelasticsearch.sql.esintgtest.CustomExternalTestCluster;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import com.google.common.collect.Maps;
import org.apache.http.HttpHost;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.TestCluster;
import org.json.JSONObject;
import org.junit.Test;

import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -43,7 +52,10 @@
/**
* Correctness integration test by performing comparison test with other databases.
*/
public class CorrectnessIT extends SQLIntegTestCase {
@ESIntegTestCase.SuiteScopeTestCase
@ESIntegTestCase.ClusterScope(scope=ESIntegTestCase.Scope.SUITE, numDataNodes=3, supportsDedicatedMasters=false, transportClientRatio=1)
@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
public class CorrectnessIT extends ESIntegTestCase {

private static final Logger LOG = LogManager.getLogger();

@@ -135,4 +147,24 @@ private String reportFileName() {
return "report_" + dateTime + ".json";
}

@Override
protected TestCluster buildTestCluster(Scope scope, long seed) throws IOException {

String clusterAddresses = System.getProperty(TESTS_CLUSTER);

if (Strings.hasLength(clusterAddresses)) {
String[] stringAddresses = clusterAddresses.split(",");
TransportAddress[] transportAddresses = new TransportAddress[stringAddresses.length];
int i = 0;
for (String stringAddress : stringAddresses) {
URL url = new URL("http://" + stringAddress);
InetAddress inetAddress = InetAddress.getByName(url.getHost());
transportAddresses[i++] = new TransportAddress(new InetSocketAddress(inetAddress, url.getPort()));
}
return new CustomExternalTestCluster(createTempDir(), externalClusterClientSettings(),
transportClientPlugins(), transportAddresses);
}
return super.buildTestCluster(scope, seed);
}

}
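The buildTestCluster override only switches to an external cluster when the tests.cluster system property (ESIntegTestCase.TESTS_CLUSTER) carries transport addresses. A standalone sketch of the host:port handling it performs, using made-up addresses in place of a real -Dtests.cluster value:

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;

public class ClusterAddressSketch {
    public static void main(String[] args) throws Exception {
        String clusterAddresses = "localhost:9300,127.0.0.1:9301"; // hypothetical -Dtests.cluster value
        for (String stringAddress : clusterAddresses.split(",")) {
            // Reuse URL parsing to split host and port, as the override does.
            URL url = new URL("http://" + stringAddress);
            InetAddress inetAddress = InetAddress.getByName(url.getHost());
            System.out.println(new InetSocketAddress(inetAddress, url.getPort()));
        }
    }
}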
@@ -20,22 +20,31 @@
import com.amazon.opendistroforelasticsearch.sql.doctest.core.builder.DocBuilder;
import com.amazon.opendistroforelasticsearch.sql.doctest.core.markup.Document;
import com.amazon.opendistroforelasticsearch.sql.doctest.core.markup.RstDocument;
import com.amazon.opendistroforelasticsearch.sql.esintgtest.SQLIntegTestCase;
import com.amazon.opendistroforelasticsearch.sql.esintgtest.CustomExternalTestCluster;
import com.amazon.opendistroforelasticsearch.sql.esintgtest.TestUtils;
import com.carrotsearch.randomizedtesting.AnnotatedMethodProvider;
import com.carrotsearch.randomizedtesting.TestMethodAndParams;
import com.carrotsearch.randomizedtesting.annotations.TestCaseOrdering;
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.TestCluster;

import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Comparator;

import static com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import static java.nio.file.StandardOpenOption.APPEND;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;

@@ -44,11 +53,13 @@
*/
@TestMethodProviders({DocTest.SectionMethod.class})
@TestCaseOrdering(DocTest.SectionOrder.class)
@ESIntegTestCase.SuiteScopeTestCase
@ClusterScope(scope= SUITE, numDataNodes=1, supportsDedicatedMasters=false, transportClientRatio=1)
public abstract class DocTest extends SQLIntegTestCase implements DocBuilder {
@ThreadLeakScope(Scope.NONE)
public abstract class DocTest extends ESIntegTestCase implements DocBuilder {

@Override
protected void init() throws Exception {
protected void setupSuiteScopeCluster() {
DocTestConfig config = getClass().getAnnotation(DocTestConfig.class);
loadTestData(config);
copyTemplateToDocument(config);
@@ -111,4 +122,24 @@ private Path absolutePath(String templateRelativePath) {
return Paths.get(TestUtils.getResourceFilePath(DOCUMENT_FOLDER_ROOT + templateRelativePath));
}

@Override
protected TestCluster buildTestCluster(Scope scope, long seed) throws IOException {

String clusterAddresses = System.getProperty(TESTS_CLUSTER);

if (Strings.hasLength(clusterAddresses)) {
String[] stringAddresses = clusterAddresses.split(",");
TransportAddress[] transportAddresses = new TransportAddress[stringAddresses.length];
int i = 0;
for (String stringAddress : stringAddresses) {
URL url = new URL("http://" + stringAddress);
InetAddress inetAddress = InetAddress.getByName(url.getHost());
transportAddresses[i++] = new TransportAddress(new InetSocketAddress(inetAddress, url.getPort()));
}
return new CustomExternalTestCluster(createTempDir(), externalClusterClientSettings(),
transportClientPlugins(), transportAddresses);
}
return super.buildTestCluster(scope, seed);
}

}
@@ -40,7 +40,7 @@ public TestData(String[] testFilePaths) {
public void loadToES(DocTest test) {
for (String filePath : testFilePaths) {
try {
TestUtils.loadBulk(test.client(), TEST_DATA_FOLDER_ROOT + filePath, indexName(filePath));
TestUtils.loadDataByRestClient(test.restClient(), indexName(filePath), TEST_DATA_FOLDER_ROOT + filePath);
} catch (Exception e) {
throw new IllegalStateException(StringUtils.format(
"Failed to load test filePath from %s", filePath), e);