Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

issue-226 Renamed unit tests #391

Merged
merged 2 commits into from
Dec 3, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 9 additions & 9 deletions src/test/java/com/teragrep/pth10/CatalystVisitorTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ void tearDown() {
named = "skipSparkTest",
matches = "true"
)
void fromStringNot2Test() {
void searchQueryWithNotTest() {
String q = "index = \"cpu\" AND sourcetype = \"log:cpu:0\" NOT src";

this.streamingTestUtil.performDPLTest(q, this.testFile, res -> {
Expand Down Expand Up @@ -134,7 +134,7 @@ void columnFromStringTest() {
named = "skipSparkTest",
matches = "true"
)
void columnFromStringDateTest() {
void searchQueryWithTimestampTest() {
// Add time ranges
String q = "((( index =\"cpu\" AND host = \"sc-99-99-14-25\" ) AND sourcetype = \"log:cpu:0\" ) AND ( earliest= \"01/01/1970:02:00:00\" AND latest= \"01/01/2030:00:00:00\" ))";
this.streamingTestUtil.performDPLTest(q, this.testFile, res -> {
Expand Down Expand Up @@ -163,7 +163,7 @@ void columnFromStringDateTest() {
named = "skipSparkTest",
matches = "true"
)
void columnFromStringAndTest() {
void searchQueryWithAndTest() {
//LOGGER.info("------ AND ---------");
String q = "index =\"strawberry\" AND sourcetype =\"example:strawberry:strawberry\"";
this.streamingTestUtil.performDPLTest(q, this.testFile, res -> {
Expand All @@ -180,7 +180,7 @@ void columnFromStringAndTest() {
named = "skipSparkTest",
matches = "true"
)
void columnFromStringOrTest() {
void searchQueryWithOrTest() {
//LOGGER.info("------ OR ---------");
String q = "index != \"strawberry\" OR sourcetype =\"example:strawberry:strawberry\"";
this.streamingTestUtil.performDPLTest(q, this.testFile, res -> {
Expand All @@ -197,7 +197,7 @@ void columnFromStringOrTest() {
named = "skipSparkTest",
matches = "true"
)
void fromStringFullTest() {
void searchQueryWithAggrTest() {
String q = "index = cinnamon _index_earliest=\"04/16/2020:10:25:40\" | chart count(_raw) as count by _time | where count > 70";
this.streamingTestUtil.performDPLTest(q, this.testFile, res -> {
DPLTimeFormat tf = new DPLTimeFormat("MM/dd/yyyy:HH:mm:ss");
Expand All @@ -218,7 +218,7 @@ void fromStringFullTest() {
named = "skipSparkTest",
matches = "true"
)
void endToEndTest() {
void searchQueryWithIndexEarliestTest() {
this.streamingTestUtil
.performDPLTest("index = cinnamon _index_earliest=\"04/16/2020:10:25:40\"", this.testFile, res -> {
String e = "[_raw: string, _time: string ... 6 more fields]";
Expand All @@ -240,7 +240,7 @@ void endToEndTest() {
named = "skipSparkTest",
matches = "true"
)
void endToEnd2Test() {
void searchQueryWithStringTest() {
// Use this file as the test data
String testFile = "src/test/resources/subsearchData*.jsonl";

Expand All @@ -266,7 +266,7 @@ void endToEnd2Test() {
named = "skipSparkTest",
matches = "true"
)
void endToEnd6Test() {
void aggregatesUsedTest() {
this.streamingTestUtil.performDPLTest("index = jla02logger ", this.testFile, res -> {
boolean aggregates = this.streamingTestUtil.getCatalystVisitor().getAggregatesUsed();
Assertions.assertFalse(aggregates);
Expand All @@ -279,7 +279,7 @@ void endToEnd6Test() {
named = "skipSparkTest",
matches = "true"
)
void searchQualifierMissingRightSide_Issue179_Test() {
void searchQualifierMissingRightSideTest() {
// assert user-friendly exception
RuntimeException thrown = this.streamingTestUtil
.performThrowingDPLTest(RuntimeException.class, "index = ", this.testFile, res -> {
Expand Down
26 changes: 13 additions & 13 deletions src/test/java/com/teragrep/pth10/ConvertTransformationTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ void tearDown() {
named = "skipSparkTest",
matches = "true"
)
void convert1_ctime() {
void testConvertCtimeAs() {
// "%m/%d/%Y %H:%M:%S";
streamingTestUtil.performDPLTest("index=index_A | convert ctime(offset) AS new", testFile, ds -> {
Assertions
Expand Down Expand Up @@ -117,7 +117,7 @@ void convert1_ctime() {
named = "skipSparkTest",
matches = "true"
)
void convert2_ctime() {
void testConvertCtime() {
streamingTestUtil.performDPLTest("index=index_A | convert ctime(offset)", testFile, ds -> {
Assertions
.assertEquals(
Expand Down Expand Up @@ -149,7 +149,7 @@ void convert2_ctime() {
named = "runSparkTest",
matches = "true"
)
void convert3_mktime() {
void testConvertMktime() {
streamingTestUtil
.performDPLTest(
"index=index_A | convert timeformat=\"%Y-%m-%d'T'%H:%M:%S.%f%z\" mktime(_time) as epochTime",
Expand Down Expand Up @@ -186,7 +186,7 @@ void convert3_mktime() {
named = "skipSparkTest",
matches = "true"
)
void convert3_mktime2() { // Use the system default timezone when timezone is not specified
void testConvertMktimeWithDefaultTimezone() { // Use the system default timezone when timezone is not specified
streamingTestUtil
.performDPLTest(
"index=index_A | eval a=\"2001-01-01T01:01:01.010\" | convert timeformat=\"%Y-%m-%d'T'%H:%M:%S.%f\" mktime(a) as epochTime",
Expand Down Expand Up @@ -220,7 +220,7 @@ void convert3_mktime2() { // Use the system default timezone when timezone is no
named = "runSparkTest",
matches = "true"
)
void convert4_dur2sec() {
void testConvertDur2sec() {
streamingTestUtil.performDPLTest("index=index_A | convert dur2sec(dur) as dur_sec", testFile, ds -> {
Assertions
.assertEquals(
Expand Down Expand Up @@ -251,7 +251,7 @@ void convert4_dur2sec() {
named = "skipSparkTest",
matches = "true"
)
void convert5_memk() {
void testConvertMemk() {
streamingTestUtil
.performDPLTest(
"index=index_A | strcat offset \"m\" offsetM | strcat offset \"k\" offsetK | strcat offset \"g\" offsetG | convert memk(offsetM) as memk_M memk(offsetK) as memk_K memk(offsetG) as memk_G memk(offset) as memk_def",
Expand Down Expand Up @@ -317,7 +317,7 @@ void convert5_memk() {
named = "skipSparkTest",
matches = "true"
)
void convert6_mstime() {
void testConvertMstime() {
streamingTestUtil
.performDPLTest(
"index=index_A | strcat \"\" \"47.\" \"329\" mst | strcat \"32:\" \"47.\" \"329\" mst2 | convert mstime(mst) as res mstime(mst2) as res2",
Expand Down Expand Up @@ -356,7 +356,7 @@ void convert6_mstime() {
named = "skipSparkTest",
matches = "true"
)
void convert7_rmcomma() {
void testConvertRmcomma() {
streamingTestUtil
.performDPLTest(
"index=index_A | strcat \"\" \"47,\" \"329\" mst | strcat \"32,\" \"47,\" \"329\" mst2 | convert rmcomma(mst) as res rmcomma(mst2) as res2",
Expand Down Expand Up @@ -395,7 +395,7 @@ void convert7_rmcomma() {
named = "skipSparkTest",
matches = "true"
)
void convert8_rmunit() {
void testConvertRmunit() {
streamingTestUtil
.performDPLTest(
"index=index_A | strcat \"329\" \"abc\" as mst | convert rmunit(mst) as res", testFile, ds -> {
Expand Down Expand Up @@ -425,7 +425,7 @@ void convert8_rmunit() {
named = "skipSparkTest",
matches = "true"
)
void convert8_rmunit2() {
void testConvertRmunitWithFloat() {
streamingTestUtil
.performDPLTest(
"index=index_A | strcat \"329.45\" \"abc\" as mst | convert rmunit(mst) as res", testFile,
Expand Down Expand Up @@ -638,7 +638,7 @@ void convert8_rmunit8() {
named = "skipSparkTest",
matches = "true"
)
void convert9_auto() {
void testConvertAuto() {
streamingTestUtil
.performDPLTest(
"index=index_A | strcat \"329\" \"\" with_results |strcat \"329\" \"aa\" no_results | convert auto(with_results) | convert auto(no_results)",
Expand Down Expand Up @@ -678,7 +678,7 @@ void convert9_auto() {
named = "skipSparkTest",
matches = "true"
)
void convert10_num() {
void testConvertNum() {
streamingTestUtil
.performDPLTest(
"index=index_A | strcat \"329\" \"\" with_results |strcat \"329\" \"aa\" no_results | convert num(with_results) | convert num(no_results)",
Expand Down Expand Up @@ -718,7 +718,7 @@ void convert10_num() {
named = "skipSparkTest",
matches = "true"
)
void convert11_none() {
void testConvertNone() {
streamingTestUtil
.performDPLTest(
"index=index_A | convert dur2sec(\"dur|offset\") AS dur_sec none(offset)", testFile, ds -> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ void tearDown() {
named = "skipSparkTest",
matches = "true"
)
void formatTransformationTest0() {
void testFormatTransformation() {
String q = "index=index_A | format ";

streamingTestUtil
Expand Down Expand Up @@ -152,12 +152,13 @@ void formatTransformationTest0() {
named = "skipSparkTest",
matches = "true"
)
void formatTransformationTest1() {
void testFormatWithMultiValue() {
String q = "index=index_A | eval a=mvappend(\"1\", \"2\") | format maxresults=1 ";

streamingTestUtil
.performDPLTest(
q, testFile, res -> {
res.show(false);
// Check if result contains the column that was created for format result
Assertions.assertTrue(Arrays.toString(res.columns()).contains("search"));

Expand Down Expand Up @@ -186,7 +187,7 @@ void formatTransformationTest1() {
named = "skipSparkTest",
matches = "true"
)
void formatTransformationTest2() {
void testFormatTransformationWithSearchStrings() {
String q = "index=index_A | format maxresults=2 \"ROWPRE\" \"COLPRE\" \"COLSEP\" \"COLSUF\"\"ROWSEP\" \"ROWSUF\" ";

streamingTestUtil
Expand Down
12 changes: 6 additions & 6 deletions src/test/java/com/teragrep/pth10/RangemapTransformationTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ void tearDown() {
named = "skipSparkTest",
matches = "true"
)
public void rangemapTest0() {
public void testRangemap() {
streamingTestUtil.performDPLTest("index=* | rangemap field=_raw", testFile, ds -> {
List<Row> result = ds.select("range").distinct().collectAsList();
Assertions.assertEquals(1, result.size());
Expand All @@ -115,7 +115,7 @@ public void rangemapTest0() {
named = "skipSparkTest",
matches = "true"
)
public void rangemapTest1() {
public void testRangemapDefault() {
streamingTestUtil.performDPLTest("index=* | rangemap field=_raw default=xyz", testFile, ds -> {
List<Row> result = ds.select("range").distinct().collectAsList();
Assertions.assertEquals(1, result.size());
Expand All @@ -128,7 +128,7 @@ public void rangemapTest1() {
named = "skipSparkTest",
matches = "true"
)
public void rangemapTest2() {
public void testRangemapAttributeName() {
streamingTestUtil
.performDPLTest("index=* | rangemap field=_raw lo=0-5 med=6-34 hi=35-48 vlo=-20--10", testFile, ds -> {
List<Row> result = ds.select("_raw", "range").collectAsList();
Expand Down Expand Up @@ -162,7 +162,7 @@ else if (val == 47.2d) {
named = "skipSparkTest",
matches = "true"
)
public void rangemapTest3() {
public void testRangemapNoFieldParameter() {
IllegalArgumentException iae = this.streamingTestUtil
.performThrowingDPLTest(IllegalArgumentException.class, "index=* | rangemap", testFile, ds -> {
});
Expand All @@ -174,7 +174,7 @@ public void rangemapTest3() {
named = "skipSparkTest",
matches = "true"
)
public void rangemapTest4() {
public void testRangemapWithUnmatchedRange() {
streamingTestUtil
.performDPLTest(
"| makeresults | eval _raw = \"string\" | rangemap field=_raw r0=0-10 r1=11-20", testFile,
Expand All @@ -192,7 +192,7 @@ public void rangemapTest4() {
named = "skipSparkTest",
matches = "true"
)
public void rangemapMultiValueTest() {
public void testRangemapMultiValue() {
streamingTestUtil
.performDPLTest(
"index=* | eval a = mvappend(\"1\",\"3\",\"3\",\"a\") |rangemap field=a lo=1-2 hi=3-4",
Expand Down
10 changes: 5 additions & 5 deletions src/test/java/com/teragrep/pth10/RegexTransformationTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ void tearDown() {
named = "skipSparkTest",
matches = "true"
)
public void regexTest1() {
public void testRegexFieldNotEqual() {
streamingTestUtil.performDPLTest("index=index_A | regex _raw != \"data data\"", testFile, ds -> {
Assertions.assertEquals(0, ds.collectAsList().size());
});
Expand All @@ -115,7 +115,7 @@ public void regexTest1() {
named = "skipSparkTest",
matches = "true"
)
public void regexTest2() {
public void testRegexFieldEqual() {
streamingTestUtil.performDPLTest("index=index_A | regex _raw = \"data data\"", testFile, ds -> {
int size = ds.collectAsList().size();
Assertions.assertTrue(size > 1);
Expand All @@ -127,7 +127,7 @@ public void regexTest2() {
named = "skipSparkTest",
matches = "true"
)
public void regexTest3() {
public void testRegexWithString() {
streamingTestUtil.performDPLTest("index=index_A | regex \"data data\"", testFile, ds -> {
int size = ds.collectAsList().size();
Assertions.assertTrue(size > 1);
Expand All @@ -139,7 +139,7 @@ public void regexTest3() {
named = "skipSparkTest",
matches = "true"
)
public void regexTest4() {
public void testRegexMatchedPattern() {
streamingTestUtil.performDPLTest("index=index_A | regex \"^[d|D][a|z][t|T][a|B]\\s.{4}$\"", testFile, ds -> {
int size = ds.collectAsList().size();
Assertions.assertTrue(size > 1);
Expand All @@ -151,7 +151,7 @@ public void regexTest4() {
named = "skipSparkTest",
matches = "true"
)
public void regexTest5() {
public void testRegexUnmatchedPattern() {
streamingTestUtil.performDPLTest("index=index_A | regex \"^[d|D][a|z][t|T][c|B]\\s.{4}$\"", testFile, ds -> {
Assertions.assertEquals(0, ds.collectAsList().size());
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ void tearDown() {
named = "skipSparkTest",
matches = "true"
)
public void rename_test_1() {
public void testRenameMultipleFields() {
streamingTestUtil
.performDPLTest(
"index=index_A | rename _raw AS DATA , offset AS number, sourcetype AS typeOfSource, INVALID_FIELD AS fieldOfInvalid",
Expand Down
12 changes: 6 additions & 6 deletions src/test/java/com/teragrep/pth10/ReplaceTransformationTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ void tearDown() {
named = "skipSparkTest",
matches = "true"
) // Standard replace, without wildcards in WITH-clause
public void replace_test_1() {
public void testReplaceWithoutWildcardsInWith() {
streamingTestUtil
.performDPLTest("index=index_A | replace \"?$.data*\" WITH \"SomethingNew\" IN _raw", testFile, ds -> {
List<String> listOfRawCol = ds
Expand All @@ -121,13 +121,13 @@ public void replace_test_1() {
Assertions.assertEquals("SomethingNew", listOfRawCol.get(0));
});
}

//"?$.data^){"
@Test
@DisabledIfSystemProperty(
named = "skipSparkTest",
matches = "true"
) // One trailing wildcard in WITH-clause
public void replace_test_2() {
public void testReplaceWildcardAsTrailingInWith() {
streamingTestUtil
.performDPLTest("index=index_A | replace \"?$.data*\" WITH \"SomethingNew*\" IN _raw", testFile, ds -> {
List<String> listOfRawCol = ds
Expand All @@ -147,7 +147,7 @@ public void replace_test_2() {
named = "skipSparkTest",
matches = "true"
) // One wildcard in WITH-clause as a prefix
public void replace_test_3() {
public void testReplaceWildcardAsPrefixInWith() {
streamingTestUtil
.performDPLTest("index=index_A | replace \"*data^){\" WITH \"SomethingNew*\" IN _raw", testFile, ds -> {
List<String> listOfRawCol = ds
Expand All @@ -167,7 +167,7 @@ public void replace_test_3() {
named = "skipSparkTest",
matches = "true"
) // Two wildcards, both as a prefix and trailing in WITH-clause
public void replace_test_4() {
public void testReplaceWildcardsAsBothPrefixTrailingInWith() {
streamingTestUtil
.performDPLTest("index=index_A | replace \"*data*\" WITH \"*SomethingNew*\" IN _raw", testFile, ds -> {
List<String> listOfRawCol = ds
Expand All @@ -187,7 +187,7 @@ public void replace_test_4() {
named = "skipSparkTest",
matches = "true"
) // Two x WITH y constructs
public void replaceTwoValuesTest() {
public void testReplaceTwoValues() {
streamingTestUtil
.performDPLTest(
"index=index_A | replace host WITH lost, index_A WITH index_B IN host, index", testFile, ds -> {
Expand Down
Loading