Commit

Merge pull request #137 from Xceptance/#121-print-stack-trace
#121 print stack trace
occupant23 authored Sep 1, 2020
2 parents 184f0bb + 862d3ec commit 3ddd0eb
Showing 14 changed files with 322 additions and 72 deletions.
1 change: 0 additions & 1 deletion src/main/java/com/xceptance/neodymium/util/Neodymium.java
@@ -396,5 +396,4 @@ public static boolean isSite(String... sites)
}
return false;
}

}
@@ -403,7 +403,6 @@ public static void wrapAssertionError(final Runnable runnable)
* @param verticalMovement
* The offset for the vertical movement
*/

public static void dragAndDrop(SelenideElement elementToMove, int horizontalMovement, int verticalMovement)
{
// perform drag and drop via the standard Selenium way
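For context, a hedged call sketch of the dragAndDrop helper documented in the hunk above; the slider locator is hypothetical and only the (SelenideElement, int, int) signature comes from this diff:

// drag a hypothetical slider handle 120 px to the right, no vertical movement
dragAndDrop($("#slider .handle"), 120, 0);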
@@ -13,13 +13,13 @@ public class AllureSelenideListenerTest extends NeodymiumTest
public void testAllureSelenideListenerIsActiveForCucumber()
{
Result result = JUnitCore.runClasses(CucumberValidateAllureSelenideListenerIsActive.class);
checkPass(result, 1, 0, 0);
checkPass(result, 1, 0);
}

@Test
public void testAllureSelenideListenerIsActiveForJava()
{
Result result = JUnitCore.runClasses(AllureSelenideListenerIsActiveForJava.class);
checkPass(result, 1, 0, 0);
checkPass(result, 1, 0);
}
}
8 changes: 4 additions & 4 deletions src/test/java/com/xceptance/neodymium/tests/CucumberTest.java
@@ -15,27 +15,27 @@ public class CucumberTest extends NeodymiumTest
public void testSetBrowserViaTestData() throws Exception
{
Result result = JUnitCore.runClasses(CucumberSetBrowserViaTestData.class);
checkPass(result, 1, 0, 0);
checkPass(result, 1, 0);
}

@Test
public void testSetBrowserViaTestDataFail() throws Exception
{
Result result = JUnitCore.runClasses(CucumberSetBrowserViaTestDataFail.class);
checkFail(result, 1, 0, 1, null);
checkFail(result, 1, 0, 1);
}

@Test
public void testSetBrowserViaTag() throws Exception
{
Result result = JUnitCore.runClasses(CucumberSetBrowserViaTag.class);
checkPass(result, 1, 0, 0);
checkPass(result, 1, 0);
}

@Test
public void testSetBrowserViaTagFail() throws Exception
{
Result result = JUnitCore.runClasses(CucumberSetBrowserViaTagFail.class);
checkFail(result, 1, 0, 1, null);
checkFail(result, 1, 0, 1);
}
}
@@ -14,14 +14,14 @@ public void testDataUtils() throws Exception
{
// test the data utils
Result result = JUnitCore.runClasses(DataUtilsTests.class);
checkPass(result, 9, 0, 0);
checkPass(result, 9, 0);
}

@Test
public void testDataUtilsXml() throws Exception
{
// test the data utils
Result result = JUnitCore.runClasses(DataUtilsTestsXml.class);
checkPass(result, 9, 0, 0);
checkPass(result, 9, 0);
}
}
@@ -101,15 +101,15 @@ public void testOverridingEnvironmentsAndBrowsers()
{
// test environment configuration
Result result = JUnitCore.runClasses(EnvironmentAndBrowserConfiguration.class);
checkPass(result, 5, 0, 0);
checkPass(result, 5, 0);
}

@Test
public void testConfigureBadEnvironmentProxies()
{
// test environment configuration
Result result = JUnitCore.runClasses(BadProxyEnvironmentConfiguration.class);
checkPass(result, 3, 0, 0);
checkPass(result, 3, 0);
}

@Test
@@ -50,51 +50,51 @@ public void testIsSite()
{
// test the isSite function
Result result = JUnitCore.runClasses(IsSiteTests.class);
checkPass(result, 10, 0, 0);
checkPass(result, 10, 0);
}

@Test
public void testContextGetCleared() throws Exception
{
// test that NeodymiumRunner clears the context before each run
Result result = JUnitCore.runClasses(ContextGetsCleared.class);
checkPass(result, 2, 0, 0);
checkPass(result, 2, 0);
}

@Test
public void testCucumberContextGetsCleared() throws Exception
{
// test that NeodymiumCucumberRunListener clears the context before each run
Result result = JUnitCore.runClasses(CucumberContextGetsCleared.class);
checkPass(result, 2, 0, 0);
checkPass(result, 2, 0);
}

@Test
public void testBrowserContextSetup() throws Exception
{
Result result = JUnitCore.runClasses(BrowserContextSetup.class);
checkPass(result, 1, 0, 0);
checkPass(result, 1, 0);
}

@Test
public void testDefaultSelenideConfigurationCheck() throws Exception
{
Result result = JUnitCore.runClasses(DefaultSelenideConfiguration.class);
checkPass(result, 2, 0, 0);
checkPass(result, 2, 0);
}

@Test
public void testSelenideConfigurationShortcuts() throws Exception
{
Result result = JUnitCore.runClasses(SelenideConfigurationShortcuts.class);
checkPass(result, 4, 0, 0);
checkPass(result, 4, 0);
}

@Test
public void testOverridingNeodymiumConfiguration() throws Exception
{
Result result = JUnitCore.runClasses(OverrideNeodymiumConfiguration.class);
checkPass(result, 2, 0, 0);
checkPass(result, 2, 0);
}

@Test
@@ -137,6 +137,6 @@ public void testContextWindowSize() throws Exception

// checks Neodymium functions for different browser sizes
Result result = JUnitCore.runClasses(WindowSizeTests.class);
checkPass(result, 5, 0, 0);
checkPass(result, 5, 0);
}
}
158 changes: 138 additions & 20 deletions src/test/java/com/xceptance/neodymium/tests/NeodymiumTest.java
@@ -7,16 +7,19 @@
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.runner.Description;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;

import com.xceptance.neodymium.NeodymiumRunner;

@@ -36,8 +39,11 @@ public static void cleanUp()

/**
* delete a temporary test file
*
* @param tempFile
* the tempFile that should be deleted after test execution
*/
public static void deleteTempFile(File tempFile)
public static void deleteTempFile(final File tempFile)
{
if (tempFile.exists())
{
@@ -53,36 +59,139 @@ public static void deleteTempFile(File tempFile)
}
}

public void check(Result result, boolean expectedSuccessful, int expectedRunCount, int expectedIgnoreCount, int expectedFailCount,
String expectedFailureMessage)
/**
* Basic method to perform assertions on a given test result.
*
* @param result
* test result to validate
* @param expectSuccessful
* the test result should be successful
* @param expectedRunCount
* expected number of run tests (including ignored)
* @param expectedIgnoreCount
* expected number of ignored tests
* @param expectedFailCount
* expected number of failed tests
* @param expectedFailureMessages
* expected failure messages mapped by name of test method
*/
public void check(final Result result, final boolean expectSuccessful, final int expectedRunCount, final int expectedIgnoreCount,
final int expectedFailCount, final Map<String, String> expectedFailureMessages)
{
Assert.assertEquals("Test successful", expectedSuccessful, result.wasSuccessful());
Assert.assertEquals("Method run count", expectedRunCount, result.getRunCount());
Assert.assertEquals("Method ignore count", expectedIgnoreCount, result.getIgnoreCount());
Assert.assertEquals("Method fail count", expectedFailCount, result.getFailureCount());
final Optional<String> accumulatedTrace = result.getFailures().stream().map(Failure::getTrace).reduce(String::concat);
final String stackTrace = accumulatedTrace.orElse("n/a");
try
{
Assert.assertEquals("Test successful", expectSuccessful, result.wasSuccessful());
Assert.assertEquals("Method run count", expectedRunCount, result.getRunCount());
Assert.assertEquals("Method ignore count", expectedIgnoreCount, result.getIgnoreCount());
Assert.assertEquals("Method fail count", expectedFailCount, result.getFailureCount());

if (expectedFailureMessage != null)
if (expectedFailureMessages != null)
{
final int failureCount = result.getFailureCount();
for (int i = 0; i < failureCount; i++)
{
final String methodName = result.getFailures().get(i).getDescription().getMethodName();
Assert.assertEquals("Failure message", expectedFailureMessages.get(methodName), result.getFailures().get(i).getMessage());
}
}
}
catch (AssertionError e)
{
Assert.assertTrue("Failure count", expectedFailCount == 1);
Assert.assertEquals("Failure message", expectedFailureMessage, result.getFailures().get(0).getMessage());
Assert.fail("Assertion failed. " + e.getMessage() + " Stack trace: " + stackTrace);
}
}

public void checkPass(Result result, int expectedRunCount, int expectedIgnoreCount, int expectedFailCount)
/**
* Assert that all tests have passed.
*
* @param result
* test result to validate
* @param expectedRunCount
* expected number of run tests (including ignored)
* @param expectedIgnoreCount
* expected number of ignored tests
*/
public void checkPass(final Result result, final int expectedRunCount, final int expectedIgnoreCount)
{
check(result, true, expectedRunCount, expectedIgnoreCount, 0, null);
}

/**
* Assert that at least one test has failed.
*
* @param result
* test result to validate
* @param expectedRunCount
* expected number of run tests (including ignored)
* @param expectedIgnoreCount
* expected number of ignored tests
* @param expectedFailCount
* expected number of failed tests
*/
public void checkFail(final Result result, final int expectedRunCount, final int expectedIgnoreCount, final int expectedFailCount)
{
check(result, false, expectedRunCount, expectedIgnoreCount, expectedFailCount, null);
}

/**
* Assert that at least one test has failed.
*
* @param result
* test result to validate
* @param expectedRunCount
* expected number of run tests (including ignored)
* @param expectedIgnoreCount
* expected number of ignored tests
* @param expectedFailCount
* expected number of failed tests
* @param expectedFailureMessage
* expected message of all failures (same message for each failure)
*/
public void checkFail(final Result result, final int expectedRunCount, final int expectedIgnoreCount, final int expectedFailCount,
final String expectedFailureMessage)
{
check(result, true, expectedRunCount, expectedIgnoreCount, expectedFailCount, null);
final HashMap<String, String> expectedFailureMessages = new HashMap<String, String>();
for (Failure failure : result.getFailures())
{
expectedFailureMessages.put(failure.getDescription().getMethodName(), expectedFailureMessage);
}
check(result, false, expectedRunCount, expectedIgnoreCount, expectedFailCount, expectedFailureMessages);
}

public void checkFail(Result result, int expectedRunCount, int expectedIgnoreCount, int expectedFailCount,
String expectedFailureMessage)
/**
* Assert that at least one test has failed.
*
* @param result
* test result to validate
* @param expectedRunCount
* expected number of run tests (including ignored)
* @param expectedIgnoreCount
* expected number of ignored tests
* @param expectedFailCount
* expected number of failed tests
* @param expectedFailureMessages
* expected failure messages mapped by name of test method
*/
public void checkFail(final Result result, final int expectedRunCount, final int expectedIgnoreCount, final int expectedFailCount,
final Map<String, String> expectedFailureMessages)
{
check(result, false, expectedRunCount, expectedIgnoreCount, expectedFailCount, expectedFailureMessage);
check(result, false, expectedRunCount, expectedIgnoreCount, expectedFailCount, expectedFailureMessages);
}

public void checkDescription(Description testDescription, String[] expectedTestDescription)
/**
* Assert that the test description is valid.
*
* @param testDescription
* the test description that should be tested
* @param expectedTestDescription
* expected test description as String array
*/
public void checkDescription(final Description testDescription, final String[] expectedTestDescription)
{
ArrayList<Description> testChildren = testDescription.getChildren();
String[] actualDescription = new String[testChildren.size()];
final ArrayList<Description> testChildren = testDescription.getChildren();
final String[] actualDescription = new String[testChildren.size()];

for (int i = 0; i < testChildren.size(); i++)
{
@@ -94,7 +203,15 @@ public void checkDescription(Class<?> clazz, String[] expectedTestD
Assert.assertArrayEquals(expectedTestDescription, actualDescription);
}

public void checkDescription(Class<?> clazz, String[] expectedTestDescription) throws Throwable
/**
* Assert that the test description is valid.
*
* @param clazz
* the class whose description should be tested
* @param expectedTestDescription
* expected test description as String array
*/
public void checkDescription(final Class<?> clazz, final String[] expectedTestDescription) throws Throwable
{
checkDescription(new NeodymiumRunner(clazz).getDescription(), expectedTestDescription);
}
@@ -105,11 +222,12 @@ public void checkDescription(Class<?> clazz, String[] expectedTestDescription) t
* @param map
* @param file
*/
public static void writeMapToPropertiesFile(Map<String, String> map, File file)
public static void writeMapToPropertiesFile(final Map<String, String> map, final File file)
{
String propertiesString = map.entrySet().stream()
.map(entry -> entry.getKey() + "=" + entry.getValue())
.collect(Collectors.joining(System.lineSeparator()));

try
{
FileUtils.writeStringToFile(file, propertiesString, StandardCharsets.UTF_8);
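For orientation, a minimal usage sketch of the reworked assertion helpers defined above, as a subclass of NeodymiumTest would call them; the test class names, test method name, and failure message are hypothetical, only the helper signatures come from this diff:

// all tests pass: 3 run, 0 ignored
Result passed = JUnitCore.runClasses(SomePassingTests.class);
checkPass(passed, 3, 0);

// one test fails; its message is matched per test method name
Result failed = JUnitCore.runClasses(SomeFailingTest.class);
Map<String, String> expectedFailureMessages = new HashMap<>();
expectedFailureMessages.put("testThatFails", "expected failure message");
checkFail(failed, 1, 0, 1, expectedFailureMessages);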