From df3a2b2cf21274fe7afc19d14ec0259b964e13f7 Mon Sep 17 00:00:00 2001
From: Alexis Campailla
Date: Fri, 7 Nov 2014 14:15:24 +0100
Subject: [PATCH] test: runner support for flaky tests

Add a --flaky-tests option that allows flaky test failures to be
regarded as non-fatal.

The option is currently observed only by the TapProgressIndicator,
which appends a # TODO directive to tests classified as flaky.

According to the TAP specification, the test harness is supposed to
treat failures that carry a # TODO directive as non-fatal.
---
 tools/test.py | 46 ++++++++++++++++++++++++++++++++--------------
 1 file changed, 32 insertions(+), 14 deletions(-)

diff --git a/tools/test.py b/tools/test.py
index 0772f9ad321..579d444f6c5 100755
--- a/tools/test.py
+++ b/tools/test.py
@@ -55,8 +55,9 @@ class ProgressIndicator(object):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     self.cases = cases
+    self.flaky_tests_mode = flaky_tests_mode
     self.queue = Queue(len(cases))
     for case in cases:
       self.queue.put_nowait(case)
@@ -234,13 +235,19 @@ def HasRun(self, output):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      print 'not ok %i - %s' % (self._done, command)
+      status_line = 'not ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes and self.flaky_tests_mode == "dontcare":
+        status_line = status_line + " # TODO : Fix flaky test"
+      print status_line
       for l in output.output.stderr.splitlines():
         print '#' + l
       for l in output.output.stdout.splitlines():
         print '#' + l
     else:
-      print 'ok %i - %s' % (self._done, command)
+      status_line = 'ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes:
+        status_line = status_line + " # TODO : Fix flaky test"
+      print status_line
 
     duration = output.test.duration
@@ -258,8 +265,8 @@ def Done(self):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, templates):
-    super(CompactProgressIndicator, self).__init__(cases)
+  def __init__(self, cases, flaky_tests_mode, templates):
+    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -314,13 +321,13 @@ def PrintProgress(self, name):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, templates)
+    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print "\033[1K\r",
@@ -328,7 +335,7 @@ def ClearLine(self, last_line_length):
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
@@ -336,7 +343,7 @@ def __init__(self, cases):
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -738,8 +745,8 @@ def GetVmFlags(self, testcase, mode):
 
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[mode]
 
-def RunTestCases(cases_to_run, progress, tasks):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
   return progress.Run(tasks)
 
@@ -763,6 +770,7 @@ def BuildRequirements(context, requirements, mode, scons_flags):
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
+FLAKY = 'flaky'
 
 
 class Expression(object):
@@ -1212,6 +1220,9 @@ def BuildOptions():
       default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
+  result.add_option("--flaky-tests",
+      help="Regard tests marked as flaky (run|skip|dontcare)",
+      default="run")
   result.add_option("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",
@@ -1258,6 +1269,13 @@ def ProcessOptions(options):
     options.scons_flags.append("arch=" + options.arch)
   if options.snapshot:
     options.scons_flags.append("snapshot=on")
+  def CheckTestMode(name, option):
+    if not option in ["run", "skip", "dontcare"]:
+      print "Unknown %s mode %s" % (name, option)
+      return False
+    return True
+  if not CheckTestMode("--flaky-tests", options.flaky_tests):
+    return False
   return True
@@ -1457,15 +1475,15 @@ def wrap(processor):
 
   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes
+    return SKIP in case.outcomes or SLOW in case.outcomes or (FLAKY in case.outcomes and options.flaky_tests == "skip")
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if len(cases_to_run) == 0:
     print "No tests to run."
-    return 0
+    return 1
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j):
+      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
        result = 0
      else:
        result = 1
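
Note: for illustration, the lines below sketch roughly the TAP stream the
patched runner would produce when invoked with the new option. This is a
sketch, not output from an actual run: the test file names are hypothetical
and the numbering depends on the suite. Under --flaky-tests=dontcare a
failing flaky test is still reported as "not ok", but the # TODO directive
tells a TAP-compliant harness to treat the failure as non-fatal; a passing
flaky test carries the same directive as a reminder that it needs fixing.

  $ python tools/test.py --flaky-tests=dontcare
  ok 1 - test-stable.js
  not ok 2 - test-flaky.js # TODO : Fix flaky test
  #<stderr/stdout of the failing test, one '#'-prefixed line each>

With --flaky-tests=skip, tests marked flaky are filtered out by DoSkip and
never run; with the default --flaky-tests=run, a failing flaky test is
printed as a plain "not ok" line and remains fatal.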