CI convergence (flaky tests support, add internet tests) #2424
Changes from all commits: bc4a5b9, 50319ed, b448b83, fd05d46, 8567b2a, 8e60b28
test/message/message.status (new file) @@ -0,0 +1,18 @@

prefix message

# To mark a test as flaky, list the test name in the appropriate section
# below, without ".js", followed by ": PASS,FLAKY". Example:
# sample-test : PASS,FLAKY

[true] # This section applies to all platforms

[$system==win32]

[$system==linux]

[$system==macos]

[$system==solaris] # Also applies to SmartOS

[$system==freebsd]
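For readers unfamiliar with this status-file format: section headers are platform conditions, and each rule line maps a test name to a set of expected outcomes. The sketch below is illustrative only — the real parser in tools/test.py builds a proper Expression tree for conditions like `[$system==win32]` — but it shows roughly how a header and a rule line could be interpreted:

```python
# Rough, hypothetical sketch of status-file interpretation -- not the
# actual parser from tools/test.py.
def section_applies(header, system):
    cond = header.split('#')[0].strip().strip('[]')  # drop trailing comment
    if cond == 'true':
        return True  # [true] applies on every platform
    if cond.startswith('$system=='):
        return cond[len('$system=='):] == system
    return False

def parse_rule(line):
    # "sample-test : PASS,FLAKY" -> ('sample-test', {'PASS', 'FLAKY'})
    name, _, outcomes = line.partition(':')
    return name.strip(), set(o.strip() for o in outcomes.split(','))

print(section_applies('[$system==linux]', 'linux'))  # True
print(parse_rule('sample-test : PASS,FLAKY'))
```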
test/parallel/parallel.status (new file) @@ -0,0 +1,27 @@

prefix parallel

# To mark a test as flaky, list the test name in the appropriate section
# below, without ".js", followed by ": PASS,FLAKY". Example:
# sample-test : PASS,FLAKY

[true] # This section applies to all platforms

[$system==win32]
test-tls-ticket-cluster : PASS,FLAKY

[$system==linux]
test-cluster-worker-forced-exit : PASS,FLAKY
test-http-client-timeout-event : PASS,FLAKY
test-process-argv-0 : PASS,FLAKY
test-tick-processor : PASS,FLAKY
test-tls-no-sslv3 : PASS,FLAKY
test-child-process-buffering : PASS,FLAKY
test-child-process-exit-code : PASS,FLAKY

[$system==macos]

[$system==solaris] # Also applies to SmartOS
test-debug-signal-cluster : PASS,FLAKY

[$system==freebsd]
test-net-socket-local-address : PASS,FLAKY
test/sequential/sequential.status (new file) @@ -0,0 +1,18 @@

prefix sequential

# To mark a test as flaky, list the test name in the appropriate section
# below, without ".js", followed by ": PASS,FLAKY". Example:
# sample-test : PASS,FLAKY

[true] # This section applies to all platforms

[$system==win32]

[$system==linux]

[$system==macos]

[$system==solaris] # Also applies to SmartOS

[$system==freebsd]
tools/test.py

@@ -61,8 +61,9 @@

 class ProgressIndicator(object):

-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     self.cases = cases
+    self.flaky_tests_mode = flaky_tests_mode
     self.parallel_queue = Queue(len(cases))
     self.sequential_queue = Queue(len(cases))
     for case in cases:
@@ -74,7 +75,9 @@ def __init__(self, cases):
     self.remaining = len(cases)
     self.total = len(cases)
     self.failed = [ ]
+    self.flaky_failed = [ ]
     self.crashed = 0
+    self.flaky_crashed = 0
     self.lock = threading.Lock()
     self.shutdown_event = threading.Event()

Review comment (on flaky_crashed): Maybe make this an array to keep track of what tests crashed? EDIT: I guess that's inconsistent with …
@@ -142,9 +145,14 @@ def RunSingle(self, parallel, thread_id):
       return
     self.lock.acquire()
     if output.UnexpectedOutput():
-      self.failed.append(output)
-      if output.HasCrashed():
-        self.crashed += 1
+      if FLAKY in output.test.outcomes and self.flaky_tests_mode == DONTCARE:
+        self.flaky_failed.append(output)
+        if output.HasCrashed():
+          self.flaky_crashed += 1
+      else:
+        self.failed.append(output)
+        if output.HasCrashed():
+          self.crashed += 1
     else:
       self.succeeded += 1
     self.remaining -= 1
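Restated as a standalone helper (hypothetical, not part of this diff), the policy in RunSingle above is: an unexpected result from a FLAKY-marked test is diverted into the separate flaky_failed/flaky_crashed bookkeeping only in dontcare mode; in any other mode it still counts as a real failure:

```python
# Hypothetical restatement of the branch added to RunSingle above.
FLAKY = 'flaky'
DONTCARE = 'dontcare'

def failure_bucket(outcomes, flaky_tests_mode):
    # Which counter an unexpected test result should land in.
    if FLAKY in outcomes and flaky_tests_mode == DONTCARE:
        return 'flaky_failed'  # reported, but tolerated
    return 'failed'            # counts against the run

print(failure_bucket({'pass', 'flaky'}, 'dontcare'))  # flaky_failed
print(failure_bucket({'pass', 'flaky'}, 'run'))       # failed
```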
@@ -251,7 +259,10 @@ def HasRun(self, output):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      logger.info('not ok %i - %s' % (self._done, command))
+      status_line = 'not ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes and self.flaky_tests_mode == DONTCARE:
+        status_line = status_line + ' # TODO : Fix flaky test'
+      logger.info(status_line)
       for l in output.output.stderr.splitlines():
         logger.info('#' + l)
       for l in output.output.stdout.splitlines():
@@ -262,7 +273,10 @@ def HasRun(self, output):
         logger.info(
           'ok %i - %s # skip %s' % (self._done, command, skip.group(1)))
       else:
-        logger.info('ok %i - %s' % (self._done, command))
+        status_line = 'ok %i - %s' % (self._done, command)
+        if FLAKY in output.test.outcomes:
+          status_line = status_line + ' # TODO : Fix flaky test'
+        logger.info(status_line)

     duration = output.test.duration
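Together, these two hunks make the TAP reporter flag flaky tests with a TODO directive. Hypothetical output (test names invented) might look like:

```
ok 1 - test-something-stable
ok 2 - test-flaky-but-passed-today # TODO : Fix flaky test
not ok 3 - test-flaky-and-failed # TODO : Fix flaky test
```

Note the asymmetry: a passing flaky test always gets the directive, while a failing one gets it only when --flaky-tests=dontcare.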
@@ -280,8 +294,8 @@ def Done(self):

 class CompactProgressIndicator(ProgressIndicator):

-  def __init__(self, cases, templates):
-    super(CompactProgressIndicator, self).__init__(cases)
+  def __init__(self, cases, flaky_tests_mode, templates):
+    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -336,29 +350,29 @@ def PrintProgress(self, name):

 class ColorProgressIndicator(CompactProgressIndicator):

-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, templates)
+    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)

   def ClearLine(self, last_line_length):
     print "\033[1K\r",


 class MonochromeProgressIndicator(CompactProgressIndicator):

-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
       'stderr': '%s',
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)

   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -780,8 +794,8 @@ def GetVmFlags(self, testcase, mode):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[ARCH_GUESS or 'ia32'][mode]


-def RunTestCases(cases_to_run, progress, tasks):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
   return progress.Run(tasks)
@@ -805,7 +819,8 @@ def BuildRequirements(context, requirements, mode, scons_flags):
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
+FLAKY = 'flaky'
+DONTCARE = 'dontcare'

 class Expression(object):
   pass
@@ -1253,6 +1268,9 @@ def BuildOptions():
                         default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
                         default=False, action="store_true")
+  result.add_option("--flaky-tests",
+                        help="Regard tests marked as flaky (run|skip|dontcare)",
+                        default="run")
   result.add_option("--warn-unused", help="Report unused rules",
                         default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",

Review comment: s/dontcare/ignore/? I guess I'm wondering how you arrived at "dontcare".
Reply: I guess "ignore" would have worked as well as "dontcare". Since this is already used in v0.x, and in the CI rules, I'd rather leave it unchanged.
@@ -1303,6 +1321,9 @@ def ProcessOptions(options):
     return False
   if options.J:
     options.j = multiprocessing.cpu_count()
+  if options.flaky_tests not in ["run", "skip", "dontcare"]:
+    print "Unknown flaky-tests mode %s" % options.flaky_tests
+    return False
   return True

Reply: I am guessing (it's been a while since I wrote it) it was meant to be reused. But it's not. I can remove the function and inline the logic.
@@ -1505,7 +1526,9 @@ def Main():

   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes
+    if SKIP in case.outcomes or SLOW in case.outcomes:
+      return True
+    return FLAKY in case.outcomes and options.flaky_tests == SKIP
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if options.run is not None:
     # Must ensure the list of tests is sorted before selecting, to avoid
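A minimal sketch (hypothetical outcome sets; the constants mirror those defined earlier in test.py) of how this DoSkip policy behaves:

```python
# Sketch of the skip policy above; SKIP/SLOW/FLAKY mirror test.py constants.
SKIP, SLOW, FLAKY = 'skip', 'slow', 'flaky'

def do_skip(outcomes, flaky_tests):
    if SKIP in outcomes or SLOW in outcomes:
        return True
    # The CLI mode string "skip" equals the SKIP outcome constant,
    # which is what makes `options.flaky_tests == SKIP` work above.
    return FLAKY in outcomes and flaky_tests == SKIP

print(do_skip({'pass', 'flaky'}, 'skip'))      # True  -- not run at all
print(do_skip({'pass', 'flaky'}, 'dontcare'))  # False -- run, failure tolerated
print(do_skip({'pass'}, 'skip'))               # False -- ordinary test runs
```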
@@ -1522,7 +1545,7 @@ def DoSkip(case):
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j):
+      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
         result = 0
       else:
         result = 1
Review comment: I'd add #1100 since it can fail on virtually any platform at random.