From 0cfd3be9c603e21bdde0c1ed56a8d7725c19de5e Mon Sep 17 00:00:00 2001
From: Alexis Campailla
Date: Fri, 7 Nov 2014 14:15:24 +0100
Subject: [PATCH] test: runner support for flaky tests
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Adding a --flaky-tests option to allow regarding flaky test failures
as non-fatal.

Currently only observed by the TapProgressIndicator, which will add
a # TODO directive to tests classified as flaky.

According to the TAP specification, the test harness is supposed to
treat failures that have a # TODO directive as non-fatal.

Ported from https://github.com/joyent/node/commit/df3a2b2cf21274fe7afc19d14ec0259b964e13f7

PR-URL: https://github.com/nodejs/node/pull/2424
Reviewed-By: Ben Noordhuis
Reviewed-By: João Reis
Reviewed-By: Sakthipriyan Vairamani
---
 tools/test.py | 44 ++++++++++++++++++++++++++++++--------------
 1 file changed, 30 insertions(+), 14 deletions(-)

diff --git a/tools/test.py b/tools/test.py
index 43d349d49a892f..747bd499160fbd 100755
--- a/tools/test.py
+++ b/tools/test.py
@@ -61,8 +61,9 @@ class ProgressIndicator(object):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     self.cases = cases
+    self.flaky_tests_mode = flaky_tests_mode
     self.parallel_queue = Queue(len(cases))
     self.sequential_queue = Queue(len(cases))
     for case in cases:
@@ -251,7 +252,10 @@ def HasRun(self, output):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      logger.info('not ok %i - %s' % (self._done, command))
+      status_line = 'not ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes and self.flaky_tests_mode == DONTCARE:
+        status_line = status_line + ' # TODO : Fix flaky test'
+      logger.info(status_line)
       for l in output.output.stderr.splitlines():
         logger.info('#' + l)
       for l in output.output.stdout.splitlines():
@@ -262,7 +266,10 @@ def HasRun(self, output):
         logger.info(
           'ok %i - %s # skip %s' % (self._done, command, skip.group(1)))
       else:
-        logger.info('ok %i - %s' % (self._done, command))
+        status_line = 'ok %i - %s' % (self._done, command)
+        if FLAKY in output.test.outcomes:
+          status_line = status_line + ' # TODO : Fix flaky test'
+        logger.info(status_line)
 
     duration = output.test.duration
@@ -280,8 +287,8 @@ def Done(self):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, templates):
-    super(CompactProgressIndicator, self).__init__(cases)
+  def __init__(self, cases, flaky_tests_mode, templates):
+    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -336,13 +343,13 @@ def PrintProgress(self, name):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, templates)
+    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print "\033[1K\r",
@@ -350,7 +357,7 @@ def ClearLine(self, last_line_length):
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
@@ -358,7 +365,7 @@ def __init__(self, cases):
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -780,8 +787,8 @@ def GetVmFlags(self, testcase, mode):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[ARCH_GUESS or 'ia32'][mode]
 
-def RunTestCases(cases_to_run, progress, tasks):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
   return progress.Run(tasks)
 
@@ -805,7 +812,8 @@ def BuildRequirements(context, requirements, mode, scons_flags):
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
-
+FLAKY = 'flaky'
+DONTCARE = 'dontcare'
 
 class Expression(object):
   pass
@@ -1253,6 +1261,9 @@ def BuildOptions():
       default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
+  result.add_option("--flaky-tests",
+      help="Regard tests marked as flaky (run|skip|dontcare)",
+      default="run")
   result.add_option("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",
@@ -1303,6 +1314,9 @@ def ProcessOptions(options):
     return False
   if options.J:
     options.j = multiprocessing.cpu_count()
+  if options.flaky_tests not in ["run", "skip", "dontcare"]:
+    print "Unknown flaky-tests mode %s" % options.flaky_tests
+    return False
   return True
@@ -1505,7 +1519,9 @@ def Main():
 
   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes
+    if SKIP in case.outcomes or SLOW in case.outcomes:
+      return True
+    return FLAKY in case.outcomes and options.flaky_tests == SKIP
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if options.run is not None:
     # Must ensure the list of tests is sorted before selecting, to avoid
@@ -1522,7 +1538,7 @@ def DoSkip(case):
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j):
+      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
         result = 0
       else:
         result = 1
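
A note on the TAP semantics this change relies on, as a minimal sketch
(illustrative only, not part of the patch): a TAP consumer decides
whether a failing test is fatal by looking at the line's directive, so
the " # TODO : Fix flaky test" suffix emitted above is exactly what
demotes the failure. The TAP_LINE pattern, the is_fatal() helper, and
the test names below are hypothetical, written against the status lines
TapProgressIndicator emits; a real harness parses more of the protocol.

  import re

  # Matches TAP status lines such as:
  #   "ok 4 - test-foo"
  #   "not ok 7 - test-bar # TODO : Fix flaky test"
  # Simplified: assumes test names contain no '#'.
  TAP_LINE = re.compile(r'^(not )?ok\b[^#]*(?:#\s*(TODO|SKIP))?', re.IGNORECASE)

  def is_fatal(line):
    # "not ok" with no directive fails the run; "not ok ... # TODO" is
    # reported but tolerated, per the TAP spec.
    m = TAP_LINE.match(line)
    if m is None:
      return False  # plans, diagnostics, etc. never fail the run
    failed = m.group(1) is not None
    directive = (m.group(2) or '').upper()
    return failed and directive != 'TODO'

  print(is_fatal('not ok 1 - test-http-pipeline'))                     # True
  print(is_fatal('not ok 2 - test-fs-watch # TODO : Fix flaky test'))  # False
  print(is_fatal('ok 3 - test-buffer'))                                # False

With the patch applied, a run can demote flaky failures with e.g.
"python tools/test.py --flaky-tests=dontcare". "--flaky-tests=skip"
instead drops flaky tests from the run entirely (see DoSkip), and the
default, "run", keeps the previous behavior: flaky failures stay fatal.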