Commit 20f8e7f

test: remove flaky test functionality
Reverts nodejs/node-v0.x-archive#8689

PR-URL: #812
Reviewed-By: Jeremiah Senkpiel <[email protected]>
Reviewed-By: Johan Bergström <[email protected]>
Reviewed-By: Ben Noordhuis <[email protected]>
Reviewed-By: Colin Ihrig <[email protected]>
1 parent fc6507d commit 20f8e7f
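
What is being removed, in brief: tests could carry a FLAKY outcome, --flaky-tests=skip dropped them from the run, and --flaky-tests=dontcare downgraded their failures to TAP TODO items. The following is a condensed sketch of that removed behavior, with hypothetical helper names; the real logic lived inline in tools/test.py, as the diff below shows.

# Condensed sketch of the reverted behavior; helper names are hypothetical.
FLAKY = 'flaky'
SKIP = 'skip'
SLOW = 'slow'

def should_skip(outcomes, flaky_tests_mode):
  # --flaky-tests=skip dropped flaky tests from the run entirely.
  return (SKIP in outcomes or SLOW in outcomes or
          (FLAKY in outcomes and flaky_tests_mode == 'skip'))

def tap_status_line(number, command, outcomes, passed, flaky_tests_mode):
  # Flaky passes were always annotated; flaky failures only under
  # --flaky-tests=dontcare, which kept them from failing the TAP run.
  line = '%s %i - %s' % ('ok' if passed else 'not ok', number, command)
  if FLAKY in outcomes and (passed or flaky_tests_mode == 'dontcare'):
    line += ' # TODO : Fix flaky test'
  return line

print tap_status_line(1, 'test-example.js', set([FLAKY]), False, 'dontcare')
# prints: not ok 1 - test-example.js # TODO : Fix flaky test

After the revert, FLAKY is no longer a recognized outcome, the --flaky-tests option is gone, and flaky failures are reported as plain failures.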

File tree

4 files changed (+13, -41)

test/internet/internet.status (-1): file deleted

test/parallel/simple.status (-4): file deleted

test/pummel/pummel.status (-1): file deleted
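
These three status files are deleted outright and their contents are not shown in this view. Purely as an illustration of the format such files used (the exact contents and the test name here are assumptions, not the deleted text), an entry tagging a test as flaky looked along these lines:

  # illustrative entry only, not the actual deleted file
  prefix parallel

  [true]
  test-some-flaky-case : PASS,FLAKY

Deleting them together with the FLAKY outcome below keeps the status files consistent with what tools/test.py still understands.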

tools/test.py (+13, -35)

@@ -57,9 +57,8 @@
 
 class ProgressIndicator(object):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     self.cases = cases
-    self.flaky_tests_mode = flaky_tests_mode
     self.parallel_queue = Queue(len(cases))
     self.sequential_queue = Queue(len(cases))
     for case in cases:
@@ -248,19 +247,13 @@ def HasRun(self, output):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      status_line = 'not ok %i - %s' % (self._done, command)
-      if FLAKY in output.test.outcomes and self.flaky_tests_mode == "dontcare":
-        status_line = status_line + " # TODO : Fix flaky test"
-      print status_line
+      print 'not ok %i - %s' % (self._done, command)
       for l in output.output.stderr.splitlines():
         print '#' + l
       for l in output.output.stdout.splitlines():
         print '#' + l
     else:
-      status_line = 'ok %i - %s' % (self._done, command)
-      if FLAKY in output.test.outcomes:
-        status_line = status_line + " # TODO : Fix flaky test"
-      print status_line
+      print 'ok %i - %s' % (self._done, command)
 
     duration = output.test.duration
 
@@ -278,8 +271,8 @@ def Done(self):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode, templates):
-    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
+  def __init__(self, cases, templates):
+    super(CompactProgressIndicator, self).__init__(cases)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -334,29 +327,29 @@ def PrintProgress(self, name):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(ColorProgressIndicator, self).__init__(cases, templates)
 
   def ClearLine(self, last_line_length):
     print "\033[1K\r",
 
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
      'stdout': '%s',
      'stderr': '%s',
      'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
      'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, templates)
 
   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -776,8 +769,8 @@ def GetVmFlags(self, testcase, mode):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[mode]
 
-def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
+def RunTestCases(cases_to_run, progress, tasks):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run)
   return progress.Run(tasks)
 
 
@@ -801,7 +794,6 @@ def BuildRequirements(context, requirements, mode, scons_flags):
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
-FLAKY = 'flaky'
 
 
 class Expression(object):
@@ -1248,9 +1240,6 @@ def BuildOptions():
       default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
-  result.add_option("--flaky-tests",
-      help="Regard tests marked as flaky (run|skip|dontcare)",
-      default="run")
   result.add_option("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",
@@ -1280,35 +1269,24 @@ def ProcessOptions(options):
   options.mode = options.mode.split(',')
   if options.J:
     options.j = multiprocessing.cpu_count()
-  def CheckTestMode(name, option):
-    if not option in ["run", "skip", "dontcare"]:
-      print "Unknown %s mode %s" % (name, option)
-      return False
-    return True
-  if not CheckTestMode("--flaky-tests", options.flaky_tests):
-    return False
   return True
 
 
 REPORT_TEMPLATE = """\
 Total: %(total)i tests
  * %(skipped)4d tests will be skipped
- * %(nocrash)4d tests are expected to be flaky but not crash
  * %(pass)4d tests are expected to pass
  * %(fail_ok)4d tests are expected to fail that we won't fix
  * %(fail)4d tests are expected to fail that we should fix\
 """
 
 def PrintReport(cases):
-  def IsFlaky(o):
-    return (PASS in o) and (FAIL in o) and (not CRASH in o) and (not OKAY in o)
   def IsFailOk(o):
     return (len(o) == 2) and (FAIL in o) and (OKAY in o)
   unskipped = [c for c in cases if not SKIP in c.outcomes]
   print REPORT_TEMPLATE % {
     'total': len(cases),
     'skipped': len(cases) - len(unskipped),
-    'nocrash': len([t for t in unskipped if IsFlaky(t.outcomes)]),
     'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]),
     'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]),
     'fail': len([t for t in unskipped if list(t.outcomes) == [FAIL]])
@@ -1486,15 +1464,15 @@ def Main():
 
   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes or (FLAKY in case.outcomes and options.flaky_tests == "skip")
+    return SKIP in case.outcomes or SLOW in case.outcomes
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if len(cases_to_run) == 0:
     print "No tests to run."
     return 1
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
+      if RunTestCases(cases_to_run, options.progress, options.j):
         result = 0
       else:
         result = 1
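
Why the removed " # TODO : Fix flaky test" suffix mattered: in TAP, a "not ok" line that carries a TODO directive is reported but not counted as a failure by consumers, so flaky failures did not break the run. A minimal consumer-side sketch of that rule (hypothetical helper, not part of this repository):

import re

# A failing TAP test line only counts as a failure if it has no TODO directive.
TAP_TEST_LINE = re.compile(r'^(ok|not ok)\b.*?(#\s*TODO\b.*)?$')

def counts_as_failure(tap_line):
  m = TAP_TEST_LINE.match(tap_line)
  if not m:
    return False                      # not a test line at all
  ok = m.group(1) == 'ok'
  todo = m.group(2) is not None
  return (not ok) and (not todo)

print counts_as_failure('not ok 1 - test-foo # TODO : Fix flaky test')  # False
print counts_as_failure('not ok 2 - test-bar')                          # True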
