Skip to content

Commit bfe0ac6

Browse files
aduh95 authored and danielleadams committed
tools: add more options to track flaky tests
Refs: #43929 (comment) PR-URL: #43954 Reviewed-By: Matteo Collina <[email protected]> Reviewed-By: Tobias Nießen <[email protected]> Reviewed-By: Feng Yu <[email protected]>
1 parent cb9b0e0 commit bfe0ac6

10 files changed

+43
-26
lines changed

.github/workflows/build-tarball.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ concurrency:
2929

3030
env:
3131
PYTHON_VERSION: '3.10'
32-
FLAKY_TESTS: dontcare
32+
FLAKY_TESTS: keep_retrying
3333

3434
permissions:
3535
contents: read
@@ -94,4 +94,4 @@ jobs:
9494
- name: Test
9595
run: |
9696
cd $TAR_DIR
97-
make run-ci -j2 V=1 TEST_CI_ARGS="-p dots"
97+
make run-ci -j2 V=1 TEST_CI_ARGS="-p dots --measure-flakiness 9"

.github/workflows/build-windows.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ concurrency:
2424

2525
env:
2626
PYTHON_VERSION: '3.10'
27-
FLAKY_TESTS: dontcare
27+
FLAKY_TESTS: keep_retrying
2828

2929
permissions:
3030
contents: read

.github/workflows/coverage-linux.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ concurrency:
2727

2828
env:
2929
PYTHON_VERSION: '3.10'
30-
FLAKY_TESTS: dontcare
30+
FLAKY_TESTS: keep_retrying
3131

3232
permissions:
3333
contents: read
@@ -53,7 +53,7 @@ jobs:
5353
# TODO(bcoe): fix the couple tests that fail with the inspector enabled.
5454
# The cause is most likely coverage's use of the inspector.
5555
- name: Test
56-
run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots" || exit 0
56+
run: NODE_V8_COVERAGE=coverage/tmp make test-cov -j2 V=1 TEST_CI_ARGS="-p dots --measure-flakiness 9" || exit 0
5757
- name: Report JS
5858
run: npx c8 report --check-coverage
5959
env:

.github/workflows/coverage-windows.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ concurrency:
2929

3030
env:
3131
PYTHON_VERSION: '3.10'
32-
FLAKY_TESTS: dontcare
32+
FLAKY_TESTS: keep_retrying
3333

3434
permissions:
3535
contents: read

.github/workflows/doc.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -40,4 +40,4 @@ jobs:
4040
name: docs
4141
path: out/doc
4242
- name: Test
43-
run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions"
43+
run: NODE=$(command -v node) make test-doc-ci TEST_CI_ARGS="-p actions --measure-flakiness 9"

.github/workflows/test-asan.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ concurrency:
3131
env:
3232
ASAN_OPTIONS: intercept_tls_get_addr=0
3333
PYTHON_VERSION: '3.10'
34-
FLAKY_TESTS: dontcare
34+
FLAKY_TESTS: keep_retrying
3535

3636
permissions:
3737
contents: read
@@ -58,4 +58,4 @@ jobs:
5858
- name: Build
5959
run: make build-ci -j2 V=1
6060
- name: Test
61-
run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions -t 300"
61+
run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions -t 300 --measure-flakiness 9"

.github/workflows/test-internet.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ concurrency:
2222

2323
env:
2424
PYTHON_VERSION: '3.10'
25-
FLAKY_TESTS: dontcare
25+
FLAKY_TESTS: keep_retrying
2626

2727
permissions:
2828
contents: read

.github/workflows/test-linux.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ concurrency:
2424

2525
env:
2626
PYTHON_VERSION: '3.10'
27-
FLAKY_TESTS: dontcare
27+
FLAKY_TESTS: keep_retrying
2828

2929
permissions:
3030
contents: read
@@ -46,4 +46,4 @@ jobs:
4646
- name: Build
4747
run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn"
4848
- name: Test
49-
run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions"
49+
run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --measure-flakiness 9"

.github/workflows/test-macos.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ concurrency:
3030

3131
env:
3232
PYTHON_VERSION: '3.10'
33-
FLAKY_TESTS: dontcare
33+
FLAKY_TESTS: keep_retrying
3434

3535
permissions:
3636
contents: read
@@ -60,4 +60,4 @@ jobs:
6060
- name: Build
6161
run: make build-ci -j2 V=1 CONFIG_FLAGS="--error-on-warn"
6262
- name: Test
63-
run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions"
63+
run: make run-ci -j2 V=1 TEST_CI_ARGS="-p actions --measure-flakiness 9"

tools/test.py

+29-12
Original file line numberDiff line numberDiff line change
@@ -96,10 +96,11 @@ def get_module(name, path):
9696

9797
class ProgressIndicator(object):
9898

99-
def __init__(self, cases, flaky_tests_mode):
99+
def __init__(self, cases, flaky_tests_mode, measure_flakiness):
100100
self.cases = cases
101101
self.serial_id = 0
102102
self.flaky_tests_mode = flaky_tests_mode
103+
self.measure_flakiness = measure_flakiness
103104
self.parallel_queue = Queue(len(cases))
104105
self.sequential_queue = Queue(len(cases))
105106
for case in cases:
@@ -211,10 +212,22 @@ def RunSingle(self, parallel, thread_id):
211212
if output.UnexpectedOutput():
212213
if FLAKY in output.test.outcomes and self.flaky_tests_mode == DONTCARE:
213214
self.flaky_failed.append(output)
215+
elif FLAKY in output.test.outcomes and self.flaky_tests_mode == KEEP_RETRYING:
216+
for _ in range(99):
217+
if not case.Run().UnexpectedOutput():
218+
self.flaky_failed.append(output)
219+
break
220+
else:
221+
# If after 100 tries, the test is not passing, it's not flaky.
222+
self.failed.append(output)
214223
else:
215224
self.failed.append(output)
216225
if output.HasCrashed():
217226
self.crashed += 1
227+
if self.measure_flakiness:
228+
outputs = [case.Run() for _ in range(self.measure_flakiness)]
229+
# +1s are there because the test already failed once at this point.
230+
print(f" failed {len([i for i in outputs if i.UnexpectedOutput()]) + 1} out of {self.measure_flakiness + 1}")
218231
else:
219232
self.succeeded += 1
220233
self.remaining -= 1
@@ -436,8 +449,8 @@ def Done(self):
436449

437450
class CompactProgressIndicator(ProgressIndicator):
438451

439-
def __init__(self, cases, flaky_tests_mode, templates):
440-
super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
452+
def __init__(self, cases, flaky_tests_mode, measure_flakiness, templates):
453+
super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness)
441454
self.templates = templates
442455
self.last_status_length = 0
443456
self.start_time = time.time()
@@ -492,29 +505,29 @@ def PrintProgress(self, name):
492505

493506
class ColorProgressIndicator(CompactProgressIndicator):
494507

495-
def __init__(self, cases, flaky_tests_mode):
508+
def __init__(self, cases, flaky_tests_mode, measure_flakiness):
496509
templates = {
497510
'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
498511
'stdout': "\033[1m%s\033[0m",
499512
'stderr': "\033[31m%s\033[0m",
500513
}
501-
super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
514+
super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness, templates)
502515

503516
def ClearLine(self, last_line_length):
504517
print("\033[1K\r", end='')
505518

506519

507520
class MonochromeProgressIndicator(CompactProgressIndicator):
508521

509-
def __init__(self, cases, flaky_tests_mode):
522+
def __init__(self, cases, flaky_tests_mode, measure_flakiness):
510523
templates = {
511524
'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
512525
'stdout': '%s',
513526
'stderr': '%s',
514527
'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
515528
'max_length': 78
516529
}
517-
super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
530+
super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness, templates)
518531

519532
def ClearLine(self, last_line_length):
520533
print(("\r" + (" " * last_line_length) + "\r"), end='')
@@ -948,8 +961,8 @@ def GetTimeout(self, mode, section=''):
948961
timeout = timeout * 6
949962
return timeout
950963

951-
def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
952-
progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
964+
def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode, measure_flakiness):
965+
progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode, measure_flakiness)
953966
return progress.Run(tasks)
954967

955968
# -------------------------------------------
@@ -967,6 +980,7 @@ def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
967980
SLOW = 'slow'
968981
FLAKY = 'flaky'
969982
DONTCARE = 'dontcare'
983+
KEEP_RETRYING = 'keep_retrying'
970984

971985
class Expression(object):
972986
pass
@@ -1355,8 +1369,11 @@ def BuildOptions():
13551369
result.add_option("--cat", help="Print the source of the tests",
13561370
default=False, action="store_true")
13571371
result.add_option("--flaky-tests",
1358-
help="Regard tests marked as flaky (run|skip|dontcare)",
1372+
help="Regard tests marked as flaky (run|skip|dontcare|keep_retrying)",
13591373
default="run")
1374+
result.add_option("--measure-flakiness",
1375+
help="When a test fails, re-run it x number of times",
1376+
default=0, type="int")
13601377
result.add_option("--skip-tests",
13611378
help="Tests that should not be executed (comma-separated)",
13621379
default="")
@@ -1433,7 +1450,7 @@ def ProcessOptions(options):
14331450
# -j and ignoring -J, which is the opposite of what we used to do before -J
14341451
# became a legacy no-op.
14351452
print('Warning: Legacy -J option is ignored. Using the -j option.')
1436-
if options.flaky_tests not in [RUN, SKIP, DONTCARE]:
1453+
if options.flaky_tests not in [RUN, SKIP, DONTCARE, KEEP_RETRYING]:
14371454
print("Unknown flaky-tests mode %s" % options.flaky_tests)
14381455
return False
14391456
return True
@@ -1733,7 +1750,7 @@ def should_keep(case):
17331750
else:
17341751
try:
17351752
start = time.time()
1736-
if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
1753+
if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests, options.measure_flakiness):
17371754
result = 0
17381755
else:
17391756
result = 1

0 commit comments

Comments (0)