@@ -96,10 +96,11 @@ def get_module(name, path):
 
 class ProgressIndicator(object):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness):
     self.cases = cases
     self.serial_id = 0
     self.flaky_tests_mode = flaky_tests_mode
+    self.measure_flakiness = measure_flakiness
     self.parallel_queue = Queue(len(cases))
     self.sequential_queue = Queue(len(cases))
     for case in cases:
@@ -211,10 +212,22 @@ def RunSingle(self, parallel, thread_id):
       if output.UnexpectedOutput():
         if FLAKY in output.test.outcomes and self.flaky_tests_mode == DONTCARE:
           self.flaky_failed.append(output)
+        elif FLAKY in output.test.outcomes and self.flaky_tests_mode == KEEP_RETRYING:
+          for _ in range(99):
+            if not case.Run().UnexpectedOutput():
+              self.flaky_failed.append(output)
+              break
+          else:
+            # If after 100 tries, the test is not passing, it's not flaky.
+            self.failed.append(output)
         else:
           self.failed.append(output)
           if output.HasCrashed():
             self.crashed += 1
+          if self.measure_flakiness:
+            outputs = [case.Run() for _ in range(self.measure_flakiness)]
+            # +1s are there because the test already failed once at this point.
+            print(f" failed {len([i for i in outputs if i.UnexpectedOutput()]) + 1} out of {self.measure_flakiness + 1}")
       else:
         self.succeeded += 1
       self.remaining -= 1
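Two notes on the RunSingle hunk above. The keep_retrying branch relies on Python's for/else: the else clause runs only when the loop finishes without hitting break, i.e. when none of the 99 reruns passed, and only then is the test recorded as a real failure. The measure_flakiness block re-runs an already-failed test N more times and prints how many of the N+1 total runs failed. A minimal standalone sketch of the for/else pattern, with a hypothetical run_once callable standing in for case.Run() (not part of the patch):

```python
def classify_after_retries(run_once, max_retries=99):
    # run_once is a hypothetical callable returning True when the test passes;
    # it stands in for "not case.Run().UnexpectedOutput()" in the patch.
    for _ in range(max_retries):
        if run_once():
            outcome = 'flaky'   # passed at least once: kept as a flaky failure
            break
    else:
        # Loop completed without a break: the test never passed,
        # so it counts as a genuine failure rather than a flake.
        outcome = 'failed'
    return outcome
```

For example, classify_after_retries(lambda: False) returns 'failed', while a callable that eventually returns True yields 'flaky'.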
@@ -436,8 +449,8 @@ def Done(self):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode, templates):
-    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness, templates):
+    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -492,29 +505,29 @@ def PrintProgress(self, name):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness, templates)
 
   def ClearLine(self, last_line_length):
     print("\033[1K\r", end='')
 
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases, flaky_tests_mode, measure_flakiness):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
       'stderr': '%s',
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, measure_flakiness, templates)
 
   def ClearLine(self, last_line_length):
     print(("\r" + (" " * last_line_length) + "\r"), end='')
@@ -948,8 +961,8 @@ def GetTimeout(self, mode, section=''):
       timeout = timeout * 6
     return timeout
 
-def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode, measure_flakiness):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode, measure_flakiness)
   return progress.Run(tasks)
 
 # -------------------------------------------
@@ -967,6 +980,7 @@ def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
 SLOW = 'slow'
 FLAKY = 'flaky'
 DONTCARE = 'dontcare'
+KEEP_RETRYING = 'keep_retrying'
 
 class Expression(object):
   pass
@@ -1355,8 +1369,11 @@ def BuildOptions():
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
   result.add_option("--flaky-tests",
-      help="Regard tests marked as flaky (run|skip|dontcare)",
+      help="Regard tests marked as flaky (run|skip|dontcare|keep_retrying)",
       default="run")
+  result.add_option("--measure-flakiness",
+      help="When a test fails, re-run it x number of times",
+      default=0, type="int")
   result.add_option("--skip-tests",
       help="Tests that should not be executed (comma-separated)",
       default="")
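Since these options go through optparse, here is a minimal sketch (assumed, not part of the patch) of how the two new flags would be registered and parsed in isolation; the flag names, defaults, and type="int" conversion match the add_option calls above, while the example command line is hypothetical:

```python
from optparse import OptionParser

parser = OptionParser()
parser.add_option("--flaky-tests",
    help="Regard tests marked as flaky (run|skip|dontcare|keep_retrying)",
    default="run")
parser.add_option("--measure-flakiness",
    help="When a test fails, re-run it x number of times",
    default=0, type="int")

# Hypothetical arguments; the real runner builds many more options.
options, _args = parser.parse_args(
    ["--flaky-tests=keep_retrying", "--measure-flakiness=4"])
print(options.flaky_tests)        # 'keep_retrying'
print(options.measure_flakiness)  # 4 (type="int" converts the string)
```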
@@ -1433,7 +1450,7 @@ def ProcessOptions(options):
     # -j and ignoring -J, which is the opposite of what we used to do before -J
     # became a legacy no-op.
     print('Warning: Legacy -J option is ignored. Using the -j option.')
-  if options.flaky_tests not in [RUN, SKIP, DONTCARE]:
+  if options.flaky_tests not in [RUN, SKIP, DONTCARE, KEEP_RETRYING]:
     print("Unknown flaky-tests mode %s" % options.flaky_tests)
     return False
   return True
@@ -1733,7 +1750,7 @@ def should_keep(case):
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
+      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests, options.measure_flakiness):
        result = 0
      else:
        result = 1