[lit] Refactor ProgressDisplay
Move the progress display into a separate file. Simplify some code paths.
Decouple it from other components via a progress callback. Remove the unused
`_Display` class.

Reviewed By: serge-sans-paille
Differential Revision: https://reviews.llvm.org/D68525
llvm-svn: 374194
commit 6485fb95d4
parent adefa6d1f6
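
The decoupling works by having the driver own the display object and hand the test runner nothing more than a callable. Below is a condensed sketch of the new wiring; it mirrors the main_with_tmp hunk further down and assumes the driver's existing context (`opts`, `run`, `numTotalTests`, `update_incremental_cache`) plus a lit checkout on the Python path, so it is illustrative rather than standalone:

    import lit.display

    # The driver builds whichever display matches the options (quiet, succinct,
    # progress bar, or a plain printed header) and keeps it to itself.
    display = lit.display.create_display(opts, len(run.tests), numTotalTests,
                                         opts.numWorkers)

    # The runner only ever sees this callable; it knows nothing about displays.
    def progress_callback(test):
        display.update(test)
        if opts.incremental:
            update_incremental_cache(test)

    run.execute_tests(progress_callback, opts.numWorkers, opts.maxTime)
    display.finish()

Run.execute_tests() simply stores the callable and invokes it once per completed test, so the runner no longer needs to know what a display is.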
@@ -172,7 +172,7 @@ class SimpleProgressBar:
     A simple progress bar which doesn't need any terminal support.

     This prints out a progress bar like:
-      'Header: 0 .. 10.. 20.. ...'
+      'Header: 0.. 10.. 20.. ...'
     """

     def __init__(self, header):
@@ -191,7 +191,7 @@ class SimpleProgressBar:
         for i in range(self.atIndex, next):
            idx = i % 5
            if idx == 0:
-               sys.stdout.write('%-2d' % (i*2))
+               sys.stdout.write('%2d' % (i*2))
            elif idx == 1:
                pass # Skip second char
            elif idx < 4:
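
These two hunks are one logical change: the number field of the simple bar switches from left-justified to right-justified padding, which is why the docstring's sample output loses the space after the 0 (and why the progress-bar test at the end of this diff updates its CHECK lines). A quick illustration of the formatting difference:

    # '%-2d' pads on the right of the number, '%2d' pads on the left.
    print('%-2d' % 0 + '..')  # '0 ..' -- spacing as in the old output
    print('%2d' % 0 + '..')   # ' 0..' -- spacing as in the new output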
utils/lit/lit/display.py (new file, 98 lines)
@@ -0,0 +1,98 @@
+import sys
+
+import lit.ProgressBar
+
+def create_display(opts, tests, total_tests, workers):
+    if opts.quiet:
+        return NopProgressDisplay()
+
+    of_total = (' of %d' % total_tests) if (tests != total_tests) else ''
+    header = '-- Testing: %d%s tests, %d workers --' % (tests, of_total, workers)
+
+    progress_bar = None
+    if opts.succinct and opts.useProgressBar:
+        try:
+            tc = lit.ProgressBar.TerminalController()
+            progress_bar = lit.ProgressBar.ProgressBar(tc, header)
+        except ValueError:
+            print(header)
+            progress_bar = lit.ProgressBar.SimpleProgressBar('Testing: ')
+    else:
+        print(header)
+
+    if progress_bar:
+        progress_bar.update(0, '')
+
+    return ProgressDisplay(opts, tests, progress_bar)
+
+class NopProgressDisplay(object):
+    def update(self, test): pass
+    def finish(self): pass
+
+class ProgressDisplay(object):
+    def __init__(self, opts, numTests, progressBar):
+        self.opts = opts
+        self.numTests = numTests
+        self.progressBar = progressBar
+        self.completed = 0
+
+    def finish(self):
+        if self.progressBar:
+            self.progressBar.clear()
+        elif self.opts.succinct:
+            sys.stdout.write('\n')
+
+    def update(self, test):
+        self.completed += 1
+
+        show_result = test.result.code.isFailure or \
+            self.opts.showAllOutput or \
+            (not self.opts.quiet and not self.opts.succinct)
+        if show_result:
+            self.print_result(test)
+
+        if self.progressBar:
+            percent = float(self.completed) / self.numTests
+            self.progressBar.update(percent, test.getFullName())
+
+    def print_result(self, test):
+        if self.progressBar:
+            self.progressBar.clear()
+
+        # Show the test result line.
+        test_name = test.getFullName()
+        print('%s: %s (%d of %d)' % (test.result.code.name, test_name,
+                                     self.completed, self.numTests))
+
+        # Show the test failure output, if requested.
+        if (test.result.code.isFailure and self.opts.showOutput) or \
+                self.opts.showAllOutput:
+            if test.result.code.isFailure:
+                print("%s TEST '%s' FAILED %s" % ('*'*20, test.getFullName(),
+                                                  '*'*20))
+            print(test.result.output)
+            print("*" * 20)
+
+        # Report test metrics, if present.
+        if test.result.metrics:
+            print("%s TEST '%s' RESULTS %s" % ('*'*10, test.getFullName(),
+                                               '*'*10))
+            items = sorted(test.result.metrics.items())
+            for metric_name, value in items:
+                print('%s: %s ' % (metric_name, value.format()))
+            print("*" * 10)
+
+        # Report micro-tests, if present
+        if test.result.microResults:
+            items = sorted(test.result.microResults.items())
+            for micro_test_name, micro_test in items:
+                print("%s MICRO-TEST: %s" %
+                         ('*'*3, micro_test_name))
+
+                if micro_test.metrics:
+                    sorted_metrics = sorted(micro_test.metrics.items())
+                    for metric_name, value in sorted_metrics:
+                        print('    %s: %s ' % (metric_name, value.format()))
+
+        # Ensure the output is flushed.
+        sys.stdout.flush()
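
create_display() is the only entry point the rest of lit needs: quiet runs get a NopProgressDisplay, succinct runs try the TerminalController-backed ProgressBar and fall back to SimpleProgressBar on a ValueError, and everything else just prints the header. A rough usage sketch, assuming lit's sources are importable and substituting a hypothetical argparse.Namespace for lit's real parsed options (only the attributes the quiet path reads are filled in):

    import argparse
    import lit.display

    # Hypothetical stand-in for lit's parsed command-line options.
    opts = argparse.Namespace(quiet=True, succinct=False, useProgressBar=False)

    display = lit.display.create_display(opts, tests=4, total_tests=4, workers=1)
    print(type(display).__name__)  # 'NopProgressDisplay': quiet mode discards updates
    display.finish()               # no-op

Note that ProgressDisplay.update() prints the result line before advancing the progress bar, the reverse of the old TestingProgressDisplay order, which is what shifts the expected bar states in the progress-bar test at the end of this diff.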
@@ -19,84 +19,12 @@ import tempfile
 import shutil
 from xml.sax.saxutils import quoteattr

-import lit.ProgressBar
-import lit.LitConfig
-import lit.Test
-import lit.run
-import lit.util
-import lit.discovery
-
-class TestingProgressDisplay(object):
-    def __init__(self, opts, numTests, progressBar=None):
-        self.opts = opts
-        self.numTests = numTests
-        self.progressBar = progressBar
-        self.completed = 0
-
-    def finish(self):
-        if self.progressBar:
-            self.progressBar.clear()
-        elif self.opts.quiet:
-            pass
-        elif self.opts.succinct:
-            sys.stdout.write('\n')
-
-    def update(self, test):
-        self.completed += 1
-
-        if self.opts.incremental:
-            update_incremental_cache(test)
-
-        if self.progressBar:
-            self.progressBar.update(float(self.completed)/self.numTests,
-                                    test.getFullName())
-
-        shouldShow = test.result.code.isFailure or \
-            self.opts.showAllOutput or \
-            (not self.opts.quiet and not self.opts.succinct)
-        if not shouldShow:
-            return
-
-        if self.progressBar:
-            self.progressBar.clear()
-
-        # Show the test result line.
-        test_name = test.getFullName()
-        print('%s: %s (%d of %d)' % (test.result.code.name, test_name,
-                                     self.completed, self.numTests))
-
-        # Show the test failure output, if requested.
-        if (test.result.code.isFailure and self.opts.showOutput) or \
-           self.opts.showAllOutput:
-            if test.result.code.isFailure:
-                print("%s TEST '%s' FAILED %s" % ('*'*20, test.getFullName(),
-                                                  '*'*20))
-            print(test.result.output)
-            print("*" * 20)
-
-        # Report test metrics, if present.
-        if test.result.metrics:
-            print("%s TEST '%s' RESULTS %s" % ('*'*10, test.getFullName(),
-                                               '*'*10))
-            items = sorted(test.result.metrics.items())
-            for metric_name, value in items:
-                print('%s: %s ' % (metric_name, value.format()))
-            print("*" * 10)
-
-        # Report micro-tests, if present
-        if test.result.microResults:
-            items = sorted(test.result.microResults.items())
-            for micro_test_name, micro_test in items:
-                print("%s MICRO-TEST: %s" %
-                         ('*'*3, micro_test_name))
-
-                if micro_test.metrics:
-                    sorted_metrics = sorted(micro_test.metrics.items())
-                    for metric_name, value in sorted_metrics:
-                        print('    %s: %s ' % (metric_name, value.format()))
-
-        # Ensure the output is flushed.
-        sys.stdout.flush()
+import lit.display
+import lit.LitConfig
+import lit.run
+import lit.Test
+import lit.util

 def write_test_results(run, lit_config, testing_time, output_path):
     try:
@@ -505,29 +433,22 @@ def main_with_tmp(builtinParameters):
         except:
             pass

-    extra = (' of %d' % numTotalTests) if (len(run.tests) != numTotalTests) else ''
-    header = '-- Testing: %d%s tests, %d workers --' % (len(run.tests), extra, opts.numWorkers)
-    progressBar = None
-    if not opts.quiet:
-        if opts.succinct and opts.useProgressBar:
-            try:
-                tc = lit.ProgressBar.TerminalController()
-                progressBar = lit.ProgressBar.ProgressBar(tc, header)
-            except ValueError:
-                print(header)
-                progressBar = lit.ProgressBar.SimpleProgressBar('Testing: ')
-        else:
-            print(header)
+    display = lit.display.create_display(opts, len(run.tests),
+                                         numTotalTests, opts.numWorkers)
+    def progress_callback(test):
+        display.update(test)
+        if opts.incremental:
+            update_incremental_cache(test)

     startTime = time.time()
-    display = TestingProgressDisplay(opts, len(run.tests), progressBar)
     try:
-        run.execute_tests(display, opts.numWorkers, opts.maxTime)
+        run.execute_tests(progress_callback, opts.numWorkers, opts.maxTime)
     except KeyboardInterrupt:
         sys.exit(2)
-    testing_time = time.time() - startTime

     display.finish()

+    testing_time = time.time() - startTime
     if not opts.quiet:
         print('Testing Time: %.2fs' % (testing_time,))
@@ -5,18 +5,6 @@ import lit.Test
 import lit.util
 import lit.worker

-class _Display(object):
-    def __init__(self, display, provider, maxFailures):
-        self.display = display
-        self.provider = provider
-        self.maxFailures = maxFailures or object()
-        self.failedCount = 0
-    def update(self, test):
-        self.display.update(test)
-        self.failedCount += (test.result.code == lit.Test.FAIL)
-        if self.failedCount == self.maxFailures:
-            self.provider.cancel()
-
 # No-operation semaphore for supporting `None` for parallelism_groups.
 #   lit_config.parallelism_groups['my_group'] = None
 class NopSemaphore(object):
@@ -93,21 +81,20 @@ class Run(object):
         finally:
             pool.join()

-    def execute_tests(self, display, workers, max_time=None):
+    def execute_tests(self, progress_callback, workers, max_time):
         """
-        execute_tests(display, workers, [max_time])
+        execute_tests(progress_callback, workers, max_time)

         Execute the tests in the run using up to the specified number of
-        parallel tasks, and inform the display of each individual result. The
+        parallel tasks, and inform the caller of each individual result. The
         provided tests should be a subset of the tests available in this run
         object.

+        The progress_callback will be invoked for each completed test.
+
         If max_time is non-None, it should be a time in seconds after which to
         stop executing tests.

-        The display object will have its update method called for each completed
-        test.
-
         Upon completion, each test in the run will have its result
         computed. Tests which were not actually executed (for any reason) will
         be given an UNRESOLVED result.
@@ -116,9 +103,7 @@ class Run(object):
         if not self.tests:
             return

-        # Save the display object on the runner so that we can update it from
-        # our task completion callback.
-        self.display = display
+        self.progress_callback = progress_callback

         self.failure_count = 0
         self.hit_max_failures = False
@@ -156,7 +141,7 @@ class Run(object):
             assert self.tests[test_index].file_path == test_with_result.file_path, \
                     "parent and child disagree on test path"
             self.tests[test_index] = test_with_result
-            self.display.update(test_with_result)
+            self.progress_callback(test_with_result)

         # If we've finished all the tests or too many tests have failed, notify
         # the main thread that we've stopped testing.
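
Because Run.execute_tests() now takes a bare callable, callers other than lit's driver can observe progress without constructing any display; the removed `_Display` wrapper was already unused, and the Run object tracks `failure_count` and `hit_max_failures` itself. A hypothetical sketch, assuming an already-constructed lit.run.Run instance named `run` as in the driver:

    import lit.Test

    failed = []

    # Any callable that accepts the finished test will do.
    def note_failures(test):
        if test.result.code == lit.Test.FAIL:
            failed.append(test.getFullName())

    run.execute_tests(note_failures, workers=1, max_time=None)
    print('%d tests failed' % len(failed))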
@@ -3,11 +3,12 @@
 # RUN: not %{lit} -j 1 -s %{inputs}/progress-bar > %t.out
 # RUN: FileCheck < %t.out %s
 #
-# CHECK: Testing: 0 .. 10.. 20
+# CHECK: Testing:
 # CHECK: FAIL: progress-bar :: test-1.txt (1 of 4)
-# CHECK: Testing: 0 .. 10.. 20.. 30.. 40..
+# CHECK: Testing: 0.. 10.. 20
 # CHECK: FAIL: progress-bar :: test-2.txt (2 of 4)
-# CHECK: Testing: 0 .. 10.. 20.. 30.. 40.. 50.. 60.. 70
+# CHECK: Testing: 0.. 10.. 20.. 30.. 40..
 # CHECK: FAIL: progress-bar :: test-3.txt (3 of 4)
-# CHECK: Testing: 0 .. 10.. 20.. 30.. 40.. 50.. 60.. 70.. 80.. 90..
+# CHECK: Testing: 0.. 10.. 20.. 30.. 40.. 50.. 60.. 70
 # CHECK: FAIL: progress-bar :: test-4.txt (4 of 4)
+# CHECK: Testing: 0.. 10.. 20.. 30.. 40.. 50.. 60.. 70.. 80.. 90..