llvm.org GIT mirror llvm / 2849503

[lit] Add an --output option, for writing results in a machine readable form.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@190738 91177308-0d34-0410-b5e6-96231b3b80d8

Daniel Dunbar, 6 years ago
5 changed file(s) with 96 addition(s) and 3 deletion(s).
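In practice this means a lit run can now leave behind a JSON file describing the whole run: a schema version, the elapsed time, and one record per test with its name, result code, output, elapsed time, and any metrics (all of which can be seen in the write_test_results function below). As a rough sketch of how a downstream tool might consume such a file, assuming it was produced with --output results.json (the file name and the reporting logic here are illustrative only, not part of this change):

    import json

    # Load the machine readable results written by `--output results.json`
    # (the path is a placeholder for wherever the run put them).
    with open('results.json') as f:
        data = json.load(f)

    print('lit schema version: %r' % (data['__version__'],))
    print('testing time: %.2fs' % data['elapsed'])

    # Report every test whose result code is not PASS, plus any metrics it carried.
    for test in data['tests']:
        if test['code'] != 'PASS':
            print('%s: %s' % (test['code'], test['name']))
            for name, value in test.get('metrics', {}).items():
                print('  %s = %r' % (name, value))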
The MetricValue classes gain an abstract todata() hook alongside format() (which also picks up a docstring), so that each metric can be turned into JSON-serializable data:

 class MetricValue(object):
     def format(self):
+        """
+        format() -> str
+
+        Convert this metric to a string suitable for displaying as part of the
+        console output.
+        """
+        raise RuntimeError("abstract method")
+
+    def todata(self):
+        """
+        todata() -> json-serializable data
+
+        Convert this metric to content suitable for serializing in the JSON test
+        output.
+        """
         raise RuntimeError("abstract method")

 class IntMetricValue(MetricValue):
     ...
     def format(self):
         return str(self.value)

+    def todata(self):
+        return self.value
+
 class RealMetricValue(MetricValue):
     def __init__(self, value):
         self.value = value

     def format(self):
         return '%.4f' % self.value
+
+    def todata(self):
+        return self.value

 # Test results.

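These MetricValue subclasses are what a test format instantiates when it wants to report numbers alongside a pass/fail result; write_test_results below calls todata() on each of them. A rough sketch of producing such metrics from a custom test format (the addMetric call is assumed to be the existing lit API for attaching metrics to a result, and the names and values simply mirror the metrics.ini example further down):

    import lit.Test

    def execute(self, test, lit_config):
        # Sketch of a test format's execute() attaching metrics to its result.
        # addMetric is assumed to be the existing API for this; the metric
        # names and values are illustrative.
        result = lit.Test.Result(lit.Test.PASS, 'Test passed.')
        result.addMetric('value0', lit.Test.IntMetricValue(1))
        result.addMetric('value1', lit.Test.RealMetricValue(2.3456))
        return result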
In the lit driver, a new write_test_results helper serializes the run to JSON, a new -o/--output option is added alongside the existing flags, and the results are written out right after the timing summary:

     # Ensure the output is flushed.
     sys.stdout.flush()
+
+def write_test_results(run, lit_config, testing_time, output_path):
+    try:
+        import json
+    except ImportError:
+        lit_config.fatal('test output unsupported with Python 2.5')
+
+    # Construct the data we will write.
+    data = {}
+    # Encode the current lit version as a schema version.
+    data['__version__'] = lit.__versioninfo__
+    data['elapsed'] = testing_time
+    # FIXME: Record some information on the lit configuration used?
+    # FIXME: Record information from the individual test suites?
+
+    # Encode the tests.
+    data['tests'] = tests_data = []
+    for test in run.tests:
+        test_data = {
+            'name' : test.getFullName(),
+            'code' : test.result.code.name,
+            'output' : test.result.output,
+            'elapsed' : test.result.elapsed }
+
+        # Add test metrics, if present.
+        if test.result.metrics:
+            test_data['metrics'] = metrics_data = {}
+            for key, value in test.result.metrics.items():
+                metrics_data[key] = value.todata()
+
+        tests_data.append(test_data)
+
+    # Write the output.
+    f = open(output_path, 'w')
+    try:
+        json.dump(data, f, indent=2, sort_keys=True)
+        f.write('\n')
+    finally:
+        f.close()

 def main(builtinParameters = {}):
     # Bump the GIL check interval, its more important to get any one thread to a
    ...
     group.add_option("-v", "--verbose", dest="showOutput",
                      help="Show all test output",
                      action="store_true", default=False)
+    group.add_option("-o", "--output", dest="output_path",
+                     help="Write test results to the provided path",
+                     action="store", type=str, metavar="PATH")
     group.add_option("", "--no-progress-bar", dest="useProgressBar",
                      help="Do not use curses based progress bar",
                      action="store_false", default=True)
    ...
         sys.exit(2)
     display.finish()

+    testing_time = time.time() - startTime
     if not opts.quiet:
-        print('Testing Time: %.2fs'%(time.time() - startTime))
+        print('Testing Time: %.2fs' % (testing_time,))
+
+    # Write out the test data, if requested.
+    if opts.output_path is not None:
+        write_test_results(run, litConfig, testing_time, opts.output_path)

     # List test results organized by kind.
     hasFailures = False
In the example test-data suite used by lit's own tests, the metrics input file drops the quoting around result_output, so the raw string matches the expected JSON output:

 [global]
 result_code = PASS
-result_output = 'Test passed.'
+result_output = Test passed.

 [results]
 value0 = 1
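How the test-data suite turns this file into a result is not part of the diff shown here; as a rough sketch only, assuming a ConfigParser-based test format that maps result_code and result_output to a lit result and each [results] entry to a metric, the translation might look like:

    try:
        import ConfigParser                    # Python 2 module name
    except ImportError:
        import configparser as ConfigParser    # Python 3

    import lit.Test

    def load_result(path):
        # Hypothetical helper: read a metrics.ini-style file and build the
        # corresponding lit result. The real format used by the test-data
        # suite is not shown in this commit, so this is purely illustrative.
        parser = ConfigParser.ConfigParser()
        parser.read(path)

        code = getattr(lit.Test, parser.get('global', 'result_code'))
        result = lit.Test.Result(code, parser.get('global', 'result_output'))

        for key, value_str in parser.items('results'):
            # Whole numbers become IntMetricValue, everything else RealMetricValue.
            try:
                value = lit.Test.IntMetricValue(int(value_str))
            except ValueError:
                value = lit.Test.RealMetricValue(float(value_str))
            # addMetric is assumed to be the existing API for attaching metrics.
            result.addMetric(key, value)
        return result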
The example suite's configuration replaces the None target_triple with a placeholder string and advertises the running Python version as an available feature, which the new test below uses to XFAIL on Python 2.5:

 config.test_source_root = os.path.dirname(__file__)
 config.test_exec_root = config.test_source_root

-config.target_triple = None
+config.target_triple = '(unused)'

 src_root = os.path.join(config.test_source_root, '..')
 config.environment['PYTHONPATH'] = src_root
...
 if lit_config.params.get('check-coverage', None):
     config.environment['COVERAGE_PROCESS_START'] = os.path.join(
         os.path.dirname(__file__), ".coveragerc")
+
+# Add a feature to detect the Python version.
+config.available_features.add("python%d.%d" % (sys.version_info[0],
+                                                sys.version_info[1]))
Finally, a new test drives lit over the test-data suite with --output and uses FileCheck to verify the JSON that comes out; it is expected to fail on Python 2.5, where the json import in write_test_results is fatal:

+# XFAIL: python2.5
+
+# RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
+# RUN: FileCheck < %t.results.out %s
+
+# CHECK: {
+# CHECK: "__version__"
+# CHECK: "elapsed"
+# CHECK-NEXT: "tests": [
+# CHECK-NEXT: {
+# CHECK-NEXT: "code": "PASS",
+# CHECK-NEXT: "elapsed": {{[0-9.]+}},
+# CHECK-NEXT: "metrics": {
+# CHECK-NEXT: "value0": 1,
+# CHECK-NEXT: "value1": 2.3456
+# CHECK-NEXT: }
+# CHECK-NEXT: "name": "test-data :: metrics.ini",
+# CHECK-NEXT: "output": "Test passed."
+# CHECK-NEXT: }
+# CHECK-NEXT: ]
+# CHECK-NEXT: }