Jpp 19.3.0-rc.1
the software that should make you happy
Loading...
Searching...
No Matches
run_tests.py
Go to the documentation of this file.
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3"""
4A test runner for Jpp.
5This script traverses the tests directory and reports a summary.
6
7"""
8from __future__ import print_function
9from collections import defaultdict
10from glob import glob, iglob
11import os
12from os.path import join, basename
13from subprocess import Popen, PIPE
14import sys
15from time import time
16import re
17import xml.etree.ElementTree as ET
18import xml.dom.minidom
19
# --- command-line handling and global configuration -------------------------

# Exactly one argument (the path to the tests directory) is required.
if len(sys.argv) != 2:
    print("Usage: run_tests.py PATH_TO_TESTS")
    # Exit with a non-zero status so callers (e.g. CI) notice the error;
    # a bare `raise SystemExit` would exit with status 0 (success).
    raise SystemExit(1)

# Signal the test scripts that they run under the test harness.
os.environ["TEST_DEBUG"] = "1"

# Python 3 removed unichr(); alias it to chr() for 2/3 compatibility.
try:
    unichr
except NameError:
    unichr = chr

__author__ = "Tamas Gal"
__credits__ = "Brian Beyer"
__license__ = "MIT"
__email__ = "tgal@km3net.de"
__status__ = "Development"

TESTS_DIR = sys.argv[1]
# The JUnit report is named after the tests directory, e.g. out/junit_tests.xml
JUNIT_XML = 'out/junit_{}.xml'.format(os.path.basename(TESTS_DIR))

# Use ANSI colour codes only when attached to a real terminal.
if hasattr(sys.stdout, 'isatty') and sys.stdout.isatty():
    INFO = '\033[94m'  # blue
    OK = '\033[92m'  # green
    FAIL = '\033[91m'  # red
    RST = '\033[0m'  # colour reset
    BOLD = '\033[1m'
else:
    INFO, OK, FAIL, RST, BOLD = ('', ) * 5
48
49
def main():
    """Run all test scripts, print a summary and write a JUnit XML report.

    Exits the process with status 1 if at least one test failed,
    0 otherwise.
    """
    test_results = run_tests(TESTS_DIR)
    n_tests = len(test_results)
    # Any non-zero exit code counts as a failure -- including negative
    # values, which Popen reports when a script was killed by a signal.
    n_failed_tests = sum(1 for r in test_results.values() if r[0] != 0)
    total_time = sum(r[1] for r in test_results.values())

    print("\n{}"
          "Test summary\n"
          "============\n{}"
          "Total number of tests: {}\n{}"
          "Failed tests: {}{}\n"
          "Elapsed time: {:.1f}s\n".format(
              INFO, RST, n_tests, BOLD + FAIL if n_failed_tests > 0 else OK,
              n_failed_tests, RST, total_time))

    write_junit_xml(test_results)

    if n_failed_tests > 0:
        print_captured_output(test_results)
        # sys.exit() is preferred over the site-module builtin exit(),
        # which is not guaranteed to exist (e.g. when run with -S).
        sys.exit(1)
    else:
        sys.exit(0)
72
73
def write_junit_xml(test_results):
    """Generate an XML file according to the JUnit specification.

    Parameters
    ----------
    test_results: dict
        key = script path, value = (exit_code, elapsed_time, stdout, stderr)
    """
    test_cases = []
    for test_script, (exit_code, t, stdout, stderr) in test_results.items():
        test_case = TestCase(test_script,
                             elapsed_sec=t,
                             stdout=stdout,
                             stderr=stderr)
        # Non-zero includes negative codes (script killed by a signal).
        if exit_code != 0:
            test_case.add_error_info('non-zero exit-code: %d' % exit_code)
        test_cases.append(test_case)
    test_suite = TestSuite("Jpp Test Suite", test_cases)
    # Create the output directory ('out/') if needed; open() would fail
    # otherwise. Written py2/3-compatible (no exist_ok).
    out_dir = os.path.dirname(JUNIT_XML)
    if out_dir and not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    with open(JUNIT_XML, 'w') as f:
        TestSuite.to_file(f, [test_suite])
88
89
def print_captured_output(test_results):
    """Prints the STDOUT and STDERR of failing test scripts.

    Parameters
    ----------
    test_results: dict
        key = script path, value = (exit_code, elapsed_time, stdout, stderr)
    """
    print("{}"
          "Captured output of failing tests\n"
          "================================\n{}".format(INFO, RST))
    for test_script, (exit_code, t, stdout, stderr) in test_results.items():
        # Non-zero includes negative codes (script killed by a signal),
        # whose output would otherwise be silently dropped.
        if exit_code != 0:
            print("{}\n{}\n".format(test_script, len(test_script) * '-'))
            print('{}stdout:{}\n{}\n{}stderr:{}\n{}'.format(
                OK + BOLD, RST, stdout, FAIL + BOLD, RST, stderr))
100
101
def safe_str(obj):
    """Decode a byte string to pure-ASCII text, dropping any other bytes.

    Malformed UTF-8 sequences are ignored instead of raising
    UnicodeDecodeError, since arbitrary subprocess output may contain
    invalid bytes.
    """
    return obj.decode('utf-8', 'ignore').encode('ascii', 'ignore').decode('ascii')
104
105
def run_tests(tests_dir):
    """Runs each script in the tests directory and returns the results.

    Parameters
    ----------
    tests_dir: str
        The path to the test dir, containing one subdirectory per
        component group with the test scripts (`*.sh`, `*.csh`).

    Returns
    -------
    dict: key = script path, value = (exit_code, elapsed_time, stdout, stderr)

    """
    test_results = {}

    for subdir in sorted(glob(join(tests_dir, '*'))):
        component_group = basename(subdir)
        print("\n{}{}\n{}{}".format(INFO, component_group,
                                    len(component_group) * '=', RST))
        test_scripts = sorted(
            glob(join(subdir, '*.sh')) + glob(join(subdir, '*.csh')))
        for test_script in test_scripts:
            print("+ {}".format(test_script), end=' => ')
            sys.stdout.flush()
            start_time = time()
            proc = Popen(test_script, stdout=PIPE, stderr=PIPE)
            # communicate() drains both pipes (avoids deadlock on full
            # pipe buffers) and waits for the process to finish.
            out, err = [safe_str(t) for t in proc.communicate()]
            exit_code = proc.wait()
            delta_t = time() - start_time
            test_results[test_script] = (exit_code, delta_t, out, err)
            print(" ({:.2f} s) ".format(delta_t), end='')
            sys.stdout.flush()
            # Negative exit codes mean the script was killed by a signal;
            # anything non-zero is a failure.
            if exit_code != 0:
                print("{}FAILED (exit code {}){}".format(FAIL, exit_code, RST))
                sys.stdout.flush()
            else:
                print("{}OK{}".format(OK, RST))
                sys.stdout.flush()

    return test_results
144
145
class TestSuite(object):
    """
    Suite of test cases.
    Can handle unicode strings or binary strings if their encoding is provided.

    Mirrors the ``<testsuite>`` element of the JUnit XML format; instances
    are serialised via build_xml_doc() / to_xml_string() / to_file().
    """
    def __init__(self,
                 name,
                 test_cases=None,
                 hostname=None,
                 id=None,
                 package=None,
                 timestamp=None,
                 properties=None,
                 file=None,
                 log=None,
                 url=None,
                 stdout=None,
                 stderr=None):
        # NOTE: `id` and `file` shadow builtins, but the parameter names
        # match the JUnit XML attribute names they are serialised to.
        self.name = name
        if not test_cases:
            test_cases = []
        # Only iterability is checked; elements are assumed to be TestCase
        # instances (their attributes are accessed in build_xml_doc).
        try:
            iter(test_cases)
        except TypeError:
            raise Exception('test_cases must be a list of test cases')
        self.test_cases = test_cases
        self.timestamp = timestamp
        self.hostname = hostname
        self.id = id
        self.package = package
        self.file = file
        self.log = log
        self.url = url
        self.stdout = stdout
        self.stderr = stderr
        self.properties = properties

    def build_xml_doc(self, encoding=None):
        """
        Builds the XML document for the JUnit test suite.
        Produces clean unicode strings and decodes non-unicode with the help of encoding.
        @param encoding: Used to decode encoded strings.
        @return: XML document with unicode string elements
        """

        # build the test suite element
        # (every ElementTree attribute value has to be a string, hence str())
        test_suite_attributes = dict()
        test_suite_attributes['name'] = self.name
        if any(c.assertions for c in self.test_cases):
            test_suite_attributes['assertions'] = \
                str(sum([int(c.assertions) for c in self.test_cases if c.assertions]))
        test_suite_attributes['disabled'] = \
            str(len([c for c in self.test_cases if not c.is_enabled]))
        test_suite_attributes['failures'] = \
            str(len([c for c in self.test_cases if c.is_failure()]))
        test_suite_attributes['errors'] = \
            str(len([c for c in self.test_cases if c.is_error()]))
        test_suite_attributes['skipped'] = \
            str(len([c for c in self.test_cases if c.is_skipped()]))
        test_suite_attributes['time'] = \
            str(sum(c.elapsed_sec for c in self.test_cases if c.elapsed_sec))
        test_suite_attributes['tests'] = str(len(self.test_cases))

        # optional suite-level attributes are only emitted when set
        if self.hostname:
            test_suite_attributes['hostname'] = self.hostname
        if self.id:
            test_suite_attributes['id'] = self.id
        if self.package:
            test_suite_attributes['package'] = self.package
        if self.timestamp:
            test_suite_attributes['timestamp'] = self.timestamp
        if self.file:
            test_suite_attributes['file'] = self.file
        if self.log:
            test_suite_attributes['log'] = self.log
        if self.url:
            test_suite_attributes['url'] = self.url

        xml_element = ET.Element("testsuite", test_suite_attributes)

        # add any properties
        # NOTE(review): values are passed through as-is; ElementTree will
        # raise if a property value is not a string -- verify callers.
        if self.properties:
            props_element = ET.SubElement(xml_element, "properties")
            for k, v in self.properties.items():
                attrs = {
                    'name': k,
                    'value': v
                }
                ET.SubElement(props_element, "property", attrs)

        # add test suite stdout
        if self.stdout:
            stdout_element = ET.SubElement(xml_element, "system-out")
            stdout_element.text = self.stdout

        # add test suite stderr
        if self.stderr:
            stderr_element = ET.SubElement(xml_element, "system-err")
            stderr_element.text = self.stderr

        # test cases: one <testcase> child element per TestCase
        for case in self.test_cases:
            test_case_attributes = dict()
            test_case_attributes['name'] = case.name
            if case.assertions:
                # Number of assertions in the test case
                test_case_attributes['assertions'] = "%d" % case.assertions
            if case.elapsed_sec:
                test_case_attributes['time'] = "%f" % case.elapsed_sec
            if case.timestamp:
                test_case_attributes['timestamp'] = case.timestamp
            if case.classname:
                test_case_attributes['classname'] = case.classname
            if case.status:
                test_case_attributes['status'] = case.status
            if case.category:
                test_case_attributes['class'] = case.category
            if case.file:
                test_case_attributes['file'] = case.file
            if case.line:
                test_case_attributes['line'] = case.line
            if case.log:
                test_case_attributes['log'] = case.log
            if case.url:
                test_case_attributes['url'] = case.url

            test_case_element = ET.SubElement(xml_element, "testcase",
                                              test_case_attributes)

            # failures: an explicit failure_type overrides the default 'failure'
            if case.is_failure():
                attrs = {'type': 'failure'}
                if case.failure_message:
                    attrs['message'] = case.failure_message
                if case.failure_type:
                    attrs['type'] = case.failure_type
                failure_element = ET.Element("failure", attrs)
                if case.failure_output:
                    failure_element.text = case.failure_output
                test_case_element.append(failure_element)

            # errors: same pattern as failures, with an <error> element
            if case.is_error():
                attrs = {'type': 'error'}
                if case.error_message:
                    attrs['message'] = case.error_message
                if case.error_type:
                    attrs['type'] = case.error_type
                error_element = ET.Element("error", attrs)
                if case.error_output:
                    error_element.text = case.error_output
                test_case_element.append(error_element)

            # skipped cases
            if case.is_skipped():
                attrs = {'type': 'skipped'}
                if case.skipped_message:
                    attrs['message'] = case.skipped_message
                skipped_element = ET.Element("skipped", attrs)
                if case.skipped_output:
                    skipped_element.text = case.skipped_output
                test_case_element.append(skipped_element)

            # test stdout
            if case.stdout:
                stdout_element = ET.Element("system-out")
                stdout_element.text = case.stdout
                test_case_element.append(stdout_element)

            # test stderr
            if case.stderr:
                stderr_element = ET.Element("system-err")
                stderr_element.text = case.stderr
                test_case_element.append(stderr_element)

        return xml_element

    @staticmethod
    def to_xml_string(test_suites, prettyprint=True, encoding=None):
        """
        Returns the string representation of the JUnit XML document.
        @param test_suites: An iterable of TestSuite instances.
        @param prettyprint: Indent the output via xml.dom.minidom.
        @param encoding: The encoding of the input.
        @return: unicode string
        """

        try:
            iter(test_suites)
        except TypeError:
            raise Exception('test_suites must be a list of test suites')

        # wrap all suites in a single <testsuites> root element and
        # aggregate their counters onto it
        xml_element = ET.Element("testsuites")
        attributes = defaultdict(int)
        for ts in test_suites:
            ts_xml = ts.build_xml_doc(encoding=encoding)
            for key in ['failures', 'errors', 'tests', 'disabled']:
                attributes[key] += int(ts_xml.get(key, 0))
            for key in ['time']:
                attributes[key] += float(ts_xml.get(key, 0))
            xml_element.append(ts_xml)
        for key, value in attributes.items():
            xml_element.set(key, str(value))

        xml_string = ET.tostring(xml_element, encoding=encoding)
        # is encoded now
        xml_string = TestSuite._clean_illegal_xml_chars(
            xml_string.decode(encoding or 'utf-8'))
        # is unicode now

        if prettyprint:
            # minidom.parseString() works just on correctly encoded binary strings
            xml_string = xml_string.encode(encoding or 'utf-8')
            xml_string = xml.dom.minidom.parseString(xml_string)
            # toprettyxml() produces unicode if no encoding is being passed or binary string with an encoding
            xml_string = xml_string.toprettyxml(encoding=encoding)
            if encoding:
                xml_string = xml_string.decode(encoding)
            # is unicode now
        return xml_string

    @staticmethod
    def to_file(file_descriptor, test_suites, prettyprint=True, encoding=None):
        """
        Writes the JUnit XML document to a file.
        @param file_descriptor: A writable (text-mode) file-like object.
        @param test_suites: An iterable of TestSuite instances.
        @param prettyprint: Indent the output via xml.dom.minidom.
        @param encoding: The encoding of the input strings.
        """
        xml_string = TestSuite.to_xml_string(test_suites,
                                             prettyprint=prettyprint,
                                             encoding=encoding)
        # has problems with encoded str with non-ASCII (non-default-encoding) characters!
        file_descriptor.write(xml_string)

    @staticmethod
    def _clean_illegal_xml_chars(string_to_clean):
        """
        Removes any illegal unicode characters from the given XML string.
        @see: http://stackoverflow.com/questions/1707890/fast-way-to-filter-illegal-xml-unicode-chars-in-python
        """

        # code-point ranges that are not allowed in XML 1.0 documents
        illegal_unichrs = [(0x00, 0x08), (0x0B, 0x1F), (0x7F, 0x84),
                           (0x86, 0x9F), (0xD800, 0xDFFF), (0xFDD0, 0xFDDF),
                           (0xFFFE, 0xFFFF), (0x1FFFE, 0x1FFFF),
                           (0x2FFFE, 0x2FFFF), (0x3FFFE, 0x3FFFF),
                           (0x4FFFE, 0x4FFFF), (0x5FFFE, 0x5FFFF),
                           (0x6FFFE, 0x6FFFF), (0x7FFFE, 0x7FFFF),
                           (0x8FFFE, 0x8FFFF), (0x9FFFE, 0x9FFFF),
                           (0xAFFFE, 0xAFFFF), (0xBFFFE, 0xBFFFF),
                           (0xCFFFE, 0xCFFFF), (0xDFFFE, 0xDFFFF),
                           (0xEFFFE, 0xEFFFF), (0xFFFFE, 0xFFFFF),
                           (0x10FFFE, 0x10FFFF)]

        # skip ranges beyond this build's maximum code point
        # (narrow-unicode builds have sys.maxunicode == 0xFFFF)
        illegal_ranges = [
            "%s-%s" % (unichr(low), unichr(high))
            for (low, high) in illegal_unichrs if low < sys.maxunicode
        ]

        illegal_xml_re = re.compile('[%s]' % ''.join(illegal_ranges))
        return illegal_xml_re.sub('', string_to_clean)
402
403
class TestCase(object):
    """A single JUnit test case, holding its result and captured output."""

    def __init__(self,
                 name,
                 classname=None,
                 elapsed_sec=None,
                 stdout=None,
                 stderr=None,
                 assertions=None,
                 timestamp=None,
                 status=None,
                 category=None,
                 file=None,
                 line=None,
                 log=None,
                 group=None,
                 url=None):
        # Identification and bookkeeping.
        self.name = name
        self.classname = classname
        self.timestamp = timestamp
        self.status = status
        self.category = category
        self.file = file
        self.line = line
        self.log = log
        self.url = url
        # Result data and captured streams.
        self.assertions = assertions
        self.elapsed_sec = elapsed_sec
        self.stdout = stdout
        self.stderr = stderr
        self.is_enabled = True
        # Failure/error/skip details; all start empty and are filled in
        # through the add_*_info() methods below.
        self.error_message = None
        self.error_output = None
        self.error_type = None
        self.failure_message = None
        self.failure_output = None
        self.failure_type = None
        self.skipped_message = None
        self.skipped_output = None

    def add_error_info(self, message=None, output=None, error_type=None):
        """Record an error message, output and/or type on this test case."""
        if message:
            self.error_message = message
        if output:
            self.error_output = output
        if error_type:
            self.error_type = error_type

    def add_failure_info(self, message=None, output=None, failure_type=None):
        """Record a failure message, output and/or type on this test case."""
        if message:
            self.failure_message = message
        if output:
            self.failure_output = output
        if failure_type:
            self.failure_type = failure_type

    def add_skipped_info(self, message=None, output=None):
        """Record a skipped message and/or output on this test case."""
        if message:
            self.skipped_message = message
        if output:
            self.skipped_output = output

    def is_failure(self):
        """Truthy if any failure information has been recorded."""
        return self.failure_output or self.failure_message

    def is_error(self):
        """Truthy if any error information has been recorded."""
        return self.error_output or self.error_message

    def is_skipped(self):
        """Truthy if any skipped information has been recorded."""
        return self.skipped_output or self.skipped_message
481
482
# Entry point: run the test suite only when executed as a script,
# so the module can be imported without side effects.
if __name__ == '__main__':
    main()
void print(const TH1 &h1, std::ostream &out)
Print histogram parameters.
General exception.
Definition Exception.hh:13
__init__(self, name, test_cases=None, hostname=None, id=None, package=None, timestamp=None, properties=None, file=None, log=None, url=None, stdout=None, stderr=None)
Definition run_tests.py:163
_clean_illegal_xml_chars(string_to_clean)
Definition run_tests.py:377
to_file(file_descriptor, test_suites, prettyprint=True, encoding=None)
Definition run_tests.py:366
build_xml_doc(self, encoding=None)
Definition run_tests.py:183
to_xml_string(test_suites, prettyprint=True, encoding=None)
Definition run_tests.py:324
safe_str(obj)
Definition run_tests.py:102
print_captured_output(test_results)
Definition run_tests.py:90
write_junit_xml(test_results)
Definition run_tests.py:74