#!/usr/bin/env python

"""
This script determines the given package's openbmc dependencies from its
configure.ac and meson.build files, then downloads, configures, builds, and
installs each of these dependencies. The given package is then configured,
built, and installed prior to executing its unit tests.
"""

from git import Repo
from urlparse import urljoin
from subprocess import check_call, call, CalledProcessError
import os
import sys
import argparse
import multiprocessing
import re
import sets
import subprocess
import shutil
import platform


class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove child node.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            return_paths = None
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree. If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names.
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)
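
# Illustrative example (package names hypothetical): for a tree built as
#   tree = DepTree('app')
#   tree.AddChild('sdbusplus')
#   tree.AddChild('phosphor-logging')
# tree.GetInstallList() returns ['sdbusplus', 'phosphor-logging', 'app'],
# i.e. dependencies appear before the packages that need them, which is the
# order build_and_install() is later invoked in.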


def check_call_cmd(*cmd):
    """
    Verbosely prints the directory the given command is called from and the
    command itself, then executes the command using check_call.

    Parameter descriptions:
    cmd                List of parameters constructing the complete command
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)


def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location

    Parameter descriptions:
    pkg                Name of the package to clone
    branch             Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        repo_inst = Repo.clone_from(pkg_repo, pkg_dir,
                                    branch=branch).working_dir
    except:
        printline("Input branch not found, default to master")
        repo_inst = Repo.clone_from(pkg_repo, pkg_dir,
                                    branch="master").working_dir
    return repo_inst


def get_autoconf_deps(pkgdir):
    """
    Parse the given package's 'configure.ac' file for dependencies and return
    a list of the dependencies found. If the package does not use autoconf,
    it is ignored.

    Parameter descriptions:
    pkgdir             Directory where package source is located
    """
    configure_ac = os.path.join(pkgdir, 'configure.ac')
    if not os.path.exists(configure_ac):
        return []

    configure_ac_contents = ''
    # Prepend some special function overrides so we can parse out dependencies
    for macro in DEPENDENCIES.iterkeys():
        configure_ac_contents += ('m4_define([' + macro + '], [' +
                                  macro + '_START$' +
                                  str(DEPENDENCIES_OFFSET[macro] + 1) +
                                  macro + '_END])\n')
    with open(configure_ac, "rt") as f:
        configure_ac_contents += f.read()

    autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
    (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
    if not stdout:
        print(stderr)
        raise Exception("Failed to run autoconf for parsing dependencies")

    # Parse out all of the dependency text
    matches = []
    for macro in DEPENDENCIES.iterkeys():
        pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
        for match in re.compile(pattern).finditer(stdout):
            matches.append((match.group(1), match.group(2)))

    # Look up dependencies from the text
    found_deps = []
    for macro, deptext in matches:
        for potential_dep in deptext.split(' '):
            for known_dep in DEPENDENCIES[macro].iterkeys():
                if potential_dep.startswith(known_dep):
                    found_deps.append(DEPENDENCIES[macro][known_dep])

    return found_deps
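
# Illustrative example (configure.ac line hypothetical): with the m4_define
# overrides above, a line such as
#   PKG_CHECK_MODULES([SDBUSPLUS], [sdbusplus])
# expands to 'PKG_CHECK_MODULES_STARTsdbusplusPKG_CHECK_MODULES_END' in the
# autoconf output (DEPENDENCIES_OFFSET selects the second macro argument),
# and the DEPENDENCIES lookup table then maps 'sdbusplus' to the sdbusplus
# repository.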


def get_meson_deps(pkgdir):
    """
    Parse the given package's 'meson.build' files for dependencies and return
    a list of the dependencies found. If the package is not meson compatible,
    it is ignored.

    Parameter descriptions:
    pkgdir             Directory where package source is located
    """
    meson_build = os.path.join(pkgdir, 'meson.build')
    if not os.path.exists(meson_build):
        return []

    found_deps = []
    for root, dirs, files in os.walk(pkgdir):
        if 'meson.build' not in files:
            continue
        with open(os.path.join(root, 'meson.build'), 'rt') as f:
            build_contents = f.read()
        for match in re.finditer(r"dependency\('([^']*)'.*?\)\n", build_contents):
            maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(match.group(1))
            if maybe_dep is not None:
                found_deps.append(maybe_dep)

    return found_deps


make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]


def enFlag(flag, enabled):
    """
    Returns a configure flag as a string

    Parameters:
    flag               The name of the flag
    enabled            Whether the flag is enabled or disabled
    """
    return '--' + ('enable' if enabled else 'disable') + '-' + flag


def mesonFeature(val):
    """
    Returns the meson feature flag value corresponding to val

    True is enabled which requires the feature.
    False is disabled which disables the feature.
    None is auto which autodetects the feature.

    Parameters:
    val                The value being converted
    """
    if val is True:
        return "enabled"
    elif val is False:
        return "disabled"
    elif val is None:
        return "auto"
    else:
        raise Exception("Bad meson feature value")


def parse_meson_options(options_file):
    """
    Returns a set of options defined in the provided meson_options.txt file

    Parameters:
    options_file       The file containing options
    """
    options_contents = ''
    with open(options_file, "rt") as f:
        options_contents += f.read()
    options = sets.Set()
    pattern = 'option\\(\\s*\'([^\']*)\''
    for match in re.compile(pattern).finditer(options_contents):
        options.add(match.group(1))
    return options
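
# Illustrative example (option contents hypothetical): a meson_options.txt
# containing the line
#   option('tests', type: 'feature', value: 'enabled')
# yields parse_meson_options() == {'tests'}, and mesonFeature(True) returns
# 'enabled', which build_and_install() below turns into '-Dtests=enabled'
# when building for test.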
395 """ 396 os.chdir(os.path.join(WORKSPACE, pkg)) 397 398 # Refresh dynamic linker run time bindings for dependencies 399 check_call_cmd('sudo', '-n', '--', 'ldconfig') 400 401 # Build & install this package 402 # Always try using meson first 403 if os.path.exists('meson.build'): 404 meson_options = sets.Set() 405 if os.path.exists("meson_options.txt"): 406 meson_options = parse_meson_options("meson_options.txt") 407 meson_flags = [ 408 '-Db_colorout=never', 409 '-Dwerror=true', 410 '-Dwarning_level=3', 411 ] 412 if build_for_testing: 413 meson_flags.append('--buildtype=debug') 414 else: 415 meson_flags.append('--buildtype=debugoptimized') 416 if 'tests' in meson_options: 417 meson_flags.append('-Dtests=' + mesonFeature(build_for_testing)) 418 if 'examples' in meson_options: 419 meson_flags.append('-Dexamples=' + str(build_for_testing).lower()) 420 if MESON_FLAGS.get(pkg) is not None: 421 meson_flags.extend(MESON_FLAGS.get(pkg)) 422 try: 423 check_call_cmd('meson', 'setup', '--reconfigure', 'build', *meson_flags) 424 except: 425 shutil.rmtree('build') 426 check_call_cmd('meson', 'setup', 'build', *meson_flags) 427 check_call_cmd('ninja', '-C', 'build') 428 check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install') 429 # Assume we are autoconf otherwise 430 else: 431 conf_flags = [ 432 enFlag('silent-rules', False), 433 enFlag('examples', build_for_testing), 434 enFlag('tests', build_for_testing), 435 ] 436 if not TEST_ONLY: 437 conf_flags.extend([ 438 enFlag('code-coverage', build_for_testing), 439 enFlag('valgrind', build_for_testing), 440 ]) 441 # Add any necessary configure flags for package 442 if CONFIGURE_FLAGS.get(pkg) is not None: 443 conf_flags.extend(CONFIGURE_FLAGS.get(pkg)) 444 for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']: 445 if os.path.exists(bootstrap): 446 check_call_cmd('./' + bootstrap) 447 break 448 check_call_cmd('./configure', *conf_flags) 449 check_call_cmd(*make_parallel) 450 check_call_cmd('sudo', '-n', '--', *(make_parallel + [ 'install' ])) 451 452def build_dep_tree(pkg, pkgdir, dep_added, head, branch, dep_tree=None): 453 """ 454 For each package(pkg), starting with the package to be unit tested, 455 parse its 'configure.ac' file from within the package's directory(pkgdir) 456 for each package dependency defined recursively doing the same thing 457 on each package found as a dependency. 


def build_dep_tree(pkg, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (pkg), starting with the package to be unit tested,
    parse its 'configure.ac' and 'meson.build' files from within the
    package's directory (pkgdir) for dependencies, recursively doing the
    same for each dependency found.

    Parameter descriptions:
    pkg                Name of the package
    pkgdir             Directory where package source is located
    dep_added          Current dict of dependencies and added status
    head               Head node of the dependency tree
    branch             Branch to clone from pkg
    dep_tree           Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head

    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)
    pkg_deps += get_meson_deps(pkgdir)

    for dep in sets.Set(pkg_deps):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
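
# Illustrative example (package names hypothetical): while resolving
# 'app' -> 'libfoo' -> 'libbar', dep_added grows from {'app': False} to
# {'app': False, 'libfoo': False, 'libbar': False}; each entry flips to True
# once that package's own dependencies have been fully processed. Hitting a
# dependency that is still marked False from deeper in the recursion means a
# cycle, which raises the exception above.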


def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target             The make target we are checking
    """
    try:
        cmd = ['make', '-n', target]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        return False


def run_unit_tests():
    """
    Runs the unit tests for the package via `make check`
    """
    try:
        cmd = make_parallel + ['check']
        for i in range(0, args.repeat):
            check_call_cmd(*cmd)
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            if 'test-suite.log' not in files:
                continue
            check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')


def run_cppcheck():
    """
    Runs cppcheck against the package source in the current directory.
    """
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        ignore_list = ['-i%s' % path for path in os.listdir(os.getcwd())
                       if path.endswith('-src') or path.endswith('-build')]
        ignore_list.extend(('-itest', '-iscripts'))
        params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
                  '--enable=all']
        params.extend(ignore_list)
        params.append('.')

        check_call_cmd(*params)
    except CalledProcessError:
        raise Exception('Cppcheck failed')


def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        os.remove(src)
        os.remove(exe)


def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)
        return True
    except:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        os.remove(src)
        os.remove(exe)


def meson_setup_exists(setup):
    """
    Returns whether the meson build supports the named test setup.

    Parameter descriptions:
    setup              The setup target to check
    """
    try:
        with open(os.devnull, 'w') as devnull:
            output = subprocess.check_output(
                ['meson', 'test', '-C', 'build',
                 '--setup', setup, '-t', '0'],
                stderr=subprocess.STDOUT)
    except CalledProcessError as e:
        output = e.output
    return not re.search('Test setup .* not found from project', output)


def maybe_meson_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `meson test`. The package can specify custom valgrind configurations
    by utilizing add_test_setup() in a meson.build
    """
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if meson_setup_exists('valgrind'):
        check_call_cmd('meson', 'test', '-C', 'build',
                       '--setup', 'valgrind')
    else:
        check_call_cmd('meson', 'test', '-C', 'build',
                       '--wrapper', 'valgrind')
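
# Illustrative example (setup name and wrapper arguments hypothetical): if a
# package's meson.build declares
#   add_test_setup('valgrind', exe_wrapper: ['valgrind'])
# then meson_setup_exists('valgrind') returns True and the tests run as
#   meson test -C build --setup valgrind
# otherwise maybe_meson_valgrind() falls back to
#   meson test -C build --wrapper valgrind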
677 """ 678 if not make_target_exists('check-code-coverage'): 679 return 680 681 # Actually run code coverage 682 try: 683 cmd = make_parallel + [ 'check-code-coverage' ] 684 check_call_cmd(*cmd) 685 except CalledProcessError: 686 raise Exception('Code coverage failed') 687 688if __name__ == '__main__': 689 # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS] 690 CONFIGURE_FLAGS = { 691 'sdbusplus': ['--enable-transaction'], 692 'phosphor-logging': 693 ['--enable-metadata-processing', 694 'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml'] 695 } 696 697 # MESON_FLAGS = [GIT REPO]:[MESON FLAGS] 698 MESON_FLAGS = { 699 } 700 701 # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO] 702 DEPENDENCIES = { 703 'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'}, 704 'AC_CHECK_HEADER': { 705 'host-ipmid': 'phosphor-host-ipmid', 706 'blobs-ipmid': 'phosphor-ipmi-blobs', 707 'sdbusplus': 'sdbusplus', 708 'sdeventplus': 'sdeventplus', 709 'stdplus': 'stdplus', 710 'gpioplus': 'gpioplus', 711 'phosphor-logging/log.hpp': 'phosphor-logging', 712 }, 713 'AC_PATH_PROG': {'sdbus++': 'sdbusplus'}, 714 'PKG_CHECK_MODULES': { 715 'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces', 716 'openpower-dbus-interfaces': 'openpower-dbus-interfaces', 717 'ibm-dbus-interfaces': 'ibm-dbus-interfaces', 718 'libipmid': 'phosphor-host-ipmid', 719 'libipmid-host': 'phosphor-host-ipmid', 720 'sdbusplus': 'sdbusplus', 721 'sdeventplus': 'sdeventplus', 722 'stdplus': 'stdplus', 723 'gpioplus': 'gpioplus', 724 'phosphor-logging': 'phosphor-logging', 725 'phosphor-snmp': 'phosphor-snmp', 726 'ipmiblob': 'ipmi-blob-tool', 727 }, 728 } 729 730 # Offset into array of macro parameters MACRO(0, 1, ...N) 731 DEPENDENCIES_OFFSET = { 732 'AC_CHECK_LIB': 0, 733 'AC_CHECK_HEADER': 0, 734 'AC_PATH_PROG': 1, 735 'PKG_CHECK_MODULES': 1, 736 } 737 738 # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING] 739 DEPENDENCIES_REGEX = { 740 'phosphor-logging': r'\S+-dbus-interfaces$' 741 } 742 743 # Set command line arguments 744 parser = argparse.ArgumentParser() 745 parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True, 746 help="Workspace directory location(i.e. /home)") 747 parser.add_argument("-p", "--package", dest="PACKAGE", required=True, 748 help="OpenBMC package to be unit tested") 749 parser.add_argument("-t", "--test-only", dest="TEST_ONLY", 750 action="store_true", required=False, default=False, 751 help="Only run test cases, no other validation") 752 parser.add_argument("-v", "--verbose", action="store_true", 753 help="Print additional package status messages") 754 parser.add_argument("-r", "--repeat", help="Repeat tests N times", 755 type=int, default=1) 756 parser.add_argument("-b", "--branch", dest="BRANCH", required=False, 757 help="Branch to target for dependent repositories", 758 default="master") 759 args = parser.parse_args(sys.argv[1:]) 760 WORKSPACE = args.WORKSPACE 761 UNIT_TEST_PKG = args.PACKAGE 762 TEST_ONLY = args.TEST_ONLY 763 BRANCH = args.BRANCH 764 if args.verbose: 765 def printline(*line): 766 for arg in line: 767 print arg, 768 print 769 else: 770 printline = lambda *l: None 771 772 # First validate code formatting if repo has style formatting files. 773 # The format-code.sh checks for these files. 

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd("./format-code.sh", CODE_SCAN_DIR)

    # Automake and meson
    if (os.path.isfile(CODE_SCAN_DIR + "/configure.ac") or
            os.path.isfile(CODE_SCAN_DIR + '/meson.build')):
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree,
                       BRANCH)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
        # Run package unit tests
        build_and_install(UNIT_TEST_PKG, True)
        if os.path.isfile(CODE_SCAN_DIR + '/meson.build'):
            if not TEST_ONLY:
                maybe_meson_valgrind()

                # Run clang-tidy only if the project has a configuration
                if os.path.isfile('.clang-tidy'):
                    check_call_cmd('run-clang-tidy-8.py', '-p',
                                   'build')
                # Run the basic clang static analyzer otherwise
                else:
                    check_call_cmd('ninja', '-C', 'build',
                                   'scan-build')

                # Run tests through sanitizers
                # b_lundef is needed if clang++ is CXX since it resolves the
                # asan symbols at runtime only. We don't want to set it earlier
                # in the build process to ensure we don't have undefined
                # runtime code.
                if is_sanitize_safe():
                    check_call_cmd('meson', 'configure', 'build',
                                   '-Db_sanitize=address,undefined',
                                   '-Db_lundef=false')
                    check_call_cmd('meson', 'test', '-C', 'build',
                                   '--logbase', 'testlog-ubasan')
                    # TODO: Fix memory sanitizer
                    #check_call_cmd('meson', 'configure', 'build',
                    #               '-Db_sanitize=memory')
                    #check_call_cmd('meson', 'test', '-C', 'build'
                    #               '--logbase', 'testlog-msan')
                    check_call_cmd('meson', 'configure', 'build',
                                   '-Db_sanitize=none', '-Db_lundef=true')
                else:
                    sys.stderr.write("###### Skipping sanitizers ######\n")

                # Run coverage checks
                check_call_cmd('meson', 'configure', 'build',
                               '-Db_coverage=true')
                check_call_cmd('meson', 'test', '-C', 'build')
                # Only build coverage HTML if coverage files were produced
                for root, dirs, files in os.walk('build'):
                    if any([f.endswith('.gcda') for f in files]):
                        check_call_cmd('ninja', '-C', 'build',
                                       'coverage-html')
                        break
                check_call_cmd('meson', 'configure', 'build',
                               '-Db_coverage=false')
            else:
                check_call_cmd('meson', 'test', '-C', 'build')
        else:
            run_unit_tests()
            if not TEST_ONLY:
                maybe_make_valgrind()
                maybe_make_coverage()
        if not TEST_ONLY:
            run_cppcheck()

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
        check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')
        if not TEST_ONLY:
            maybe_make_valgrind()
            maybe_make_coverage()
            run_cppcheck()
            if os.path.isfile('.clang-tidy'):
                check_call_cmd('run-clang-tidy-8.py', '-p', '.')

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()