#!/usr/bin/env python3

"""
This script determines the given package's openbmc dependencies from its
configure.ac file where it downloads, configures, builds, and installs each of
these dependencies. Then the given package is configured, built, and installed
prior to executing its unit tests.
"""

# Standard library.
import argparse
import multiprocessing
import os
import platform
import re
import shutil
import subprocess
import sys
from subprocess import CalledProcessError, call, check_call
from urllib.parse import urljoin

# Third-party.
from git import Repo
# interpreter is not used directly but this resolves dependency ordering
# that would be broken if we didn't include it.
from mesonbuild import interpreter
from mesonbuild import coredata, optinterpreter
from mesonbuild.mesonlib import OptionKey
from mesonbuild.mesonlib import version_compare as meson_version_compare
class DepTree:
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node. Returns the new child node.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first child node with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or match:
            new_paths.append(path + [self.name])
        for child in self.children:
            # Collect matching paths from the entire subtree.
            full_path = path + [self.name]
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree. If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i, path in enumerate(paths):
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        # Re-parent every matching node found after the 'name' node,
        # unless it already lives under 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names (dependencies before dependents).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
def check_call_cmd(*cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Parameter descriptions:
    cmd                List of parameters constructing the complete command
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)


def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location

    Parameter descriptions:
    pkg                Name of the package to clone
    branch             Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        # Already cloned; reuse the existing checkout.
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
        repo_inst = clone.working_dir
    except Exception:
        # Fall back to master when the requested branch does not exist.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
        repo_inst = clone.working_dir
    return repo_inst


def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target             The make target we are checking
    """
    try:
        cmd = ['make', '-n', target]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        return False


make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]


def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name               The name of the package we are building
    build_for_testing  Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    pkg = Package()
    if build_for_testing:
        pkg.test()
    else:
        pkg.install()
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name               Name of the package
    pkgdir             Directory where package source is located
    dep_added          Current dict of dependencies and added status
    head               Head node of the dependency tree
    branch             Branch to clone from pkg
    dep_tree           Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head

    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): substring test against the cache line — assumes
        # dependency names never collide as substrings; confirm.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added


def run_cppcheck():
    """
    Run cppcheck over all tracked C/C++ sources (excluding .mako. templates).
    Raises an Exception if cppcheck reports a failure.
    """
    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    cppcheck_files = []
    stdout = subprocess.check_output(['git', 'ls-files'])

    for f in stdout.decode('utf-8').split():
        if match_re.match(f):
            cppcheck_files.append(f)

    if not cppcheck_files:
        # skip cppcheck if there aren't any c or cpp sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--library=googletest', '--file-list=-']

    cppcheck_process = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    (stdout, stderr) = cppcheck_process.communicate(
        input='\n'.join(cppcheck_files).encode('utf-8'))

    if cppcheck_process.wait():
        raise Exception('Cppcheck failed')
    print(stdout.decode('utf-8'))
    print(stderr.decode('utf-8'))


def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The exe may not exist if compilation failed; don't let cleanup
        # mask the real error with a FileNotFoundError.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == 'ppc64le':
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The exe may not exist if compilation failed; don't let cleanup
        # mask the real error with a FileNotFoundError.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)


def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(*cmd)
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                # Escaped '.' so only real .log suffixes match.
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        cmd = make_parallel + ['check-code-coverage']
        check_call_cmd(*cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')


class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.
    """
    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        # NotImplementedError (not the NotImplemented singleton, which raises
        # a TypeError when used with `raise`) marks this as abstract.
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
class Autotools(BuildSystem):
    """BuildSystem driver for autotools (configure.ac) packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies by running autoconf over configure.ac
        with marker macros injected around the dependency-declaring macros."""
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag               The name of the flag
        enabled            Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
            self._configure_feature('code-coverage', build_for_testing),
            self._configure_feature('valgrind', build_for_testing),
        ]
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        try:
            cmd = make_parallel + ['check']
            for _ in range(args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        run_cppcheck()
class CMake(BuildSystem):
    """BuildSystem driver for CMake (CMakeLists.txt) packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        if INTEGRATION_TEST:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-DITESTS=ON', '.')
        else:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        pass

    def test(self):
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        if os.path.isfile('.clang-tidy'):
            try:
                os.mkdir("tidy-build")
            except FileExistsError:
                pass
            # clang-tidy needs to run on a clang-specific build
            check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                           '-DCMAKE_CXX_COMPILER=clang++',
                           '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-H.',
                           '-Btidy-build')
            # we need to cd here because otherwise clang-tidy doesn't find the
            # .clang-tidy file in the roots of repos. It's arguably a "bug"
            # with run-clang-tidy at a minimum it's "weird" that it requires
            # the .clang-tidy to be up a dir
            os.chdir("tidy-build")
            try:
                check_call_cmd('run-clang-tidy', "-header-filter=.*", '-p',
                               '.')
            finally:
                os.chdir("..")

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
class Meson(BuildSystem):
    """BuildSystem driver for Meson (meson.build) packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        return os.path.isfile(os.path.join(self.path, 'meson.build'))

    def dependencies(self):
        meson_build = os.path.join(self.path, 'meson.build')
        if not os.path.exists(meson_build):
            return []

        found_deps = []
        for root, dirs, files in os.walk(self.path):
            if 'meson.build' not in files:
                continue
            with open(os.path.join(root, 'meson.build'), 'rt') as f:
                build_contents = f.read()
            pattern = r"dependency\('([^']*)'.*?\),?\n"
            for match in re.finditer(pattern, build_contents):
                group = match.group(1)
                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
                if maybe_dep is not None:
                    found_deps.append(maybe_dep)

        return found_deps

    def _parse_options(self, options_file):
        """
        Returns a set of options defined in the provided meson_options.txt file

        Parameters:
        options_file        The file containing options
        """
        oi = optinterpreter.OptionInterpreter('')
        oi.process(options_file)
        return oi.options

    def _configure_boolean(self, val):
        """
        Returns the meson flag which signifies the value

        True is true which requires the boolean.
        False is false which disables the boolean.

        Parameters:
        val                 The value being converted
        """
        if val is True:
            return 'true'
        elif val is False:
            return 'false'
        else:
            raise Exception("Bad meson boolean value")

    def _configure_feature(self, val):
        """
        Returns the meson flag which signifies the value

        True is enabled which requires the feature.
        False is disabled which disables the feature.
        None is auto which autodetects the feature.

        Parameters:
        val                 The value being converted
        """
        if val is True:
            return "enabled"
        elif val is False:
            return "disabled"
        elif val is None:
            return "auto"
        else:
            raise Exception("Bad meson feature value")

    def _configure_option(self, opts, key, val):
        """
        Returns the meson flag which signifies the value
        based on the type of the opt

        Parameters:
        opts                The meson option dictionary
        key                 The option key being set
        val                 The value being converted
        """
        if isinstance(opts[key], coredata.UserBooleanOption):
            str_val = self._configure_boolean(val)
        elif isinstance(opts[key], coredata.UserFeatureOption):
            str_val = self._configure_feature(val)
        else:
            raise Exception('Unknown meson option type')
        return "-D{}={}".format(key, str_val)

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        meson_options = {}
        if os.path.exists("meson_options.txt"):
            meson_options = self._parse_options("meson_options.txt")
        meson_flags = [
            '-Db_colorout=never',
            '-Dwerror=true',
            '-Dwarning_level=3',
        ]
        if build_for_testing:
            meson_flags.append('--buildtype=debug')
        else:
            meson_flags.append('--buildtype=debugoptimized')
        if OptionKey('tests') in meson_options:
            meson_flags.append(self._configure_option(
                meson_options, OptionKey('tests'), build_for_testing))
        if OptionKey('examples') in meson_options:
            meson_flags.append(self._configure_option(
                meson_options, OptionKey('examples'), build_for_testing))
        if OptionKey('itests') in meson_options:
            meson_flags.append(self._configure_option(
                meson_options, OptionKey('itests'), INTEGRATION_TEST))
        if MESON_FLAGS.get(self.package) is not None:
            meson_flags.extend(MESON_FLAGS.get(self.package))
        try:
            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
                           *meson_flags)
        except Exception:
            # Reconfigure of a stale/broken build dir failed; start clean.
            shutil.rmtree('build')
            check_call_cmd('meson', 'setup', 'build', *meson_flags)

    def build(self):
        check_call_cmd('ninja', '-C', 'build')

    def install(self):
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')

    def test(self):
        # It is useful to check various settings of the meson.build file
        # for compatibility, such as meson_version checks.  We shouldn't
        # do this in the configure path though because it affects subprojects
        # and dependencies as well, but we only want this applied to the
        # project-under-test (otherwise an upstream dependency could fail
        # this check without our control).
        self._extra_meson_checks()

        try:
            test_args = ('--repeat', str(args.repeat), '-C', 'build')
            check_call_cmd('meson', 'test', *test_args)

        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'testlog.txt' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
            raise Exception('Unit tests failed')

    def _setup_exists(self, setup):
        """
        Returns whether the meson build supports the named test setup.

        Parameter descriptions:
        setup              The setup target to check
        """
        try:
            output = subprocess.check_output(
                ['meson', 'test', '-C', 'build',
                 '--setup', setup, '-t', '0'],
                stderr=subprocess.STDOUT)
        except CalledProcessError as e:
            output = e.output
        output = output.decode('utf-8')
        return not re.search('Test setup .* not found from project', output)

    def _maybe_valgrind(self):
        """
        Potentially runs the unit tests through valgrind for the package
        via `meson test`. The package can specify custom valgrind
        configurations by utilizing add_test_setup() in a meson.build
        """
        if not is_valgrind_safe():
            sys.stderr.write("###### Skipping valgrind ######\n")
            return
        try:
            if self._setup_exists('valgrind'):
                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
                               '--setup', 'valgrind')
            else:
                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
                               '--wrapper', 'valgrind')
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'testlog-valgrind.txt' not in files:
                    continue
                cat_args = os.path.join(root, 'testlog-valgrind.txt')
                check_call_cmd('cat', cat_args)
            raise Exception('Valgrind tests failed')

    def analyze(self):
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            os.environ["CXX"] = "clang++"
            check_call_cmd('meson', 'setup', 'build-clang')
            check_call_cmd('run-clang-tidy', '-p', 'build-clang')
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build', 'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any(f.endswith('.gcda') for f in files):
                check_call_cmd('ninja', '-C', 'build', 'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()

    def _extra_meson_checks(self):
        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
            build_contents = f.read()

        # Find project's specified meson_version.
        meson_version = None
        pattern = r"meson_version:[^']*'([^']*)'"
        for match in re.finditer(pattern, build_contents):
            meson_version = match.group(1)

        # C++20 requires at least Meson 0.57 but Meson itself doesn't
        # identify this.  Add to our unit-test checks so that we don't
        # get a meson.build missing this.
        pattern = r"'cpp_std=c\+\+20'"
        for match in re.finditer(pattern, build_contents):
            if not meson_version or \
                    not meson_version_compare(meson_version, ">=0.57"):
                raise Exception(
                    "C++20 support requires specifying in meson.build: "
                    + "meson_version: '>=0.57'")
class Package(object):
    def __init__(self, name=None, path=None):
        # Build system drivers, probed in priority order.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield instantiated drivers whose probe() accepts this package."""
        instances = (system(self.name, self.path)
                     for system in self.supported)
        return (instance for instance in instances if instance.probe())

    def build_system(self, preferred=None):
        """Return the preferred (or highest-priority) build system driver,
        or None when no driver recognises the package."""
        systems = list(self.build_systems())

        if not systems:
            return None

        if preferred:
            # Raises KeyError if the preferred driver did not probe true.
            return {type(system): system for system in systems}[preferred]

        return next(iter(systems))

    def install(self, system=None):
        """Configure, build and install the package as a dependency."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full test flow for one build system driver."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        for system in self.build_systems():
            self._test_one(system)


def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename           The name of the file (or list of files) to
                       find
    basedir            The base directory search in
    """

    # Accept a single name or any iterable of names; previously a tuple
    # was wrapped in a list and could never match an entry of os.walk().
    if isinstance(filename, str):
        filename = [filename]

    filepaths = []
    for root, dirs, files in os.walk(basedir):
        if os.path.split(root)[-1] == 'subprojects':
            for f in files:
                subproject = '.'.join(f.split('.')[0:-1])
                if f.endswith('.wrap') and subproject in dirs:
                    # don't find files in meson subprojects with wraps
                    dirs.remove(subproject)
        for f in filename:
            if f in files:
                filepaths.append(os.path.join(root, f))
    return filepaths
1088 1089 Parameter descriptions: 1090 filename The name of the file (or list of files) to 1091 find 1092 basedir The base directory search in 1093 """ 1094 1095 if not isinstance(filename, list): 1096 filename = [ filename ] 1097 1098 filepaths = [] 1099 for root, dirs, files in os.walk(basedir): 1100 if os.path.split(root)[-1] == 'subprojects': 1101 for f in files: 1102 subproject = '.'.join(f.split('.')[0:-1]) 1103 if f.endswith('.wrap') and subproject in dirs: 1104 # don't find files in meson subprojects with wraps 1105 dirs.remove(subproject) 1106 for f in filename: 1107 if f in files: 1108 filepaths.append(os.path.join(root, f)) 1109 return filepaths 1110 1111 1112if __name__ == '__main__': 1113 # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS] 1114 CONFIGURE_FLAGS = { 1115 'phosphor-logging': 1116 ['--enable-metadata-processing', '--enable-openpower-pel-extension', 1117 'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml'] 1118 } 1119 1120 # MESON_FLAGS = [GIT REPO]:[MESON FLAGS] 1121 MESON_FLAGS = { 1122 'phosphor-dbus-interfaces': 1123 ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'], 1124 'phosphor-logging': 1125 ['-Dopenpower-pel-extension=enabled'] 1126 } 1127 1128 # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO] 1129 DEPENDENCIES = { 1130 'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'}, 1131 'AC_CHECK_HEADER': { 1132 'host-ipmid': 'phosphor-host-ipmid', 1133 'blobs-ipmid': 'phosphor-ipmi-blobs', 1134 'sdbusplus': 'sdbusplus', 1135 'sdeventplus': 'sdeventplus', 1136 'stdplus': 'stdplus', 1137 'gpioplus': 'gpioplus', 1138 'phosphor-logging/log.hpp': 'phosphor-logging', 1139 }, 1140 'AC_PATH_PROG': {'sdbus++': 'sdbusplus'}, 1141 'PKG_CHECK_MODULES': { 1142 'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces', 1143 'libipmid': 'phosphor-host-ipmid', 1144 'libipmid-host': 'phosphor-host-ipmid', 1145 'sdbusplus': 'sdbusplus', 1146 'sdeventplus': 'sdeventplus', 1147 'stdplus': 'stdplus', 1148 'gpioplus': 'gpioplus', 1149 'phosphor-logging': 
'phosphor-logging', 1150 'phosphor-snmp': 'phosphor-snmp', 1151 'ipmiblob': 'ipmi-blob-tool', 1152 'hei': 'openpower-libhei', 1153 'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs', 1154 'libcr51sign': 'google-misc', 1155 }, 1156 } 1157 1158 # Offset into array of macro parameters MACRO(0, 1, ...N) 1159 DEPENDENCIES_OFFSET = { 1160 'AC_CHECK_LIB': 0, 1161 'AC_CHECK_HEADER': 0, 1162 'AC_PATH_PROG': 1, 1163 'PKG_CHECK_MODULES': 1, 1164 } 1165 1166 # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING] 1167 DEPENDENCIES_REGEX = { 1168 'phosphor-logging': r'\S+-dbus-interfaces$' 1169 } 1170 1171 # Set command line arguments 1172 parser = argparse.ArgumentParser() 1173 parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True, 1174 help="Workspace directory location(i.e. /home)") 1175 parser.add_argument("-p", "--package", dest="PACKAGE", required=True, 1176 help="OpenBMC package to be unit tested") 1177 parser.add_argument("-t", "--test-only", dest="TEST_ONLY", 1178 action="store_true", required=False, default=False, 1179 help="Only run test cases, no other validation") 1180 arg_inttests = parser.add_mutually_exclusive_group() 1181 arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST", 1182 action="store_true", required=False, default=True, 1183 help="Enable integration tests [default].") 1184 arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST", 1185 action="store_false", required=False, 1186 help="Disable integration tests.") 1187 parser.add_argument("-v", "--verbose", action="store_true", 1188 help="Print additional package status messages") 1189 parser.add_argument("-r", "--repeat", help="Repeat tests N times", 1190 type=int, default=1) 1191 parser.add_argument("-b", "--branch", dest="BRANCH", required=False, 1192 help="Branch to target for dependent repositories", 1193 default="master") 1194 parser.add_argument("-n", "--noformat", dest="FORMAT", 1195 action="store_false", required=False, 1196 help="Whether or not to 
run format code") 1197 args = parser.parse_args(sys.argv[1:]) 1198 WORKSPACE = args.WORKSPACE 1199 UNIT_TEST_PKG = args.PACKAGE 1200 TEST_ONLY = args.TEST_ONLY 1201 INTEGRATION_TEST = args.INTEGRATION_TEST 1202 BRANCH = args.BRANCH 1203 FORMAT_CODE = args.FORMAT 1204 if args.verbose: 1205 def printline(*line): 1206 for arg in line: 1207 print(arg, end=' ') 1208 print() 1209 else: 1210 def printline(*line): 1211 pass 1212 1213 CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG) 1214 1215 # First validate code formatting if repo has style formatting files. 1216 # The format-code.sh checks for these files. 1217 if FORMAT_CODE: 1218 format_scripts = find_file(['format-code.sh', 'format-code'], 1219 CODE_SCAN_DIR) 1220 1221 # use default format-code.sh if no other found 1222 if not format_scripts: 1223 format_scripts.append(os.path.join(WORKSPACE, "format-code.sh")) 1224 1225 for f in format_scripts: 1226 check_call_cmd(f, CODE_SCAN_DIR) 1227 1228 # Check if this repo has a supported make infrastructure 1229 pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR) 1230 if not pkg.build_system(): 1231 print("No valid build system, exit") 1232 sys.exit(0) 1233 1234 prev_umask = os.umask(000) 1235 1236 # Determine dependencies and add them 1237 dep_added = dict() 1238 dep_added[UNIT_TEST_PKG] = False 1239 1240 # Create dependency tree 1241 dep_tree = DepTree(UNIT_TEST_PKG) 1242 build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH) 1243 1244 # Reorder Dependency Tree 1245 for pkg_name, regex_str in DEPENDENCIES_REGEX.items(): 1246 dep_tree.ReorderDeps(pkg_name, regex_str) 1247 if args.verbose: 1248 dep_tree.PrintTree() 1249 1250 install_list = dep_tree.GetInstallList() 1251 1252 # We don't want to treat our package as a dependency 1253 install_list.remove(UNIT_TEST_PKG) 1254 1255 # Install reordered dependencies 1256 for dep in install_list: 1257 build_and_install(dep, False) 1258 1259 # Run package unit tests 1260 build_and_install(UNIT_TEST_PKG, True) 1261 1262 
os.umask(prev_umask) 1263 1264 # Run any custom CI scripts the repo has, of which there can be 1265 # multiple of and anywhere in the repository. 1266 ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR) 1267 if ci_scripts: 1268 os.chdir(CODE_SCAN_DIR) 1269 for ci_script in ci_scripts: 1270 check_call_cmd(ci_script) 1271