1#!/usr/bin/env python3 2 3""" 4This script determines the given package's openbmc dependencies from its 5configure.ac file where it downloads, configures, builds, and installs each of 6these dependencies. Then the given package is configured, built, and installed 7prior to executing its unit tests. 8""" 9 10from git import Repo 11from mesonbuild import coredata, optinterpreter 12from mesonbuild.mesonlib import OptionKey 13from mesonbuild.mesonlib import version_compare as meson_version_compare 14from urllib.parse import urljoin 15from subprocess import check_call, call, CalledProcessError 16import os 17import sys 18import argparse 19import multiprocessing 20import re 21import subprocess 22import shutil 23import platform 24 25 26class DepTree(): 27 """ 28 Represents package dependency tree, where each node is a DepTree with a 29 name and DepTree children. 30 """ 31 32 def __init__(self, name): 33 """ 34 Create new DepTree. 35 36 Parameter descriptions: 37 name Name of new tree node. 38 """ 39 self.name = name 40 self.children = list() 41 42 def AddChild(self, name): 43 """ 44 Add new child node to current node. 45 46 Parameter descriptions: 47 name Name of new child 48 """ 49 new_child = DepTree(name) 50 self.children.append(new_child) 51 return new_child 52 53 def AddChildNode(self, node): 54 """ 55 Add existing child node to current node. 56 57 Parameter descriptions: 58 node Tree node to add 59 """ 60 self.children.append(node) 61 62 def RemoveChild(self, name): 63 """ 64 Remove child node. 65 66 Parameter descriptions: 67 name Name of child to remove 68 """ 69 for child in self.children: 70 if child.name == name: 71 self.children.remove(child) 72 return 73 74 def GetNode(self, name): 75 """ 76 Return node with matching name. Return None if not found. 77 78 Parameter descriptions: 79 name Name of node to return 80 """ 81 if self.name == name: 82 return self 83 for child in self.children: 84 node = child.GetNode(name) 85 if node: 86 return node 87 return None 88 89 def GetParentNode(self, name, parent_node=None): 90 """ 91 Return parent of node with matching name. Return none if not found. 92 93 Parameter descriptions: 94 name Name of node to get parent of 95 parent_node Parent of current node 96 """ 97 if self.name == name: 98 return parent_node 99 for child in self.children: 100 found_node = child.GetParentNode(name, self) 101 if found_node: 102 return found_node 103 return None 104 105 def GetPath(self, name, path=None): 106 """ 107 Return list of node names from head to matching name. 108 Return None if not found. 109 110 Parameter descriptions: 111 name Name of node 112 path List of node names from head to current node 113 """ 114 if not path: 115 path = [] 116 if self.name == name: 117 path.append(self.name) 118 return path 119 for child in self.children: 120 match = child.GetPath(name, path + [self.name]) 121 if match: 122 return match 123 return None 124 125 def GetPathRegex(self, name, regex_str, path=None): 126 """ 127 Return list of node paths that end in name, or match regex_str. 128 Return empty list if not found. 129 130 Parameter descriptions: 131 name Name of node to search for 132 regex_str Regex string to match node names 133 path Path of node names from head to current node 134 """ 135 new_paths = [] 136 if not path: 137 path = [] 138 match = re.match(regex_str, self.name) 139 if (self.name == name) or (match): 140 new_paths.append(path + [self.name]) 141 for child in self.children: 142 return_paths = None 143 full_path = path + [self.name] 144 return_paths = child.GetPathRegex(name, regex_str, full_path) 145 for i in return_paths: 146 new_paths.append(i) 147 return new_paths 148 149 def MoveNode(self, from_name, to_name): 150 """ 151 Mode existing from_name node to become child of to_name node. 152 153 Parameter descriptions: 154 from_name Name of node to make a child of to_name 155 to_name Name of node to make parent of from_name 156 """ 157 parent_from_node = self.GetParentNode(from_name) 158 from_node = self.GetNode(from_name) 159 parent_from_node.RemoveChild(from_name) 160 to_node = self.GetNode(to_name) 161 to_node.AddChildNode(from_node) 162 163 def ReorderDeps(self, name, regex_str): 164 """ 165 Reorder dependency tree. If tree contains nodes with names that 166 match 'name' and 'regex_str', move 'regex_str' nodes that are 167 to the right of 'name' node, so that they become children of the 168 'name' node. 169 170 Parameter descriptions: 171 name Name of node to look for 172 regex_str Regex string to match names to 173 """ 174 name_path = self.GetPath(name) 175 if not name_path: 176 return 177 paths = self.GetPathRegex(name, regex_str) 178 is_name_in_paths = False 179 name_index = 0 180 for i in range(len(paths)): 181 path = paths[i] 182 if path[-1] == name: 183 is_name_in_paths = True 184 name_index = i 185 break 186 if not is_name_in_paths: 187 return 188 for i in range(name_index + 1, len(paths)): 189 path = paths[i] 190 if name in path: 191 continue 192 from_name = path[-1] 193 self.MoveNode(from_name, name) 194 195 def GetInstallList(self): 196 """ 197 Return post-order list of node names. 198 199 Parameter descriptions: 200 """ 201 install_list = [] 202 for child in self.children: 203 child_install_list = child.GetInstallList() 204 install_list.extend(child_install_list) 205 install_list.append(self.name) 206 return install_list 207 208 def PrintTree(self, level=0): 209 """ 210 Print pre-order node names with indentation denoting node depth level. 211 212 Parameter descriptions: 213 level Current depth level 214 """ 215 INDENT_PER_LEVEL = 4 216 print(' ' * (level * INDENT_PER_LEVEL) + self.name) 217 for child in self.children: 218 child.PrintTree(level + 1) 219 220 221def check_call_cmd(*cmd): 222 """ 223 Verbose prints the directory location the given command is called from and 224 the command, then executes the command using check_call. 225 226 Parameter descriptions: 227 dir Directory location command is to be called from 228 cmd List of parameters constructing the complete command 229 """ 230 printline(os.getcwd(), ">", " ".join(cmd)) 231 check_call(cmd) 232 233 234def clone_pkg(pkg, branch): 235 """ 236 Clone the given openbmc package's git repository from gerrit into 237 the WORKSPACE location 238 239 Parameter descriptions: 240 pkg Name of the package to clone 241 branch Branch to clone from pkg 242 """ 243 pkg_dir = os.path.join(WORKSPACE, pkg) 244 if os.path.exists(os.path.join(pkg_dir, '.git')): 245 return pkg_dir 246 pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg) 247 os.mkdir(pkg_dir) 248 printline(pkg_dir, "> git clone", pkg_repo, branch, "./") 249 try: 250 # first try the branch 251 clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch) 252 repo_inst = clone.working_dir 253 except: 254 printline("Input branch not found, default to master") 255 clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master") 256 repo_inst = clone.working_dir 257 return repo_inst 258 259 260def make_target_exists(target): 261 """ 262 Runs a check against the makefile in the current directory to determine 263 if the target exists so that it can be built. 264 265 Parameter descriptions: 266 target The make target we are checking 267 """ 268 try: 269 cmd = ['make', '-n', target] 270 with open(os.devnull, 'w') as devnull: 271 check_call(cmd, stdout=devnull, stderr=devnull) 272 return True 273 except CalledProcessError: 274 return False 275 276 277make_parallel = [ 278 'make', 279 # Run enough jobs to saturate all the cpus 280 '-j', str(multiprocessing.cpu_count()), 281 # Don't start more jobs if the load avg is too high 282 '-l', str(multiprocessing.cpu_count()), 283 # Synchronize the output so logs aren't intermixed in stdout / stderr 284 '-O', 285] 286 287 288def build_and_install(name, build_for_testing=False): 289 """ 290 Builds and installs the package in the environment. Optionally 291 builds the examples and test cases for package. 292 293 Parameter description: 294 name The name of the package we are building 295 build_for_testing Enable options related to testing on the package? 296 """ 297 os.chdir(os.path.join(WORKSPACE, name)) 298 299 # Refresh dynamic linker run time bindings for dependencies 300 check_call_cmd('sudo', '-n', '--', 'ldconfig') 301 302 pkg = Package() 303 if build_for_testing: 304 pkg.test() 305 else: 306 pkg.install() 307 308 309def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None): 310 """ 311 For each package (name), starting with the package to be unit tested, 312 extract its dependencies. For each package dependency defined, recursively 313 apply the same strategy 314 315 Parameter descriptions: 316 name Name of the package 317 pkgdir Directory where package source is located 318 dep_added Current dict of dependencies and added status 319 head Head node of the dependency tree 320 branch Branch to clone from pkg 321 dep_tree Current dependency tree node 322 """ 323 if not dep_tree: 324 dep_tree = head 325 326 with open("/tmp/depcache", "r") as depcache: 327 cache = depcache.readline() 328 329 # Read out pkg dependencies 330 pkg = Package(name, pkgdir) 331 332 build = pkg.build_system() 333 if build == None: 334 raise Exception(f"Unable to find build system for {name}.") 335 336 for dep in set(build.dependencies()): 337 if dep in cache: 338 continue 339 # Dependency package not already known 340 if dep_added.get(dep) is None: 341 print(f"Adding {dep} dependency to {name}.") 342 # Dependency package not added 343 new_child = dep_tree.AddChild(dep) 344 dep_added[dep] = False 345 dep_pkgdir = clone_pkg(dep, branch) 346 # Determine this dependency package's 347 # dependencies and add them before 348 # returning to add this package 349 dep_added = build_dep_tree(dep, 350 dep_pkgdir, 351 dep_added, 352 head, 353 branch, 354 new_child) 355 else: 356 # Dependency package known and added 357 if dep_added[dep]: 358 continue 359 else: 360 # Cyclic dependency failure 361 raise Exception("Cyclic dependencies found in "+name) 362 363 if not dep_added[name]: 364 dep_added[name] = True 365 366 return dep_added 367 368 369def run_cppcheck(): 370 match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I) 371 cppcheck_files = [] 372 stdout = subprocess.check_output(['git', 'ls-files']) 373 374 for f in stdout.decode('utf-8').split(): 375 if match_re.match(f): 376 cppcheck_files.append(f) 377 378 if not cppcheck_files: 379 # skip cppcheck if there arent' any c or cpp sources. 380 print("no files") 381 return None 382 383 # http://cppcheck.sourceforge.net/manual.pdf 384 params = ['cppcheck', '-j', str(multiprocessing.cpu_count()), 385 '--enable=all', '--library=googletest', '--file-list=-'] 386 387 cppcheck_process = subprocess.Popen( 388 params, 389 stdout=subprocess.PIPE, 390 stderr=subprocess.PIPE, 391 stdin=subprocess.PIPE) 392 (stdout, stderr) = cppcheck_process.communicate( 393 input='\n'.join(cppcheck_files).encode('utf-8')) 394 395 if cppcheck_process.wait(): 396 raise Exception('Cppcheck failed') 397 print(stdout.decode('utf-8')) 398 print(stderr.decode('utf-8')) 399 400 401def is_valgrind_safe(): 402 """ 403 Returns whether it is safe to run valgrind on our platform 404 """ 405 src = 'unit-test-vg.c' 406 exe = './unit-test-vg' 407 with open(src, 'w') as h: 408 h.write('#include <errno.h>\n') 409 h.write('#include <stdio.h>\n') 410 h.write('#include <stdlib.h>\n') 411 h.write('#include <string.h>\n') 412 h.write('int main() {\n') 413 h.write('char *heap_str = malloc(16);\n') 414 h.write('strcpy(heap_str, "RandString");\n') 415 h.write('int res = strcmp("RandString", heap_str);\n') 416 h.write('free(heap_str);\n') 417 h.write('char errstr[64];\n') 418 h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n') 419 h.write('printf("%s\\n", errstr);\n') 420 h.write('return res;\n') 421 h.write('}\n') 422 try: 423 with open(os.devnull, 'w') as devnull: 424 check_call(['gcc', '-O2', '-o', exe, src], 425 stdout=devnull, stderr=devnull) 426 check_call(['valgrind', '--error-exitcode=99', exe], 427 stdout=devnull, stderr=devnull) 428 return True 429 except: 430 sys.stderr.write("###### Platform is not valgrind safe ######\n") 431 return False 432 finally: 433 os.remove(src) 434 os.remove(exe) 435 436 437def is_sanitize_safe(): 438 """ 439 Returns whether it is safe to run sanitizers on our platform 440 """ 441 src = 'unit-test-sanitize.c' 442 exe = './unit-test-sanitize' 443 with open(src, 'w') as h: 444 h.write('int main() { return 0; }\n') 445 try: 446 with open(os.devnull, 'w') as devnull: 447 check_call(['gcc', '-O2', '-fsanitize=address', 448 '-fsanitize=undefined', '-o', exe, src], 449 stdout=devnull, stderr=devnull) 450 check_call([exe], stdout=devnull, stderr=devnull) 451 452 # TODO - Sanitizer not working on ppc64le 453 # https://github.com/openbmc/openbmc-build-scripts/issues/31 454 if (platform.processor() == 'ppc64le'): 455 sys.stderr.write("###### ppc64le is not sanitize safe ######\n") 456 return False 457 else: 458 return True 459 except: 460 sys.stderr.write("###### Platform is not sanitize safe ######\n") 461 return False 462 finally: 463 os.remove(src) 464 os.remove(exe) 465 466 467def maybe_make_valgrind(): 468 """ 469 Potentially runs the unit tests through valgrind for the package 470 via `make check-valgrind`. If the package does not have valgrind testing 471 then it just skips over this. 472 """ 473 # Valgrind testing is currently broken by an aggressive strcmp optimization 474 # that is inlined into optimized code for POWER by gcc 7+. Until we find 475 # a workaround, just don't run valgrind tests on POWER. 476 # https://github.com/openbmc/openbmc/issues/3315 477 if not is_valgrind_safe(): 478 sys.stderr.write("###### Skipping valgrind ######\n") 479 return 480 if not make_target_exists('check-valgrind'): 481 return 482 483 try: 484 cmd = make_parallel + ['check-valgrind'] 485 check_call_cmd(*cmd) 486 except CalledProcessError: 487 for root, _, files in os.walk(os.getcwd()): 488 for f in files: 489 if re.search('test-suite-[a-z]+.log', f) is None: 490 continue 491 check_call_cmd('cat', os.path.join(root, f)) 492 raise Exception('Valgrind tests failed') 493 494 495def maybe_make_coverage(): 496 """ 497 Potentially runs the unit tests through code coverage for the package 498 via `make check-code-coverage`. If the package does not have code coverage 499 testing then it just skips over this. 500 """ 501 if not make_target_exists('check-code-coverage'): 502 return 503 504 # Actually run code coverage 505 try: 506 cmd = make_parallel + ['check-code-coverage'] 507 check_call_cmd(*cmd) 508 except CalledProcessError: 509 raise Exception('Code coverage failed') 510 511 512class BuildSystem(object): 513 """ 514 Build systems generally provide the means to configure, build, install and 515 test software. The BuildSystem class defines a set of interfaces on top of 516 which Autotools, Meson, CMake and possibly other build system drivers can 517 be implemented, separating out the phases to control whether a package 518 should merely be installed or also tested and analyzed. 519 """ 520 def __init__(self, package, path): 521 """Initialise the driver with properties independent of the build system 522 523 Keyword arguments: 524 package: The name of the package. Derived from the path if None 525 path: The path to the package. Set to the working directory if None 526 """ 527 self.path = "." if not path else path 528 realpath = os.path.realpath(self.path) 529 self.package = package if package else os.path.basename(realpath) 530 self.build_for_testing = False 531 532 def probe(self): 533 """Test if the build system driver can be applied to the package 534 535 Return True if the driver can drive the package's build system, 536 otherwise False. 537 538 Generally probe() is implemented by testing for the presence of the 539 build system's configuration file(s). 540 """ 541 raise NotImplemented 542 543 def dependencies(self): 544 """Provide the package's dependencies 545 546 Returns a list of dependencies. If no dependencies are required then an 547 empty list must be returned. 548 549 Generally dependencies() is implemented by analysing and extracting the 550 data from the build system configuration. 551 """ 552 raise NotImplemented 553 554 def configure(self, build_for_testing): 555 """Configure the source ready for building 556 557 Should raise an exception if configuration failed. 558 559 Keyword arguments: 560 build_for_testing: Mark the package as being built for testing rather 561 than for installation as a dependency for the 562 package under test. Setting to True generally 563 implies that the package will be configured to build 564 with debug information, at a low level of 565 optimisation and possibly with sanitizers enabled. 566 567 Generally configure() is implemented by invoking the build system 568 tooling to generate Makefiles or equivalent. 569 """ 570 raise NotImplemented 571 572 def build(self): 573 """Build the software ready for installation and/or testing 574 575 Should raise an exception if the build fails 576 577 Generally build() is implemented by invoking `make` or `ninja`. 578 """ 579 raise NotImplemented 580 581 def install(self): 582 """Install the software ready for use 583 584 Should raise an exception if installation fails 585 586 Like build(), install() is generally implemented by invoking `make` or 587 `ninja`. 588 """ 589 raise NotImplemented 590 591 def test(self): 592 """Build and run the test suite associated with the package 593 594 Should raise an exception if the build or testing fails. 595 596 Like install(), test() is generally implemented by invoking `make` or 597 `ninja`. 598 """ 599 raise NotImplemented 600 601 def analyze(self): 602 """Run any supported analysis tools over the codebase 603 604 Should raise an exception if analysis fails. 605 606 Some analysis tools such as scan-build need injection into the build 607 system. analyze() provides the necessary hook to implement such 608 behaviour. Analyzers independent of the build system can also be 609 specified here but at the cost of possible duplication of code between 610 the build system driver implementations. 611 """ 612 raise NotImplemented 613 614 615class Autotools(BuildSystem): 616 def __init__(self, package=None, path=None): 617 super(Autotools, self).__init__(package, path) 618 619 def probe(self): 620 return os.path.isfile(os.path.join(self.path, 'configure.ac')) 621 622 def dependencies(self): 623 configure_ac = os.path.join(self.path, 'configure.ac') 624 625 contents = '' 626 # Prepend some special function overrides so we can parse out 627 # dependencies 628 for macro in DEPENDENCIES.keys(): 629 contents += ('m4_define([' + macro + '], [' + macro + '_START$' + 630 str(DEPENDENCIES_OFFSET[macro] + 1) + 631 macro + '_END])\n') 632 with open(configure_ac, "rt") as f: 633 contents += f.read() 634 635 autoconf_cmdline = ['autoconf', '-Wno-undefined', '-'] 636 autoconf_process = subprocess.Popen(autoconf_cmdline, 637 stdin=subprocess.PIPE, 638 stdout=subprocess.PIPE, 639 stderr=subprocess.PIPE) 640 document = contents.encode('utf-8') 641 (stdout, stderr) = autoconf_process.communicate(input=document) 642 if not stdout: 643 print(stderr) 644 raise Exception("Failed to run autoconf for parsing dependencies") 645 646 # Parse out all of the dependency text 647 matches = [] 648 for macro in DEPENDENCIES.keys(): 649 pattern = '(' + macro + ')_START(.*?)' + macro + '_END' 650 for match in re.compile(pattern).finditer(stdout.decode('utf-8')): 651 matches.append((match.group(1), match.group(2))) 652 653 # Look up dependencies from the text 654 found_deps = [] 655 for macro, deptext in matches: 656 for potential_dep in deptext.split(' '): 657 for known_dep in DEPENDENCIES[macro].keys(): 658 if potential_dep.startswith(known_dep): 659 found_deps.append(DEPENDENCIES[macro][known_dep]) 660 661 return found_deps 662 663 def _configure_feature(self, flag, enabled): 664 """ 665 Returns an configure flag as a string 666 667 Parameters: 668 flag The name of the flag 669 enabled Whether the flag is enabled or disabled 670 """ 671 return '--' + ('enable' if enabled else 'disable') + '-' + flag 672 673 def configure(self, build_for_testing): 674 self.build_for_testing = build_for_testing 675 conf_flags = [ 676 self._configure_feature('silent-rules', False), 677 self._configure_feature('examples', build_for_testing), 678 self._configure_feature('tests', build_for_testing), 679 self._configure_feature('itests', INTEGRATION_TEST), 680 ] 681 conf_flags.extend([ 682 self._configure_feature('code-coverage', build_for_testing), 683 self._configure_feature('valgrind', build_for_testing), 684 ]) 685 # Add any necessary configure flags for package 686 if CONFIGURE_FLAGS.get(self.package) is not None: 687 conf_flags.extend(CONFIGURE_FLAGS.get(self.package)) 688 for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']: 689 if os.path.exists(bootstrap): 690 check_call_cmd('./' + bootstrap) 691 break 692 check_call_cmd('./configure', *conf_flags) 693 694 def build(self): 695 check_call_cmd(*make_parallel) 696 697 def install(self): 698 check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install'])) 699 700 def test(self): 701 try: 702 cmd = make_parallel + ['check'] 703 for i in range(0, args.repeat): 704 check_call_cmd(*cmd) 705 706 maybe_make_valgrind() 707 maybe_make_coverage() 708 except CalledProcessError: 709 for root, _, files in os.walk(os.getcwd()): 710 if 'test-suite.log' not in files: 711 continue 712 check_call_cmd('cat', os.path.join(root, 'test-suite.log')) 713 raise Exception('Unit tests failed') 714 715 def analyze(self): 716 run_cppcheck() 717 718 719class CMake(BuildSystem): 720 def __init__(self, package=None, path=None): 721 super(CMake, self).__init__(package, path) 722 723 def probe(self): 724 return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt')) 725 726 def dependencies(self): 727 return [] 728 729 def configure(self, build_for_testing): 730 self.build_for_testing = build_for_testing 731 if INTEGRATION_TEST: 732 check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', 733 '-DITESTS=ON', '.') 734 else: 735 check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.') 736 737 def build(self): 738 check_call_cmd('cmake', '--build', '.', '--', '-j', 739 str(multiprocessing.cpu_count())) 740 741 def install(self): 742 pass 743 744 def test(self): 745 if make_target_exists('test'): 746 check_call_cmd('ctest', '.') 747 748 def analyze(self): 749 if os.path.isfile('.clang-tidy'): 750 try: 751 os.mkdir("tidy-build") 752 except FileExistsError as e: 753 pass 754 # clang-tidy needs to run on a clang-specific build 755 check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang', 756 '-DCMAKE_CXX_COMPILER=clang++', 757 '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', 758 '-H.', 759 '-Btidy-build') 760 # we need to cd here because otherwise clang-tidy doesn't find the 761 # .clang-tidy file in the roots of repos. Its arguably a "bug" 762 # with run-clang-tidy at a minimum it's "weird" that it requires 763 # the .clang-tidy to be up a dir 764 os.chdir("tidy-build") 765 try: 766 check_call_cmd('run-clang-tidy.py', "-header-filter=.*", '-p', 767 '.') 768 finally: 769 os.chdir("..") 770 771 maybe_make_valgrind() 772 maybe_make_coverage() 773 run_cppcheck() 774 775 776class Meson(BuildSystem): 777 def __init__(self, package=None, path=None): 778 super(Meson, self).__init__(package, path) 779 780 def probe(self): 781 return os.path.isfile(os.path.join(self.path, 'meson.build')) 782 783 def dependencies(self): 784 meson_build = os.path.join(self.path, 'meson.build') 785 if not os.path.exists(meson_build): 786 return [] 787 788 found_deps = [] 789 for root, dirs, files in os.walk(self.path): 790 if 'meson.build' not in files: 791 continue 792 with open(os.path.join(root, 'meson.build'), 'rt') as f: 793 build_contents = f.read() 794 pattern = r"dependency\('([^']*)'.*?\),?\n" 795 for match in re.finditer(pattern, build_contents): 796 group = match.group(1) 797 maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group) 798 if maybe_dep is not None: 799 found_deps.append(maybe_dep) 800 801 return found_deps 802 803 def _parse_options(self, options_file): 804 """ 805 Returns a set of options defined in the provides meson_options.txt file 806 807 Parameters: 808 options_file The file containing options 809 """ 810 oi = optinterpreter.OptionInterpreter('') 811 oi.process(options_file) 812 return oi.options 813 814 def _configure_boolean(self, val): 815 """ 816 Returns the meson flag which signifies the value 817 818 True is true which requires the boolean. 819 False is false which disables the boolean. 820 821 Parameters: 822 val The value being converted 823 """ 824 if val is True: 825 return 'true' 826 elif val is False: 827 return 'false' 828 else: 829 raise Exception("Bad meson boolean value") 830 831 def _configure_feature(self, val): 832 """ 833 Returns the meson flag which signifies the value 834 835 True is enabled which requires the feature. 836 False is disabled which disables the feature. 837 None is auto which autodetects the feature. 838 839 Parameters: 840 val The value being converted 841 """ 842 if val is True: 843 return "enabled" 844 elif val is False: 845 return "disabled" 846 elif val is None: 847 return "auto" 848 else: 849 raise Exception("Bad meson feature value") 850 851 def _configure_option(self, opts, key, val): 852 """ 853 Returns the meson flag which signifies the value 854 based on the type of the opt 855 856 Parameters: 857 opt The meson option which we are setting 858 val The value being converted 859 """ 860 if isinstance(opts[key], coredata.UserBooleanOption): 861 str_val = self._configure_boolean(val) 862 elif isinstance(opts[key], coredata.UserFeatureOption): 863 str_val = self._configure_feature(val) 864 else: 865 raise Exception('Unknown meson option type') 866 return "-D{}={}".format(key, str_val) 867 868 def configure(self, build_for_testing): 869 self.build_for_testing = build_for_testing 870 meson_options = {} 871 if os.path.exists("meson_options.txt"): 872 meson_options = self._parse_options("meson_options.txt") 873 meson_flags = [ 874 '-Db_colorout=never', 875 '-Dwerror=true', 876 '-Dwarning_level=3', 877 ] 878 if build_for_testing: 879 meson_flags.append('--buildtype=debug') 880 else: 881 meson_flags.append('--buildtype=debugoptimized') 882 if OptionKey('tests') in meson_options: 883 meson_flags.append(self._configure_option(meson_options, OptionKey('tests'), build_for_testing)) 884 if OptionKey('examples') in meson_options: 885 meson_flags.append(self._configure_option(meson_options, OptionKey('examples'), build_for_testing)) 886 if OptionKey('itests') in meson_options: 887 meson_flags.append(self._configure_option(meson_options, OptionKey('itests'), INTEGRATION_TEST)) 888 if MESON_FLAGS.get(self.package) is not None: 889 meson_flags.extend(MESON_FLAGS.get(self.package)) 890 try: 891 check_call_cmd('meson', 'setup', '--reconfigure', 'build', 892 *meson_flags) 893 except: 894 shutil.rmtree('build') 895 check_call_cmd('meson', 'setup', 'build', *meson_flags) 896 897 def build(self): 898 check_call_cmd('ninja', '-C', 'build') 899 900 def install(self): 901 check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install') 902 903 def test(self): 904 # It is useful to check various settings of the meson.build file 905 # for compatibility, such as meson_version checks. We shouldn't 906 # do this in the configure path though because it affects subprojects 907 # and dependencies as well, but we only want this applied to the 908 # project-under-test (otherwise an upstream dependency could fail 909 # this check without our control). 910 self._extra_meson_checks() 911 912 try: 913 test_args = ('--repeat', str(args.repeat), '-C', 'build') 914 check_call_cmd('meson', 'test', *test_args) 915 916 except CalledProcessError: 917 for root, _, files in os.walk(os.getcwd()): 918 if 'testlog.txt' not in files: 919 continue 920 check_call_cmd('cat', os.path.join(root, 'testlog.txt')) 921 raise Exception('Unit tests failed') 922 923 def _setup_exists(self, setup): 924 """ 925 Returns whether the meson build supports the named test setup. 926 927 Parameter descriptions: 928 setup The setup target to check 929 """ 930 try: 931 with open(os.devnull, 'w') as devnull: 932 output = subprocess.check_output( 933 ['meson', 'test', '-C', 'build', 934 '--setup', setup, '-t', '0'], 935 stderr=subprocess.STDOUT) 936 except CalledProcessError as e: 937 output = e.output 938 output = output.decode('utf-8') 939 return not re.search('Test setup .* not found from project', output) 940 941 def _maybe_valgrind(self): 942 """ 943 Potentially runs the unit tests through valgrind for the package 944 via `meson test`. The package can specify custom valgrind 945 configurations by utilizing add_test_setup() in a meson.build 946 """ 947 if not is_valgrind_safe(): 948 sys.stderr.write("###### Skipping valgrind ######\n") 949 return 950 try: 951 if self._setup_exists('valgrind'): 952 check_call_cmd('meson', 'test','-t','10','-C', 'build', 953 '--setup', 'valgrind') 954 else: 955 check_call_cmd('meson', 'test','-t','10', '-C', 'build', 956 '--wrapper', 'valgrind') 957 except CalledProcessError: 958 for root, _, files in os.walk(os.getcwd()): 959 if 'testlog-valgrind.txt' not in files: 960 continue 961 cat_args = os.path.join(root, 'testlog-valgrind.txt') 962 check_call_cmd('cat', cat_args) 963 raise Exception('Valgrind tests failed') 964 965 def analyze(self): 966 self._maybe_valgrind() 967 968 # Run clang-tidy only if the project has a configuration 969 if os.path.isfile('.clang-tidy'): 970 os.environ["CXX"] = "clang++" 971 check_call_cmd('meson', 'setup', 'build-clang') 972 check_call_cmd('run-clang-tidy.py', '-p', 973 'build-clang') 974 # Run the basic clang static analyzer otherwise 975 else: 976 check_call_cmd('ninja', '-C', 'build', 977 'scan-build') 978 979 # Run tests through sanitizers 980 # b_lundef is needed if clang++ is CXX since it resolves the 981 # asan symbols at runtime only. We don't want to set it earlier 982 # in the build process to ensure we don't have undefined 983 # runtime code. 984 if is_sanitize_safe(): 985 check_call_cmd('meson', 'configure', 'build', 986 '-Db_sanitize=address,undefined', 987 '-Db_lundef=false') 988 check_call_cmd('meson', 'test', '-C', 'build', 989 '--logbase', 'testlog-ubasan') 990 # TODO: Fix memory sanitizer 991 # check_call_cmd('meson', 'configure', 'build', 992 # '-Db_sanitize=memory') 993 # check_call_cmd('meson', 'test', '-C', 'build' 994 # '--logbase', 'testlog-msan') 995 check_call_cmd('meson', 'configure', 'build', 996 '-Db_sanitize=none') 997 else: 998 sys.stderr.write("###### Skipping sanitizers ######\n") 999 1000 # Run coverage checks 1001 check_call_cmd('meson', 'configure', 'build', 1002 '-Db_coverage=true') 1003 self.test() 1004 # Only build coverage HTML if coverage files were produced 1005 for root, dirs, files in os.walk('build'): 1006 if any([f.endswith('.gcda') for f in files]): 1007 check_call_cmd('ninja', '-C', 'build', 1008 'coverage-html') 1009 break 1010 check_call_cmd('meson', 'configure', 'build', 1011 '-Db_coverage=false') 1012 run_cppcheck() 1013 1014 def _extra_meson_checks(self): 1015 with open(os.path.join(self.path, 'meson.build'), 'rt') as f: 1016 build_contents = f.read() 1017 1018 # Find project's specified meson_version. 1019 meson_version = None 1020 pattern = r"meson_version:[^']*'([^']*)'" 1021 for match in re.finditer(pattern, build_contents): 1022 group = match.group(1) 1023 meson_version = group 1024 1025 # C++20 requires at least Meson 0.57 but Meson itself doesn't 1026 # identify this. Add to our unit-test checks so that we don't 1027 # get a meson.build missing this. 1028 pattern = r"'cpp_std=c\+\+20'" 1029 for match in re.finditer(pattern, build_contents): 1030 if not meson_version or \ 1031 not meson_version_compare(meson_version, ">=0.57"): 1032 raise Exception( 1033 "C++20 support requires specifying in meson.build: " 1034 + "meson_version: '>=0.57'" 1035 ) 1036 1037 1038class Package(object): 1039 def __init__(self, name=None, path=None): 1040 self.supported = [Meson, Autotools, CMake] 1041 self.name = name 1042 self.path = path 1043 self.test_only = False 1044 1045 def build_systems(self): 1046 instances = (system(self.name, self.path) for system in self.supported) 1047 return (instance for instance in instances if instance.probe()) 1048 1049 def build_system(self, preferred=None): 1050 systems = list(self.build_systems()) 1051 1052 if not systems: 1053 return None 1054 1055 if preferred: 1056 return {type(system): system for system in systems}[preferred] 1057 1058 return next(iter(systems)) 1059 1060 def install(self, system=None): 1061 if not system: 1062 system = self.build_system() 1063 1064 system.configure(False) 1065 system.build() 1066 system.install() 1067 1068 def _test_one(self, system): 1069 system.configure(True) 1070 system.build() 1071 system.install() 1072 system.test() 1073 if not TEST_ONLY: 1074 system.analyze() 1075 1076 def test(self): 1077 for system in self.build_systems(): 1078 self._test_one(system) 1079 1080 1081def find_file(filename, basedir): 1082 """ 1083 Finds all occurrences of a file (or list of files) in the base 1084 directory and passes them back with their relative paths. 1085 1086 Parameter descriptions: 1087 filename The name of the file (or list of files) to 1088 find 1089 basedir The base directory search in 1090 """ 1091 1092 if not isinstance(filename, list): 1093 filename = [ filename ] 1094 1095 filepaths = [] 1096 for root, dirs, files in os.walk(basedir): 1097 for f in filename: 1098 if f in files: 1099 filepaths.append(os.path.join(root, f)) 1100 return filepaths 1101 1102 1103if __name__ == '__main__': 1104 # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS] 1105 CONFIGURE_FLAGS = { 1106 'phosphor-logging': 1107 ['--enable-metadata-processing', '--enable-openpower-pel-extension', 1108 'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml'] 1109 } 1110 1111 # MESON_FLAGS = [GIT REPO]:[MESON FLAGS] 1112 MESON_FLAGS = { 1113 'phosphor-dbus-interfaces': 1114 ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'], 1115 'phosphor-logging': 1116 ['-Dopenpower-pel-extension=enabled'] 1117 } 1118 1119 # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO] 1120 DEPENDENCIES = { 1121 'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'}, 1122 'AC_CHECK_HEADER': { 1123 'host-ipmid': 'phosphor-host-ipmid', 1124 'blobs-ipmid': 'phosphor-ipmi-blobs', 1125 'sdbusplus': 'sdbusplus', 1126 'sdeventplus': 'sdeventplus', 1127 'stdplus': 'stdplus', 1128 'gpioplus': 'gpioplus', 1129 'phosphor-logging/log.hpp': 'phosphor-logging', 1130 }, 1131 'AC_PATH_PROG': {'sdbus++': 'sdbusplus'}, 1132 'PKG_CHECK_MODULES': { 1133 'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces', 1134 'libipmid': 'phosphor-host-ipmid', 1135 'libipmid-host': 'phosphor-host-ipmid', 1136 'sdbusplus': 'sdbusplus', 1137 'sdeventplus': 'sdeventplus', 1138 'stdplus': 'stdplus', 1139 'gpioplus': 'gpioplus', 1140 'phosphor-logging': 'phosphor-logging', 1141 'phosphor-snmp': 'phosphor-snmp', 1142 'ipmiblob': 'ipmi-blob-tool', 1143 'hei': 'openpower-libhei', 1144 'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs', 1145 }, 1146 } 1147 1148 # Offset into array of macro parameters MACRO(0, 1, ...N) 1149 DEPENDENCIES_OFFSET = { 1150 'AC_CHECK_LIB': 0, 1151 'AC_CHECK_HEADER': 0, 1152 'AC_PATH_PROG': 1, 1153 'PKG_CHECK_MODULES': 1, 1154 } 1155 1156 # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING] 1157 DEPENDENCIES_REGEX = { 1158 'phosphor-logging': r'\S+-dbus-interfaces$' 1159 } 1160 1161 # Set command line arguments 1162 parser = argparse.ArgumentParser() 1163 parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True, 1164 help="Workspace directory location(i.e. /home)") 1165 parser.add_argument("-p", "--package", dest="PACKAGE", required=True, 1166 help="OpenBMC package to be unit tested") 1167 parser.add_argument("-t", "--test-only", dest="TEST_ONLY", 1168 action="store_true", required=False, default=False, 1169 help="Only run test cases, no other validation") 1170 arg_inttests = parser.add_mutually_exclusive_group() 1171 arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST", 1172 action="store_true", required=False, default=True, 1173 help="Enable integration tests [default].") 1174 arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST", 1175 action="store_false", required=False, 1176 help="Disable integration tests.") 1177 parser.add_argument("-v", "--verbose", action="store_true", 1178 help="Print additional package status messages") 1179 parser.add_argument("-r", "--repeat", help="Repeat tests N times", 1180 type=int, default=1) 1181 parser.add_argument("-b", "--branch", dest="BRANCH", required=False, 1182 help="Branch to target for dependent repositories", 1183 default="master") 1184 parser.add_argument("-n", "--noformat", dest="FORMAT", 1185 action="store_false", required=False, 1186 help="Whether or not to run format code") 1187 args = parser.parse_args(sys.argv[1:]) 1188 WORKSPACE = args.WORKSPACE 1189 UNIT_TEST_PKG = args.PACKAGE 1190 TEST_ONLY = args.TEST_ONLY 1191 INTEGRATION_TEST = args.INTEGRATION_TEST 1192 BRANCH = args.BRANCH 1193 FORMAT_CODE = args.FORMAT 1194 if args.verbose: 1195 def printline(*line): 1196 for arg in line: 1197 print(arg, end=' ') 1198 print() 1199 else: 1200 def printline(*line): 1201 pass 1202 1203 CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG) 1204 1205 # First validate code formatting if repo has style formatting files. 1206 # The format-code.sh checks for these files. 1207 if FORMAT_CODE: 1208 format_scripts = find_file(['format-code.sh', 'format-code'], 1209 CODE_SCAN_DIR) 1210 1211 # use default format-code.sh if no other found 1212 if not format_scripts: 1213 format_scripts.append(os.path.join(WORKSPACE, "format-code.sh")) 1214 1215 for f in format_scripts: 1216 check_call_cmd(f, CODE_SCAN_DIR) 1217 1218 # Check if this repo has a supported make infrastructure 1219 pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR) 1220 if not pkg.build_system(): 1221 print("No valid build system, exit") 1222 sys.exit(0) 1223 1224 prev_umask = os.umask(000) 1225 1226 # Determine dependencies and add them 1227 dep_added = dict() 1228 dep_added[UNIT_TEST_PKG] = False 1229 1230 # Create dependency tree 1231 dep_tree = DepTree(UNIT_TEST_PKG) 1232 build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH) 1233 1234 # Reorder Dependency Tree 1235 for pkg_name, regex_str in DEPENDENCIES_REGEX.items(): 1236 dep_tree.ReorderDeps(pkg_name, regex_str) 1237 if args.verbose: 1238 dep_tree.PrintTree() 1239 1240 install_list = dep_tree.GetInstallList() 1241 1242 # We don't want to treat our package as a dependency 1243 install_list.remove(UNIT_TEST_PKG) 1244 1245 # Install reordered dependencies 1246 for dep in install_list: 1247 build_and_install(dep, False) 1248 1249 # Run package unit tests 1250 build_and_install(UNIT_TEST_PKG, True) 1251 1252 os.umask(prev_umask) 1253 1254 # Run any custom CI scripts the repo has, of which there can be 1255 # multiple of and anywhere in the repository. 1256 ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR) 1257 if ci_scripts: 1258 os.chdir(CODE_SCAN_DIR) 1259 for ci_script in ci_scripts: 1260 check_call_cmd(ci_script) 1261