1#!/usr/bin/env python3 2 3""" 4This script determines the given package's openbmc dependencies from its 5configure.ac file where it downloads, configures, builds, and installs each of 6these dependencies. Then the given package is configured, built, and installed 7prior to executing its unit tests. 8""" 9 10import argparse 11import multiprocessing 12import os 13import platform 14import re 15import shutil 16import subprocess 17import sys 18from subprocess import CalledProcessError, check_call 19from tempfile import TemporaryDirectory 20from urllib.parse import urljoin 21 22from git import Repo 23 24# interpreter is not used directly but this resolves dependency ordering 25# that would be broken if we didn't include it. 26from mesonbuild import interpreter # noqa: F401 27from mesonbuild import coredata, optinterpreter 28from mesonbuild.mesonlib import OptionKey 29from mesonbuild.mesonlib import version_compare as meson_version_compare 30 31 32class DepTree: 33 """ 34 Represents package dependency tree, where each node is a DepTree with a 35 name and DepTree children. 36 """ 37 38 def __init__(self, name): 39 """ 40 Create new DepTree. 41 42 Parameter descriptions: 43 name Name of new tree node. 44 """ 45 self.name = name 46 self.children = list() 47 48 def AddChild(self, name): 49 """ 50 Add new child node to current node. 51 52 Parameter descriptions: 53 name Name of new child 54 """ 55 new_child = DepTree(name) 56 self.children.append(new_child) 57 return new_child 58 59 def AddChildNode(self, node): 60 """ 61 Add existing child node to current node. 62 63 Parameter descriptions: 64 node Tree node to add 65 """ 66 self.children.append(node) 67 68 def RemoveChild(self, name): 69 """ 70 Remove child node. 71 72 Parameter descriptions: 73 name Name of child to remove 74 """ 75 for child in self.children: 76 if child.name == name: 77 self.children.remove(child) 78 return 79 80 def GetNode(self, name): 81 """ 82 Return node with matching name. Return None if not found. 83 84 Parameter descriptions: 85 name Name of node to return 86 """ 87 if self.name == name: 88 return self 89 for child in self.children: 90 node = child.GetNode(name) 91 if node: 92 return node 93 return None 94 95 def GetParentNode(self, name, parent_node=None): 96 """ 97 Return parent of node with matching name. Return none if not found. 98 99 Parameter descriptions: 100 name Name of node to get parent of 101 parent_node Parent of current node 102 """ 103 if self.name == name: 104 return parent_node 105 for child in self.children: 106 found_node = child.GetParentNode(name, self) 107 if found_node: 108 return found_node 109 return None 110 111 def GetPath(self, name, path=None): 112 """ 113 Return list of node names from head to matching name. 114 Return None if not found. 115 116 Parameter descriptions: 117 name Name of node 118 path List of node names from head to current node 119 """ 120 if not path: 121 path = [] 122 if self.name == name: 123 path.append(self.name) 124 return path 125 for child in self.children: 126 match = child.GetPath(name, path + [self.name]) 127 if match: 128 return match 129 return None 130 131 def GetPathRegex(self, name, regex_str, path=None): 132 """ 133 Return list of node paths that end in name, or match regex_str. 134 Return empty list if not found. 135 136 Parameter descriptions: 137 name Name of node to search for 138 regex_str Regex string to match node names 139 path Path of node names from head to current node 140 """ 141 new_paths = [] 142 if not path: 143 path = [] 144 match = re.match(regex_str, self.name) 145 if (self.name == name) or (match): 146 new_paths.append(path + [self.name]) 147 for child in self.children: 148 return_paths = None 149 full_path = path + [self.name] 150 return_paths = child.GetPathRegex(name, regex_str, full_path) 151 for i in return_paths: 152 new_paths.append(i) 153 return new_paths 154 155 def MoveNode(self, from_name, to_name): 156 """ 157 Mode existing from_name node to become child of to_name node. 158 159 Parameter descriptions: 160 from_name Name of node to make a child of to_name 161 to_name Name of node to make parent of from_name 162 """ 163 parent_from_node = self.GetParentNode(from_name) 164 from_node = self.GetNode(from_name) 165 parent_from_node.RemoveChild(from_name) 166 to_node = self.GetNode(to_name) 167 to_node.AddChildNode(from_node) 168 169 def ReorderDeps(self, name, regex_str): 170 """ 171 Reorder dependency tree. If tree contains nodes with names that 172 match 'name' and 'regex_str', move 'regex_str' nodes that are 173 to the right of 'name' node, so that they become children of the 174 'name' node. 175 176 Parameter descriptions: 177 name Name of node to look for 178 regex_str Regex string to match names to 179 """ 180 name_path = self.GetPath(name) 181 if not name_path: 182 return 183 paths = self.GetPathRegex(name, regex_str) 184 is_name_in_paths = False 185 name_index = 0 186 for i in range(len(paths)): 187 path = paths[i] 188 if path[-1] == name: 189 is_name_in_paths = True 190 name_index = i 191 break 192 if not is_name_in_paths: 193 return 194 for i in range(name_index + 1, len(paths)): 195 path = paths[i] 196 if name in path: 197 continue 198 from_name = path[-1] 199 self.MoveNode(from_name, name) 200 201 def GetInstallList(self): 202 """ 203 Return post-order list of node names. 204 205 Parameter descriptions: 206 """ 207 install_list = [] 208 for child in self.children: 209 child_install_list = child.GetInstallList() 210 install_list.extend(child_install_list) 211 install_list.append(self.name) 212 return install_list 213 214 def PrintTree(self, level=0): 215 """ 216 Print pre-order node names with indentation denoting node depth level. 217 218 Parameter descriptions: 219 level Current depth level 220 """ 221 INDENT_PER_LEVEL = 4 222 print(" " * (level * INDENT_PER_LEVEL) + self.name) 223 for child in self.children: 224 child.PrintTree(level + 1) 225 226 227def check_call_cmd(*cmd, **kwargs): 228 """ 229 Verbose prints the directory location the given command is called from and 230 the command, then executes the command using check_call. 231 232 Parameter descriptions: 233 dir Directory location command is to be called from 234 cmd List of parameters constructing the complete command 235 """ 236 printline(os.getcwd(), ">", " ".join(cmd)) 237 check_call(cmd, **kwargs) 238 239 240def clone_pkg(pkg, branch): 241 """ 242 Clone the given openbmc package's git repository from gerrit into 243 the WORKSPACE location 244 245 Parameter descriptions: 246 pkg Name of the package to clone 247 branch Branch to clone from pkg 248 """ 249 pkg_dir = os.path.join(WORKSPACE, pkg) 250 if os.path.exists(os.path.join(pkg_dir, ".git")): 251 return pkg_dir 252 pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg) 253 os.mkdir(pkg_dir) 254 printline(pkg_dir, "> git clone", pkg_repo, branch, "./") 255 try: 256 # first try the branch 257 clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch) 258 repo_inst = clone.working_dir 259 except Exception: 260 printline("Input branch not found, default to master") 261 clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master") 262 repo_inst = clone.working_dir 263 return repo_inst 264 265 266def make_target_exists(target): 267 """ 268 Runs a check against the makefile in the current directory to determine 269 if the target exists so that it can be built. 270 271 Parameter descriptions: 272 target The make target we are checking 273 """ 274 try: 275 cmd = ["make", "-n", target] 276 with open(os.devnull, "w") as devnull: 277 check_call(cmd, stdout=devnull, stderr=devnull) 278 return True 279 except CalledProcessError: 280 return False 281 282 283make_parallel = [ 284 "make", 285 # Run enough jobs to saturate all the cpus 286 "-j", 287 str(multiprocessing.cpu_count()), 288 # Don't start more jobs if the load avg is too high 289 "-l", 290 str(multiprocessing.cpu_count()), 291 # Synchronize the output so logs aren't intermixed in stdout / stderr 292 "-O", 293] 294 295 296def build_and_install(name, build_for_testing=False): 297 """ 298 Builds and installs the package in the environment. Optionally 299 builds the examples and test cases for package. 300 301 Parameter description: 302 name The name of the package we are building 303 build_for_testing Enable options related to testing on the package? 304 """ 305 os.chdir(os.path.join(WORKSPACE, name)) 306 307 # Refresh dynamic linker run time bindings for dependencies 308 check_call_cmd("sudo", "-n", "--", "ldconfig") 309 310 pkg = Package() 311 if build_for_testing: 312 pkg.test() 313 else: 314 pkg.install() 315 316 317def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None): 318 """ 319 For each package (name), starting with the package to be unit tested, 320 extract its dependencies. For each package dependency defined, recursively 321 apply the same strategy 322 323 Parameter descriptions: 324 name Name of the package 325 pkgdir Directory where package source is located 326 dep_added Current dict of dependencies and added status 327 head Head node of the dependency tree 328 branch Branch to clone from pkg 329 dep_tree Current dependency tree node 330 """ 331 if not dep_tree: 332 dep_tree = head 333 334 with open("/tmp/depcache", "r") as depcache: 335 cache = depcache.readline() 336 337 # Read out pkg dependencies 338 pkg = Package(name, pkgdir) 339 340 build = pkg.build_system() 341 if not build: 342 raise Exception(f"Unable to find build system for {name}.") 343 344 for dep in set(build.dependencies()): 345 if dep in cache: 346 continue 347 # Dependency package not already known 348 if dep_added.get(dep) is None: 349 print(f"Adding {dep} dependency to {name}.") 350 # Dependency package not added 351 new_child = dep_tree.AddChild(dep) 352 dep_added[dep] = False 353 dep_pkgdir = clone_pkg(dep, branch) 354 # Determine this dependency package's 355 # dependencies and add them before 356 # returning to add this package 357 dep_added = build_dep_tree( 358 dep, dep_pkgdir, dep_added, head, branch, new_child 359 ) 360 else: 361 # Dependency package known and added 362 if dep_added[dep]: 363 continue 364 else: 365 # Cyclic dependency failure 366 raise Exception("Cyclic dependencies found in " + name) 367 368 if not dep_added[name]: 369 dep_added[name] = True 370 371 return dep_added 372 373 374def run_cppcheck(): 375 if not os.path.exists(os.path.join("build", "compile_commands.json")): 376 return None 377 378 with TemporaryDirectory() as cpp_dir: 379 # http://cppcheck.sourceforge.net/manual.pdf 380 try: 381 check_call_cmd( 382 "cppcheck", 383 "-j", 384 str(multiprocessing.cpu_count()), 385 "--enable=style,performance,portability,missingInclude", 386 "--inline-suppr", 387 "--suppress=useStlAlgorithm", 388 "--suppress=unusedStructMember", 389 "--suppress=postfixOperator", 390 "--suppress=unreadVariable", 391 "--suppress=knownConditionTrueFalse", 392 "--library=googletest", 393 "--project=build/compile_commands.json", 394 f"--cppcheck-build-dir={cpp_dir}", 395 ) 396 except subprocess.CalledProcessError: 397 print("cppcheck found errors") 398 399 400def is_valgrind_safe(): 401 """ 402 Returns whether it is safe to run valgrind on our platform 403 """ 404 src = "unit-test-vg.c" 405 exe = "./unit-test-vg" 406 with open(src, "w") as h: 407 h.write("#include <errno.h>\n") 408 h.write("#include <stdio.h>\n") 409 h.write("#include <stdlib.h>\n") 410 h.write("#include <string.h>\n") 411 h.write("int main() {\n") 412 h.write("char *heap_str = malloc(16);\n") 413 h.write('strcpy(heap_str, "RandString");\n') 414 h.write('int res = strcmp("RandString", heap_str);\n') 415 h.write("free(heap_str);\n") 416 h.write("char errstr[64];\n") 417 h.write("strerror_r(EINVAL, errstr, sizeof(errstr));\n") 418 h.write('printf("%s\\n", errstr);\n') 419 h.write("return res;\n") 420 h.write("}\n") 421 try: 422 with open(os.devnull, "w") as devnull: 423 check_call( 424 ["gcc", "-O2", "-o", exe, src], stdout=devnull, stderr=devnull 425 ) 426 check_call( 427 ["valgrind", "--error-exitcode=99", exe], 428 stdout=devnull, 429 stderr=devnull, 430 ) 431 return True 432 except Exception: 433 sys.stderr.write("###### Platform is not valgrind safe ######\n") 434 return False 435 finally: 436 os.remove(src) 437 os.remove(exe) 438 439 440def is_sanitize_safe(): 441 """ 442 Returns whether it is safe to run sanitizers on our platform 443 """ 444 src = "unit-test-sanitize.c" 445 exe = "./unit-test-sanitize" 446 with open(src, "w") as h: 447 h.write("int main() { return 0; }\n") 448 try: 449 with open(os.devnull, "w") as devnull: 450 check_call( 451 [ 452 "gcc", 453 "-O2", 454 "-fsanitize=address", 455 "-fsanitize=undefined", 456 "-o", 457 exe, 458 src, 459 ], 460 stdout=devnull, 461 stderr=devnull, 462 ) 463 check_call([exe], stdout=devnull, stderr=devnull) 464 465 # TODO - Sanitizer not working on ppc64le 466 # https://github.com/openbmc/openbmc-build-scripts/issues/31 467 if platform.processor() == "ppc64le": 468 sys.stderr.write("###### ppc64le is not sanitize safe ######\n") 469 return False 470 else: 471 return True 472 except Exception: 473 sys.stderr.write("###### Platform is not sanitize safe ######\n") 474 return False 475 finally: 476 os.remove(src) 477 os.remove(exe) 478 479 480def maybe_make_valgrind(): 481 """ 482 Potentially runs the unit tests through valgrind for the package 483 via `make check-valgrind`. If the package does not have valgrind testing 484 then it just skips over this. 485 """ 486 # Valgrind testing is currently broken by an aggressive strcmp optimization 487 # that is inlined into optimized code for POWER by gcc 7+. Until we find 488 # a workaround, just don't run valgrind tests on POWER. 489 # https://github.com/openbmc/openbmc/issues/3315 490 if not is_valgrind_safe(): 491 sys.stderr.write("###### Skipping valgrind ######\n") 492 return 493 if not make_target_exists("check-valgrind"): 494 return 495 496 try: 497 cmd = make_parallel + ["check-valgrind"] 498 check_call_cmd(*cmd) 499 except CalledProcessError: 500 for root, _, files in os.walk(os.getcwd()): 501 for f in files: 502 if re.search("test-suite-[a-z]+.log", f) is None: 503 continue 504 check_call_cmd("cat", os.path.join(root, f)) 505 raise Exception("Valgrind tests failed") 506 507 508def maybe_make_coverage(): 509 """ 510 Potentially runs the unit tests through code coverage for the package 511 via `make check-code-coverage`. If the package does not have code coverage 512 testing then it just skips over this. 513 """ 514 if not make_target_exists("check-code-coverage"): 515 return 516 517 # Actually run code coverage 518 try: 519 cmd = make_parallel + ["check-code-coverage"] 520 check_call_cmd(*cmd) 521 except CalledProcessError: 522 raise Exception("Code coverage failed") 523 524 525class BuildSystem(object): 526 """ 527 Build systems generally provide the means to configure, build, install and 528 test software. The BuildSystem class defines a set of interfaces on top of 529 which Autotools, Meson, CMake and possibly other build system drivers can 530 be implemented, separating out the phases to control whether a package 531 should merely be installed or also tested and analyzed. 532 """ 533 534 def __init__(self, package, path): 535 """Initialise the driver with properties independent of the build 536 system 537 538 Keyword arguments: 539 package: The name of the package. Derived from the path if None 540 path: The path to the package. Set to the working directory if None 541 """ 542 self.path = "." if not path else path 543 realpath = os.path.realpath(self.path) 544 self.package = package if package else os.path.basename(realpath) 545 self.build_for_testing = False 546 547 def probe(self): 548 """Test if the build system driver can be applied to the package 549 550 Return True if the driver can drive the package's build system, 551 otherwise False. 552 553 Generally probe() is implemented by testing for the presence of the 554 build system's configuration file(s). 555 """ 556 raise NotImplementedError 557 558 def dependencies(self): 559 """Provide the package's dependencies 560 561 Returns a list of dependencies. If no dependencies are required then an 562 empty list must be returned. 563 564 Generally dependencies() is implemented by analysing and extracting the 565 data from the build system configuration. 566 """ 567 raise NotImplementedError 568 569 def configure(self, build_for_testing): 570 """Configure the source ready for building 571 572 Should raise an exception if configuration failed. 573 574 Keyword arguments: 575 build_for_testing: Mark the package as being built for testing rather 576 than for installation as a dependency for the 577 package under test. Setting to True generally 578 implies that the package will be configured to build 579 with debug information, at a low level of 580 optimisation and possibly with sanitizers enabled. 581 582 Generally configure() is implemented by invoking the build system 583 tooling to generate Makefiles or equivalent. 584 """ 585 raise NotImplementedError 586 587 def build(self): 588 """Build the software ready for installation and/or testing 589 590 Should raise an exception if the build fails 591 592 Generally build() is implemented by invoking `make` or `ninja`. 593 """ 594 raise NotImplementedError 595 596 def install(self): 597 """Install the software ready for use 598 599 Should raise an exception if installation fails 600 601 Like build(), install() is generally implemented by invoking `make` or 602 `ninja`. 603 """ 604 raise NotImplementedError 605 606 def test(self): 607 """Build and run the test suite associated with the package 608 609 Should raise an exception if the build or testing fails. 610 611 Like install(), test() is generally implemented by invoking `make` or 612 `ninja`. 613 """ 614 raise NotImplementedError 615 616 def analyze(self): 617 """Run any supported analysis tools over the codebase 618 619 Should raise an exception if analysis fails. 620 621 Some analysis tools such as scan-build need injection into the build 622 system. analyze() provides the necessary hook to implement such 623 behaviour. Analyzers independent of the build system can also be 624 specified here but at the cost of possible duplication of code between 625 the build system driver implementations. 626 """ 627 raise NotImplementedError 628 629 630class Autotools(BuildSystem): 631 def __init__(self, package=None, path=None): 632 super(Autotools, self).__init__(package, path) 633 634 def probe(self): 635 return os.path.isfile(os.path.join(self.path, "configure.ac")) 636 637 def dependencies(self): 638 configure_ac = os.path.join(self.path, "configure.ac") 639 640 contents = "" 641 # Prepend some special function overrides so we can parse out 642 # dependencies 643 for macro in DEPENDENCIES.keys(): 644 contents += ( 645 "m4_define([" 646 + macro 647 + "], [" 648 + macro 649 + "_START$" 650 + str(DEPENDENCIES_OFFSET[macro] + 1) 651 + macro 652 + "_END])\n" 653 ) 654 with open(configure_ac, "rt") as f: 655 contents += f.read() 656 657 autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"] 658 autoconf_process = subprocess.Popen( 659 autoconf_cmdline, 660 stdin=subprocess.PIPE, 661 stdout=subprocess.PIPE, 662 stderr=subprocess.PIPE, 663 ) 664 document = contents.encode("utf-8") 665 (stdout, stderr) = autoconf_process.communicate(input=document) 666 if not stdout: 667 print(stderr) 668 raise Exception("Failed to run autoconf for parsing dependencies") 669 670 # Parse out all of the dependency text 671 matches = [] 672 for macro in DEPENDENCIES.keys(): 673 pattern = "(" + macro + ")_START(.*?)" + macro + "_END" 674 for match in re.compile(pattern).finditer(stdout.decode("utf-8")): 675 matches.append((match.group(1), match.group(2))) 676 677 # Look up dependencies from the text 678 found_deps = [] 679 for macro, deptext in matches: 680 for potential_dep in deptext.split(" "): 681 for known_dep in DEPENDENCIES[macro].keys(): 682 if potential_dep.startswith(known_dep): 683 found_deps.append(DEPENDENCIES[macro][known_dep]) 684 685 return found_deps 686 687 def _configure_feature(self, flag, enabled): 688 """ 689 Returns an configure flag as a string 690 691 Parameters: 692 flag The name of the flag 693 enabled Whether the flag is enabled or disabled 694 """ 695 return "--" + ("enable" if enabled else "disable") + "-" + flag 696 697 def configure(self, build_for_testing): 698 self.build_for_testing = build_for_testing 699 conf_flags = [ 700 self._configure_feature("silent-rules", False), 701 self._configure_feature("examples", build_for_testing), 702 self._configure_feature("tests", build_for_testing), 703 self._configure_feature("itests", INTEGRATION_TEST), 704 ] 705 conf_flags.extend( 706 [ 707 self._configure_feature("code-coverage", build_for_testing), 708 self._configure_feature("valgrind", build_for_testing), 709 ] 710 ) 711 # Add any necessary configure flags for package 712 if CONFIGURE_FLAGS.get(self.package) is not None: 713 conf_flags.extend(CONFIGURE_FLAGS.get(self.package)) 714 for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]: 715 if os.path.exists(bootstrap): 716 check_call_cmd("./" + bootstrap) 717 break 718 check_call_cmd("./configure", *conf_flags) 719 720 def build(self): 721 check_call_cmd(*make_parallel) 722 723 def install(self): 724 check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"])) 725 726 def test(self): 727 try: 728 cmd = make_parallel + ["check"] 729 for i in range(0, args.repeat): 730 check_call_cmd(*cmd) 731 732 maybe_make_valgrind() 733 maybe_make_coverage() 734 except CalledProcessError: 735 for root, _, files in os.walk(os.getcwd()): 736 if "test-suite.log" not in files: 737 continue 738 check_call_cmd("cat", os.path.join(root, "test-suite.log")) 739 raise Exception("Unit tests failed") 740 741 def analyze(self): 742 run_cppcheck() 743 744 745class CMake(BuildSystem): 746 def __init__(self, package=None, path=None): 747 super(CMake, self).__init__(package, path) 748 749 def probe(self): 750 return os.path.isfile(os.path.join(self.path, "CMakeLists.txt")) 751 752 def dependencies(self): 753 return [] 754 755 def configure(self, build_for_testing): 756 self.build_for_testing = build_for_testing 757 if INTEGRATION_TEST: 758 check_call_cmd( 759 "cmake", 760 "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", 761 "-DITESTS=ON", 762 ".", 763 ) 764 else: 765 check_call_cmd("cmake", "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", ".") 766 767 def build(self): 768 check_call_cmd( 769 "cmake", 770 "--build", 771 ".", 772 "--", 773 "-j", 774 str(multiprocessing.cpu_count()), 775 ) 776 777 def install(self): 778 check_call_cmd("sudo", "cmake", "--install", ".") 779 780 def test(self): 781 if make_target_exists("test"): 782 check_call_cmd("ctest", ".") 783 784 def analyze(self): 785 if os.path.isfile(".clang-tidy"): 786 with TemporaryDirectory(prefix="build", dir=".") as build_dir: 787 # clang-tidy needs to run on a clang-specific build 788 check_call_cmd( 789 "cmake", 790 "-DCMAKE_C_COMPILER=clang", 791 "-DCMAKE_CXX_COMPILER=clang++", 792 "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", 793 "-H.", 794 "-B" + build_dir, 795 ) 796 797 check_call_cmd( 798 "run-clang-tidy", "-header-filter=.*", "-p", build_dir 799 ) 800 801 maybe_make_valgrind() 802 maybe_make_coverage() 803 run_cppcheck() 804 805 806class Meson(BuildSystem): 807 def __init__(self, package=None, path=None): 808 super(Meson, self).__init__(package, path) 809 810 def probe(self): 811 return os.path.isfile(os.path.join(self.path, "meson.build")) 812 813 def dependencies(self): 814 meson_build = os.path.join(self.path, "meson.build") 815 if not os.path.exists(meson_build): 816 return [] 817 818 found_deps = [] 819 for root, dirs, files in os.walk(self.path): 820 if "meson.build" not in files: 821 continue 822 with open(os.path.join(root, "meson.build"), "rt") as f: 823 build_contents = f.read() 824 pattern = r"dependency\('([^']*)'.*?\),?\n" 825 for match in re.finditer(pattern, build_contents): 826 group = match.group(1) 827 maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group) 828 if maybe_dep is not None: 829 found_deps.append(maybe_dep) 830 831 return found_deps 832 833 def _parse_options(self, options_file): 834 """ 835 Returns a set of options defined in the provides meson_options.txt file 836 837 Parameters: 838 options_file The file containing options 839 """ 840 oi = optinterpreter.OptionInterpreter("") 841 oi.process(options_file) 842 return oi.options 843 844 def _configure_boolean(self, val): 845 """ 846 Returns the meson flag which signifies the value 847 848 True is true which requires the boolean. 849 False is false which disables the boolean. 850 851 Parameters: 852 val The value being converted 853 """ 854 if val is True: 855 return "true" 856 elif val is False: 857 return "false" 858 else: 859 raise Exception("Bad meson boolean value") 860 861 def _configure_feature(self, val): 862 """ 863 Returns the meson flag which signifies the value 864 865 True is enabled which requires the feature. 866 False is disabled which disables the feature. 867 None is auto which autodetects the feature. 868 869 Parameters: 870 val The value being converted 871 """ 872 if val is True: 873 return "enabled" 874 elif val is False: 875 return "disabled" 876 elif val is None: 877 return "auto" 878 else: 879 raise Exception("Bad meson feature value") 880 881 def _configure_option(self, opts, key, val): 882 """ 883 Returns the meson flag which signifies the value 884 based on the type of the opt 885 886 Parameters: 887 opt The meson option which we are setting 888 val The value being converted 889 """ 890 if isinstance(opts[key], coredata.UserBooleanOption): 891 str_val = self._configure_boolean(val) 892 elif isinstance(opts[key], coredata.UserFeatureOption): 893 str_val = self._configure_feature(val) 894 else: 895 raise Exception("Unknown meson option type") 896 return "-D{}={}".format(key, str_val) 897 898 def configure(self, build_for_testing): 899 self.build_for_testing = build_for_testing 900 meson_options = {} 901 if os.path.exists("meson.options"): 902 meson_options = self._parse_options("meson.options") 903 elif os.path.exists("meson_options.txt"): 904 meson_options = self._parse_options("meson_options.txt") 905 meson_flags = [ 906 "-Db_colorout=never", 907 "-Dwerror=true", 908 "-Dwarning_level=3", 909 ] 910 if build_for_testing: 911 # -Ddebug=true -Doptimization=g is helpful for abi-dumper but isn't a combination that 912 # is supported by meson's build types. Configure it manually. 913 meson_flags.append("-Ddebug=true") 914 meson_flags.append("-Doptimization=g") 915 else: 916 meson_flags.append("--buildtype=debugoptimized") 917 if OptionKey("tests") in meson_options: 918 meson_flags.append( 919 self._configure_option( 920 meson_options, OptionKey("tests"), build_for_testing 921 ) 922 ) 923 if OptionKey("examples") in meson_options: 924 meson_flags.append( 925 self._configure_option( 926 meson_options, OptionKey("examples"), build_for_testing 927 ) 928 ) 929 if OptionKey("itests") in meson_options: 930 meson_flags.append( 931 self._configure_option( 932 meson_options, OptionKey("itests"), INTEGRATION_TEST 933 ) 934 ) 935 if MESON_FLAGS.get(self.package) is not None: 936 meson_flags.extend(MESON_FLAGS.get(self.package)) 937 try: 938 check_call_cmd( 939 "meson", "setup", "--reconfigure", "build", *meson_flags 940 ) 941 except Exception: 942 shutil.rmtree("build", ignore_errors=True) 943 check_call_cmd("meson", "setup", "build", *meson_flags) 944 945 def build(self): 946 check_call_cmd("ninja", "-C", "build") 947 948 def install(self): 949 check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install") 950 951 def test(self): 952 # It is useful to check various settings of the meson.build file 953 # for compatibility, such as meson_version checks. We shouldn't 954 # do this in the configure path though because it affects subprojects 955 # and dependencies as well, but we only want this applied to the 956 # project-under-test (otherwise an upstream dependency could fail 957 # this check without our control). 958 self._extra_meson_checks() 959 960 try: 961 test_args = ("--repeat", str(args.repeat), "-C", "build") 962 check_call_cmd("meson", "test", "--print-errorlogs", *test_args) 963 964 except CalledProcessError: 965 raise Exception("Unit tests failed") 966 967 def _setup_exists(self, setup): 968 """ 969 Returns whether the meson build supports the named test setup. 970 971 Parameter descriptions: 972 setup The setup target to check 973 """ 974 try: 975 with open(os.devnull, "w"): 976 output = subprocess.check_output( 977 [ 978 "meson", 979 "test", 980 "-C", 981 "build", 982 "--setup", 983 setup, 984 "-t", 985 "0", 986 ], 987 stderr=subprocess.STDOUT, 988 ) 989 except CalledProcessError as e: 990 output = e.output 991 output = output.decode("utf-8") 992 return not re.search("Test setup .* not found from project", output) 993 994 def _maybe_valgrind(self): 995 """ 996 Potentially runs the unit tests through valgrind for the package 997 via `meson test`. The package can specify custom valgrind 998 configurations by utilizing add_test_setup() in a meson.build 999 """ 1000 if not is_valgrind_safe(): 1001 sys.stderr.write("###### Skipping valgrind ######\n") 1002 return 1003 try: 1004 if self._setup_exists("valgrind"): 1005 check_call_cmd( 1006 "meson", 1007 "test", 1008 "-t", 1009 "10", 1010 "-C", 1011 "build", 1012 "--print-errorlogs", 1013 "--setup", 1014 "valgrind", 1015 ) 1016 else: 1017 check_call_cmd( 1018 "meson", 1019 "test", 1020 "-t", 1021 "10", 1022 "-C", 1023 "build", 1024 "--print-errorlogs", 1025 "--wrapper", 1026 "valgrind", 1027 ) 1028 except CalledProcessError: 1029 raise Exception("Valgrind tests failed") 1030 1031 def analyze(self): 1032 self._maybe_valgrind() 1033 1034 # Run clang-tidy only if the project has a configuration 1035 if os.path.isfile(".clang-tidy"): 1036 os.environ["CXX"] = "clang++" 1037 with TemporaryDirectory(prefix="build", dir=".") as build_dir: 1038 check_call_cmd("meson", "setup", build_dir) 1039 if not os.path.isfile(".openbmc-no-clang"): 1040 check_call_cmd("meson", "compile", "-C", build_dir) 1041 try: 1042 check_call_cmd("ninja", "-C", build_dir, "clang-tidy") 1043 except subprocess.CalledProcessError: 1044 check_call_cmd( 1045 "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff" 1046 ) 1047 raise 1048 # Run the basic clang static analyzer otherwise 1049 else: 1050 check_call_cmd("ninja", "-C", "build", "scan-build") 1051 1052 # Run tests through sanitizers 1053 # b_lundef is needed if clang++ is CXX since it resolves the 1054 # asan symbols at runtime only. We don't want to set it earlier 1055 # in the build process to ensure we don't have undefined 1056 # runtime code. 1057 if is_sanitize_safe(): 1058 check_call_cmd( 1059 "meson", 1060 "configure", 1061 "build", 1062 "-Db_sanitize=address,undefined", 1063 "-Db_lundef=false", 1064 ) 1065 check_call_cmd( 1066 "meson", 1067 "test", 1068 "-C", 1069 "build", 1070 "--print-errorlogs", 1071 "--logbase", 1072 "testlog-ubasan", 1073 env=os.environ | {"UBSAN_OPTIONS": "halt_on_error=1"}, 1074 ) 1075 # TODO: Fix memory sanitizer 1076 # check_call_cmd('meson', 'configure', 'build', 1077 # '-Db_sanitize=memory') 1078 # check_call_cmd('meson', 'test', '-C', 'build' 1079 # '--logbase', 'testlog-msan') 1080 check_call_cmd("meson", "configure", "build", "-Db_sanitize=none") 1081 else: 1082 sys.stderr.write("###### Skipping sanitizers ######\n") 1083 1084 # Run coverage checks 1085 check_call_cmd("meson", "configure", "build", "-Db_coverage=true") 1086 self.test() 1087 # Only build coverage HTML if coverage files were produced 1088 for root, dirs, files in os.walk("build"): 1089 if any([f.endswith(".gcda") for f in files]): 1090 check_call_cmd("ninja", "-C", "build", "coverage-html") 1091 break 1092 check_call_cmd("meson", "configure", "build", "-Db_coverage=false") 1093 run_cppcheck() 1094 1095 def _extra_meson_checks(self): 1096 with open(os.path.join(self.path, "meson.build"), "rt") as f: 1097 build_contents = f.read() 1098 1099 # Find project's specified meson_version. 1100 meson_version = None 1101 pattern = r"meson_version:[^']*'([^']*)'" 1102 for match in re.finditer(pattern, build_contents): 1103 group = match.group(1) 1104 meson_version = group 1105 1106 # C++20 requires at least Meson 0.57 but Meson itself doesn't 1107 # identify this. Add to our unit-test checks so that we don't 1108 # get a meson.build missing this. 1109 pattern = r"'cpp_std=c\+\+20'" 1110 for match in re.finditer(pattern, build_contents): 1111 if not meson_version or not meson_version_compare( 1112 meson_version, ">=0.57" 1113 ): 1114 raise Exception( 1115 "C++20 support requires specifying in meson.build: " 1116 + "meson_version: '>=0.57'" 1117 ) 1118 1119 # C++23 requires at least Meson 1.1.1 but Meson itself doesn't 1120 # identify this. Add to our unit-test checks so that we don't 1121 # get a meson.build missing this. 1122 pattern = r"'cpp_std=c\+\+23'" 1123 for match in re.finditer(pattern, build_contents): 1124 if not meson_version or not meson_version_compare( 1125 meson_version, ">=1.1.1" 1126 ): 1127 raise Exception( 1128 "C++23 support requires specifying in meson.build: " 1129 + "meson_version: '>=1.1.1'" 1130 ) 1131 1132 if "get_variable(" in build_contents: 1133 if not meson_version or not meson_version_compare( 1134 meson_version, ">=0.58" 1135 ): 1136 raise Exception( 1137 "dep.get_variable() with positional argument requires " 1138 + "meson_Version: '>=0.58'" 1139 ) 1140 1141 1142class Package(object): 1143 def __init__(self, name=None, path=None): 1144 self.supported = [Meson, Autotools, CMake] 1145 self.name = name 1146 self.path = path 1147 self.test_only = False 1148 1149 def build_systems(self): 1150 instances = (system(self.name, self.path) for system in self.supported) 1151 return (instance for instance in instances if instance.probe()) 1152 1153 def build_system(self, preferred=None): 1154 systems = list(self.build_systems()) 1155 1156 if not systems: 1157 return None 1158 1159 if preferred: 1160 return {type(system): system for system in systems}[preferred] 1161 1162 return next(iter(systems)) 1163 1164 def install(self, system=None): 1165 if not system: 1166 system = self.build_system() 1167 1168 system.configure(False) 1169 system.build() 1170 system.install() 1171 1172 def _test_one(self, system): 1173 system.configure(True) 1174 system.build() 1175 system.install() 1176 system.test() 1177 if not TEST_ONLY: 1178 system.analyze() 1179 1180 def test(self): 1181 for system in self.build_systems(): 1182 self._test_one(system) 1183 1184 1185def find_file(filename, basedir): 1186 """ 1187 Finds all occurrences of a file (or list of files) in the base 1188 directory and passes them back with their relative paths. 1189 1190 Parameter descriptions: 1191 filename The name of the file (or list of files) to 1192 find 1193 basedir The base directory search in 1194 """ 1195 1196 if not isinstance(filename, list): 1197 filename = [filename] 1198 1199 filepaths = [] 1200 for root, dirs, files in os.walk(basedir): 1201 if os.path.split(root)[-1] == "subprojects": 1202 for f in files: 1203 subproject = ".".join(f.split(".")[0:-1]) 1204 if f.endswith(".wrap") and subproject in dirs: 1205 # don't find files in meson subprojects with wraps 1206 dirs.remove(subproject) 1207 for f in filename: 1208 if f in files: 1209 filepaths.append(os.path.join(root, f)) 1210 return filepaths 1211 1212 1213if __name__ == "__main__": 1214 # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS] 1215 CONFIGURE_FLAGS = { 1216 "phosphor-logging": [ 1217 "--enable-metadata-processing", 1218 "--enable-openpower-pel-extension", 1219 "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml", 1220 ] 1221 } 1222 1223 # MESON_FLAGS = [GIT REPO]:[MESON FLAGS] 1224 MESON_FLAGS = { 1225 "phosphor-dbus-interfaces": [ 1226 "-Ddata_com_ibm=true", 1227 "-Ddata_org_open_power=true", 1228 ], 1229 "phosphor-logging": ["-Dopenpower-pel-extension=enabled"], 1230 } 1231 1232 # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO] 1233 DEPENDENCIES = { 1234 "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"}, 1235 "AC_CHECK_HEADER": { 1236 "host-ipmid": "phosphor-host-ipmid", 1237 "blobs-ipmid": "phosphor-ipmi-blobs", 1238 "sdbusplus": "sdbusplus", 1239 "sdeventplus": "sdeventplus", 1240 "stdplus": "stdplus", 1241 "gpioplus": "gpioplus", 1242 "phosphor-logging/log.hpp": "phosphor-logging", 1243 }, 1244 "AC_PATH_PROG": {"sdbus++": "sdbusplus"}, 1245 "PKG_CHECK_MODULES": { 1246 "phosphor-dbus-interfaces": "phosphor-dbus-interfaces", 1247 "libipmid": "phosphor-host-ipmid", 1248 "libipmid-host": "phosphor-host-ipmid", 1249 "sdbusplus": "sdbusplus", 1250 "sdeventplus": "sdeventplus", 1251 "stdplus": "stdplus", 1252 "gpioplus": "gpioplus", 1253 "phosphor-logging": "phosphor-logging", 1254 "phosphor-snmp": "phosphor-snmp", 1255 "ipmiblob": "ipmi-blob-tool", 1256 "hei": "openpower-libhei", 1257 "phosphor-ipmi-blobs": "phosphor-ipmi-blobs", 1258 "libcr51sign": "google-misc", 1259 }, 1260 } 1261 1262 # Offset into array of macro parameters MACRO(0, 1, ...N) 1263 DEPENDENCIES_OFFSET = { 1264 "AC_CHECK_LIB": 0, 1265 "AC_CHECK_HEADER": 0, 1266 "AC_PATH_PROG": 1, 1267 "PKG_CHECK_MODULES": 1, 1268 } 1269 1270 # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING] 1271 DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"} 1272 1273 # Set command line arguments 1274 parser = argparse.ArgumentParser() 1275 parser.add_argument( 1276 "-w", 1277 "--workspace", 1278 dest="WORKSPACE", 1279 required=True, 1280 help="Workspace directory location(i.e. /home)", 1281 ) 1282 parser.add_argument( 1283 "-p", 1284 "--package", 1285 dest="PACKAGE", 1286 required=True, 1287 help="OpenBMC package to be unit tested", 1288 ) 1289 parser.add_argument( 1290 "-t", 1291 "--test-only", 1292 dest="TEST_ONLY", 1293 action="store_true", 1294 required=False, 1295 default=False, 1296 help="Only run test cases, no other validation", 1297 ) 1298 arg_inttests = parser.add_mutually_exclusive_group() 1299 arg_inttests.add_argument( 1300 "--integration-tests", 1301 dest="INTEGRATION_TEST", 1302 action="store_true", 1303 required=False, 1304 default=True, 1305 help="Enable integration tests [default].", 1306 ) 1307 arg_inttests.add_argument( 1308 "--no-integration-tests", 1309 dest="INTEGRATION_TEST", 1310 action="store_false", 1311 required=False, 1312 help="Disable integration tests.", 1313 ) 1314 parser.add_argument( 1315 "-v", 1316 "--verbose", 1317 action="store_true", 1318 help="Print additional package status messages", 1319 ) 1320 parser.add_argument( 1321 "-r", "--repeat", help="Repeat tests N times", type=int, default=1 1322 ) 1323 parser.add_argument( 1324 "-b", 1325 "--branch", 1326 dest="BRANCH", 1327 required=False, 1328 help="Branch to target for dependent repositories", 1329 default="master", 1330 ) 1331 parser.add_argument( 1332 "-n", 1333 "--noformat", 1334 dest="FORMAT", 1335 action="store_false", 1336 required=False, 1337 help="Whether or not to run format code", 1338 ) 1339 args = parser.parse_args(sys.argv[1:]) 1340 WORKSPACE = args.WORKSPACE 1341 UNIT_TEST_PKG = args.PACKAGE 1342 TEST_ONLY = args.TEST_ONLY 1343 INTEGRATION_TEST = args.INTEGRATION_TEST 1344 BRANCH = args.BRANCH 1345 FORMAT_CODE = args.FORMAT 1346 if args.verbose: 1347 1348 def printline(*line): 1349 for arg in line: 1350 print(arg, end=" ") 1351 print() 1352 1353 else: 1354 1355 def printline(*line): 1356 pass 1357 1358 CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG) 1359 1360 # Run format-code.sh, which will in turn call any repo-level formatters. 1361 if FORMAT_CODE: 1362 check_call_cmd( 1363 os.path.join( 1364 WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh" 1365 ), 1366 CODE_SCAN_DIR, 1367 ) 1368 1369 # Check to see if any files changed 1370 check_call_cmd( 1371 "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code" 1372 ) 1373 1374 # Check if this repo has a supported make infrastructure 1375 pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR) 1376 if not pkg.build_system(): 1377 print("No valid build system, exit") 1378 sys.exit(0) 1379 1380 prev_umask = os.umask(000) 1381 1382 # Determine dependencies and add them 1383 dep_added = dict() 1384 dep_added[UNIT_TEST_PKG] = False 1385 1386 # Create dependency tree 1387 dep_tree = DepTree(UNIT_TEST_PKG) 1388 build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH) 1389 1390 # Reorder Dependency Tree 1391 for pkg_name, regex_str in DEPENDENCIES_REGEX.items(): 1392 dep_tree.ReorderDeps(pkg_name, regex_str) 1393 if args.verbose: 1394 dep_tree.PrintTree() 1395 1396 install_list = dep_tree.GetInstallList() 1397 1398 # We don't want to treat our package as a dependency 1399 install_list.remove(UNIT_TEST_PKG) 1400 1401 # Install reordered dependencies 1402 for dep in install_list: 1403 build_and_install(dep, False) 1404 1405 # Run package unit tests 1406 build_and_install(UNIT_TEST_PKG, True) 1407 1408 os.umask(prev_umask) 1409 1410 # Run any custom CI scripts the repo has, of which there can be 1411 # multiple of and anywhere in the repository. 1412 ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR) 1413 if ci_scripts: 1414 os.chdir(CODE_SCAN_DIR) 1415 for ci_script in ci_scripts: 1416 check_call_cmd(ci_script) 1417