1#!/usr/bin/env python3 2 3""" 4This script determines the given package's openbmc dependencies from its 5configure.ac file where it downloads, configures, builds, and installs each of 6these dependencies. Then the given package is configured, built, and installed 7prior to executing its unit tests. 8""" 9 10import argparse 11import json 12import multiprocessing 13import os 14import platform 15import re 16import shutil 17import subprocess 18import sys 19from subprocess import CalledProcessError, check_call 20from tempfile import TemporaryDirectory 21from urllib.parse import urljoin 22 23from git import Repo 24 25# interpreter is not used directly but this resolves dependency ordering 26# that would be broken if we didn't include it. 27from mesonbuild import interpreter # noqa: F401 28from mesonbuild import optinterpreter, options 29from mesonbuild.mesonlib import version_compare as meson_version_compare 30from mesonbuild.options import OptionKey, OptionStore 31 32 33class DepTree: 34 """ 35 Represents package dependency tree, where each node is a DepTree with a 36 name and DepTree children. 37 """ 38 39 def __init__(self, name): 40 """ 41 Create new DepTree. 42 43 Parameter descriptions: 44 name Name of new tree node. 45 """ 46 self.name = name 47 self.children = list() 48 49 def AddChild(self, name): 50 """ 51 Add new child node to current node. 52 53 Parameter descriptions: 54 name Name of new child 55 """ 56 new_child = DepTree(name) 57 self.children.append(new_child) 58 return new_child 59 60 def AddChildNode(self, node): 61 """ 62 Add existing child node to current node. 63 64 Parameter descriptions: 65 node Tree node to add 66 """ 67 self.children.append(node) 68 69 def RemoveChild(self, name): 70 """ 71 Remove child node. 72 73 Parameter descriptions: 74 name Name of child to remove 75 """ 76 for child in self.children: 77 if child.name == name: 78 self.children.remove(child) 79 return 80 81 def GetNode(self, name): 82 """ 83 Return node with matching name. Return None if not found. 84 85 Parameter descriptions: 86 name Name of node to return 87 """ 88 if self.name == name: 89 return self 90 for child in self.children: 91 node = child.GetNode(name) 92 if node: 93 return node 94 return None 95 96 def GetParentNode(self, name, parent_node=None): 97 """ 98 Return parent of node with matching name. Return none if not found. 99 100 Parameter descriptions: 101 name Name of node to get parent of 102 parent_node Parent of current node 103 """ 104 if self.name == name: 105 return parent_node 106 for child in self.children: 107 found_node = child.GetParentNode(name, self) 108 if found_node: 109 return found_node 110 return None 111 112 def GetPath(self, name, path=None): 113 """ 114 Return list of node names from head to matching name. 115 Return None if not found. 116 117 Parameter descriptions: 118 name Name of node 119 path List of node names from head to current node 120 """ 121 if not path: 122 path = [] 123 if self.name == name: 124 path.append(self.name) 125 return path 126 for child in self.children: 127 match = child.GetPath(name, path + [self.name]) 128 if match: 129 return match 130 return None 131 132 def GetPathRegex(self, name, regex_str, path=None): 133 """ 134 Return list of node paths that end in name, or match regex_str. 135 Return empty list if not found. 136 137 Parameter descriptions: 138 name Name of node to search for 139 regex_str Regex string to match node names 140 path Path of node names from head to current node 141 """ 142 new_paths = [] 143 if not path: 144 path = [] 145 match = re.match(regex_str, self.name) 146 if (self.name == name) or (match): 147 new_paths.append(path + [self.name]) 148 for child in self.children: 149 return_paths = None 150 full_path = path + [self.name] 151 return_paths = child.GetPathRegex(name, regex_str, full_path) 152 for i in return_paths: 153 new_paths.append(i) 154 return new_paths 155 156 def MoveNode(self, from_name, to_name): 157 """ 158 Mode existing from_name node to become child of to_name node. 159 160 Parameter descriptions: 161 from_name Name of node to make a child of to_name 162 to_name Name of node to make parent of from_name 163 """ 164 parent_from_node = self.GetParentNode(from_name) 165 from_node = self.GetNode(from_name) 166 parent_from_node.RemoveChild(from_name) 167 to_node = self.GetNode(to_name) 168 to_node.AddChildNode(from_node) 169 170 def ReorderDeps(self, name, regex_str): 171 """ 172 Reorder dependency tree. If tree contains nodes with names that 173 match 'name' and 'regex_str', move 'regex_str' nodes that are 174 to the right of 'name' node, so that they become children of the 175 'name' node. 176 177 Parameter descriptions: 178 name Name of node to look for 179 regex_str Regex string to match names to 180 """ 181 name_path = self.GetPath(name) 182 if not name_path: 183 return 184 paths = self.GetPathRegex(name, regex_str) 185 is_name_in_paths = False 186 name_index = 0 187 for i in range(len(paths)): 188 path = paths[i] 189 if path[-1] == name: 190 is_name_in_paths = True 191 name_index = i 192 break 193 if not is_name_in_paths: 194 return 195 for i in range(name_index + 1, len(paths)): 196 path = paths[i] 197 if name in path: 198 continue 199 from_name = path[-1] 200 self.MoveNode(from_name, name) 201 202 def GetInstallList(self): 203 """ 204 Return post-order list of node names. 205 206 Parameter descriptions: 207 """ 208 install_list = [] 209 for child in self.children: 210 child_install_list = child.GetInstallList() 211 install_list.extend(child_install_list) 212 install_list.append(self.name) 213 return install_list 214 215 def PrintTree(self, level=0): 216 """ 217 Print pre-order node names with indentation denoting node depth level. 218 219 Parameter descriptions: 220 level Current depth level 221 """ 222 INDENT_PER_LEVEL = 4 223 print(" " * (level * INDENT_PER_LEVEL) + self.name) 224 for child in self.children: 225 child.PrintTree(level + 1) 226 227 228def check_call_cmd(*cmd, **kwargs): 229 """ 230 Verbose prints the directory location the given command is called from and 231 the command, then executes the command using check_call. 232 233 Parameter descriptions: 234 dir Directory location command is to be called from 235 cmd List of parameters constructing the complete command 236 """ 237 printline(os.getcwd(), ">", " ".join(cmd)) 238 check_call(cmd, **kwargs) 239 240 241def clone_pkg(pkg, branch): 242 """ 243 Clone the given openbmc package's git repository from gerrit into 244 the WORKSPACE location 245 246 Parameter descriptions: 247 pkg Name of the package to clone 248 branch Branch to clone from pkg 249 """ 250 pkg_dir = os.path.join(WORKSPACE, pkg) 251 if os.path.exists(os.path.join(pkg_dir, ".git")): 252 return pkg_dir 253 pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg) 254 os.mkdir(pkg_dir) 255 printline(pkg_dir, "> git clone", pkg_repo, branch, "./") 256 try: 257 # first try the branch 258 clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch) 259 repo_inst = clone.working_dir 260 except Exception: 261 printline("Input branch not found, default to master") 262 clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master") 263 repo_inst = clone.working_dir 264 return repo_inst 265 266 267def make_target_exists(target): 268 """ 269 Runs a check against the makefile in the current directory to determine 270 if the target exists so that it can be built. 271 272 Parameter descriptions: 273 target The make target we are checking 274 """ 275 try: 276 cmd = ["make", "-n", target] 277 with open(os.devnull, "w") as devnull: 278 check_call(cmd, stdout=devnull, stderr=devnull) 279 return True 280 except CalledProcessError: 281 return False 282 283 284make_parallel = [ 285 "make", 286 # Run enough jobs to saturate all the cpus 287 "-j", 288 str(multiprocessing.cpu_count()), 289 # Don't start more jobs if the load avg is too high 290 "-l", 291 str(multiprocessing.cpu_count()), 292 # Synchronize the output so logs aren't intermixed in stdout / stderr 293 "-O", 294] 295 296 297def build_and_install(name, build_for_testing=False): 298 """ 299 Builds and installs the package in the environment. Optionally 300 builds the examples and test cases for package. 301 302 Parameter description: 303 name The name of the package we are building 304 build_for_testing Enable options related to testing on the package? 305 """ 306 os.chdir(os.path.join(WORKSPACE, name)) 307 308 # Refresh dynamic linker run time bindings for dependencies 309 check_call_cmd("sudo", "-n", "--", "ldconfig") 310 311 pkg = Package() 312 if build_for_testing: 313 pkg.test() 314 else: 315 pkg.install() 316 317 318def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None): 319 """ 320 For each package (name), starting with the package to be unit tested, 321 extract its dependencies. For each package dependency defined, recursively 322 apply the same strategy 323 324 Parameter descriptions: 325 name Name of the package 326 pkgdir Directory where package source is located 327 dep_added Current dict of dependencies and added status 328 head Head node of the dependency tree 329 branch Branch to clone from pkg 330 dep_tree Current dependency tree node 331 """ 332 if not dep_tree: 333 dep_tree = head 334 335 with open("/tmp/depcache", "r") as depcache: 336 cache = depcache.readline() 337 338 # Read out pkg dependencies 339 pkg = Package(name, pkgdir) 340 341 build = pkg.build_system() 342 if not build: 343 raise Exception(f"Unable to find build system for {name}.") 344 345 for dep in set(build.dependencies()): 346 if dep in cache: 347 continue 348 # Dependency package not already known 349 if dep_added.get(dep) is None: 350 print(f"Adding {dep} dependency to {name}.") 351 # Dependency package not added 352 new_child = dep_tree.AddChild(dep) 353 dep_added[dep] = False 354 dep_pkgdir = clone_pkg(dep, branch) 355 # Determine this dependency package's 356 # dependencies and add them before 357 # returning to add this package 358 dep_added = build_dep_tree( 359 dep, dep_pkgdir, dep_added, head, branch, new_child 360 ) 361 else: 362 # Dependency package known and added 363 if dep_added[dep]: 364 continue 365 else: 366 # Cyclic dependency failure 367 raise Exception("Cyclic dependencies found in " + name) 368 369 if not dep_added[name]: 370 dep_added[name] = True 371 372 return dep_added 373 374 375def run_cppcheck(): 376 if not os.path.exists(os.path.join("build", "compile_commands.json")): 377 return None 378 379 with TemporaryDirectory() as cpp_dir: 380 # http://cppcheck.sourceforge.net/manual.pdf 381 try: 382 check_call_cmd( 383 "cppcheck", 384 "-j", 385 str(multiprocessing.cpu_count()), 386 "--enable=style,performance,portability,missingInclude", 387 "--inline-suppr", 388 "--suppress=useStlAlgorithm", 389 "--suppress=unusedStructMember", 390 "--suppress=postfixOperator", 391 "--suppress=unreadVariable", 392 "--suppress=knownConditionTrueFalse", 393 "--library=googletest", 394 "--project=build/compile_commands.json", 395 f"--cppcheck-build-dir={cpp_dir}", 396 ) 397 except subprocess.CalledProcessError: 398 print("cppcheck found errors") 399 400 401def is_valgrind_safe(): 402 """ 403 Returns whether it is safe to run valgrind on our platform 404 """ 405 src = "unit-test-vg.c" 406 exe = "./unit-test-vg" 407 with open(src, "w") as h: 408 h.write("#include <errno.h>\n") 409 h.write("#include <stdio.h>\n") 410 h.write("#include <stdlib.h>\n") 411 h.write("#include <string.h>\n") 412 h.write("int main() {\n") 413 h.write("char *heap_str = malloc(16);\n") 414 h.write('strcpy(heap_str, "RandString");\n') 415 h.write('int res = strcmp("RandString", heap_str);\n') 416 h.write("free(heap_str);\n") 417 h.write("char errstr[64];\n") 418 h.write("strerror_r(EINVAL, errstr, sizeof(errstr));\n") 419 h.write('printf("%s\\n", errstr);\n') 420 h.write("return res;\n") 421 h.write("}\n") 422 try: 423 with open(os.devnull, "w") as devnull: 424 check_call( 425 ["gcc", "-O2", "-o", exe, src], stdout=devnull, stderr=devnull 426 ) 427 check_call( 428 ["valgrind", "--error-exitcode=99", exe], 429 stdout=devnull, 430 stderr=devnull, 431 ) 432 return True 433 except Exception: 434 sys.stderr.write("###### Platform is not valgrind safe ######\n") 435 return False 436 finally: 437 os.remove(src) 438 os.remove(exe) 439 440 441def is_sanitize_safe(): 442 """ 443 Returns whether it is safe to run sanitizers on our platform 444 """ 445 src = "unit-test-sanitize.c" 446 exe = "./unit-test-sanitize" 447 with open(src, "w") as h: 448 h.write("int main() { return 0; }\n") 449 try: 450 with open(os.devnull, "w") as devnull: 451 check_call( 452 [ 453 "gcc", 454 "-O2", 455 "-fsanitize=address", 456 "-fsanitize=undefined", 457 "-o", 458 exe, 459 src, 460 ], 461 stdout=devnull, 462 stderr=devnull, 463 ) 464 check_call([exe], stdout=devnull, stderr=devnull) 465 466 # TODO - Sanitizer not working on ppc64le 467 # https://github.com/openbmc/openbmc-build-scripts/issues/31 468 if platform.processor() == "ppc64le": 469 sys.stderr.write("###### ppc64le is not sanitize safe ######\n") 470 return False 471 else: 472 return True 473 except Exception: 474 sys.stderr.write("###### Platform is not sanitize safe ######\n") 475 return False 476 finally: 477 os.remove(src) 478 os.remove(exe) 479 480 481def maybe_make_valgrind(): 482 """ 483 Potentially runs the unit tests through valgrind for the package 484 via `make check-valgrind`. If the package does not have valgrind testing 485 then it just skips over this. 486 """ 487 # Valgrind testing is currently broken by an aggressive strcmp optimization 488 # that is inlined into optimized code for POWER by gcc 7+. Until we find 489 # a workaround, just don't run valgrind tests on POWER. 490 # https://github.com/openbmc/openbmc/issues/3315 491 if not is_valgrind_safe(): 492 sys.stderr.write("###### Skipping valgrind ######\n") 493 return 494 if not make_target_exists("check-valgrind"): 495 return 496 497 try: 498 cmd = make_parallel + ["check-valgrind"] 499 check_call_cmd(*cmd) 500 except CalledProcessError: 501 for root, _, files in os.walk(os.getcwd()): 502 for f in files: 503 if re.search("test-suite-[a-z]+.log", f) is None: 504 continue 505 check_call_cmd("cat", os.path.join(root, f)) 506 raise Exception("Valgrind tests failed") 507 508 509def maybe_make_coverage(): 510 """ 511 Potentially runs the unit tests through code coverage for the package 512 via `make check-code-coverage`. If the package does not have code coverage 513 testing then it just skips over this. 514 """ 515 if not make_target_exists("check-code-coverage"): 516 return 517 518 # Actually run code coverage 519 try: 520 cmd = make_parallel + ["check-code-coverage"] 521 check_call_cmd(*cmd) 522 except CalledProcessError: 523 raise Exception("Code coverage failed") 524 525 526class BuildSystem(object): 527 """ 528 Build systems generally provide the means to configure, build, install and 529 test software. The BuildSystem class defines a set of interfaces on top of 530 which Autotools, Meson, CMake and possibly other build system drivers can 531 be implemented, separating out the phases to control whether a package 532 should merely be installed or also tested and analyzed. 533 """ 534 535 def __init__(self, package, path): 536 """Initialise the driver with properties independent of the build 537 system 538 539 Keyword arguments: 540 package: The name of the package. Derived from the path if None 541 path: The path to the package. Set to the working directory if None 542 """ 543 self.path = "." if not path else path 544 realpath = os.path.realpath(self.path) 545 self.package = package if package else os.path.basename(realpath) 546 self.build_for_testing = False 547 548 def probe(self): 549 """Test if the build system driver can be applied to the package 550 551 Return True if the driver can drive the package's build system, 552 otherwise False. 553 554 Generally probe() is implemented by testing for the presence of the 555 build system's configuration file(s). 556 """ 557 raise NotImplementedError 558 559 def dependencies(self): 560 """Provide the package's dependencies 561 562 Returns a list of dependencies. If no dependencies are required then an 563 empty list must be returned. 564 565 Generally dependencies() is implemented by analysing and extracting the 566 data from the build system configuration. 567 """ 568 raise NotImplementedError 569 570 def configure(self, build_for_testing): 571 """Configure the source ready for building 572 573 Should raise an exception if configuration failed. 574 575 Keyword arguments: 576 build_for_testing: Mark the package as being built for testing rather 577 than for installation as a dependency for the 578 package under test. Setting to True generally 579 implies that the package will be configured to build 580 with debug information, at a low level of 581 optimisation and possibly with sanitizers enabled. 582 583 Generally configure() is implemented by invoking the build system 584 tooling to generate Makefiles or equivalent. 585 """ 586 raise NotImplementedError 587 588 def build(self): 589 """Build the software ready for installation and/or testing 590 591 Should raise an exception if the build fails 592 593 Generally build() is implemented by invoking `make` or `ninja`. 594 """ 595 raise NotImplementedError 596 597 def install(self): 598 """Install the software ready for use 599 600 Should raise an exception if installation fails 601 602 Like build(), install() is generally implemented by invoking `make` or 603 `ninja`. 604 """ 605 raise NotImplementedError 606 607 def test(self): 608 """Build and run the test suite associated with the package 609 610 Should raise an exception if the build or testing fails. 611 612 Like install(), test() is generally implemented by invoking `make` or 613 `ninja`. 614 """ 615 raise NotImplementedError 616 617 def analyze(self): 618 """Run any supported analysis tools over the codebase 619 620 Should raise an exception if analysis fails. 621 622 Some analysis tools such as scan-build need injection into the build 623 system. analyze() provides the necessary hook to implement such 624 behaviour. Analyzers independent of the build system can also be 625 specified here but at the cost of possible duplication of code between 626 the build system driver implementations. 627 """ 628 raise NotImplementedError 629 630 631class Autotools(BuildSystem): 632 def __init__(self, package=None, path=None): 633 super(Autotools, self).__init__(package, path) 634 635 def probe(self): 636 return os.path.isfile(os.path.join(self.path, "configure.ac")) 637 638 def dependencies(self): 639 configure_ac = os.path.join(self.path, "configure.ac") 640 641 contents = "" 642 # Prepend some special function overrides so we can parse out 643 # dependencies 644 for macro in DEPENDENCIES.keys(): 645 contents += ( 646 "m4_define([" 647 + macro 648 + "], [" 649 + macro 650 + "_START$" 651 + str(DEPENDENCIES_OFFSET[macro] + 1) 652 + macro 653 + "_END])\n" 654 ) 655 with open(configure_ac, "rt") as f: 656 contents += f.read() 657 658 autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"] 659 autoconf_process = subprocess.Popen( 660 autoconf_cmdline, 661 stdin=subprocess.PIPE, 662 stdout=subprocess.PIPE, 663 stderr=subprocess.PIPE, 664 ) 665 document = contents.encode("utf-8") 666 (stdout, stderr) = autoconf_process.communicate(input=document) 667 if not stdout: 668 print(stderr) 669 raise Exception("Failed to run autoconf for parsing dependencies") 670 671 # Parse out all of the dependency text 672 matches = [] 673 for macro in DEPENDENCIES.keys(): 674 pattern = "(" + macro + ")_START(.*?)" + macro + "_END" 675 for match in re.compile(pattern).finditer(stdout.decode("utf-8")): 676 matches.append((match.group(1), match.group(2))) 677 678 # Look up dependencies from the text 679 found_deps = [] 680 for macro, deptext in matches: 681 for potential_dep in deptext.split(" "): 682 for known_dep in DEPENDENCIES[macro].keys(): 683 if potential_dep.startswith(known_dep): 684 found_deps.append(DEPENDENCIES[macro][known_dep]) 685 686 return found_deps 687 688 def _configure_feature(self, flag, enabled): 689 """ 690 Returns an configure flag as a string 691 692 Parameters: 693 flag The name of the flag 694 enabled Whether the flag is enabled or disabled 695 """ 696 return "--" + ("enable" if enabled else "disable") + "-" + flag 697 698 def configure(self, build_for_testing): 699 self.build_for_testing = build_for_testing 700 conf_flags = [ 701 self._configure_feature("silent-rules", False), 702 self._configure_feature("examples", build_for_testing), 703 self._configure_feature("tests", build_for_testing), 704 self._configure_feature("itests", INTEGRATION_TEST), 705 ] 706 conf_flags.extend( 707 [ 708 self._configure_feature("code-coverage", False), 709 self._configure_feature("valgrind", build_for_testing), 710 ] 711 ) 712 # Add any necessary configure flags for package 713 if CONFIGURE_FLAGS.get(self.package) is not None: 714 conf_flags.extend(CONFIGURE_FLAGS.get(self.package)) 715 for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]: 716 if os.path.exists(bootstrap): 717 check_call_cmd("./" + bootstrap) 718 break 719 check_call_cmd("./configure", *conf_flags) 720 721 def build(self): 722 check_call_cmd(*make_parallel) 723 724 def install(self): 725 check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"])) 726 check_call_cmd("sudo", "-n", "--", "ldconfig") 727 728 def test(self): 729 try: 730 cmd = make_parallel + ["check"] 731 for i in range(0, args.repeat): 732 check_call_cmd(*cmd) 733 734 maybe_make_valgrind() 735 maybe_make_coverage() 736 except CalledProcessError: 737 for root, _, files in os.walk(os.getcwd()): 738 if "test-suite.log" not in files: 739 continue 740 check_call_cmd("cat", os.path.join(root, "test-suite.log")) 741 raise Exception("Unit tests failed") 742 743 def analyze(self): 744 run_cppcheck() 745 746 747class CMake(BuildSystem): 748 def __init__(self, package=None, path=None): 749 super(CMake, self).__init__(package, path) 750 751 def probe(self): 752 return os.path.isfile(os.path.join(self.path, "CMakeLists.txt")) 753 754 def dependencies(self): 755 return [] 756 757 def configure(self, build_for_testing): 758 self.build_for_testing = build_for_testing 759 if INTEGRATION_TEST: 760 check_call_cmd( 761 "cmake", 762 "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", 763 "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'", 764 "-DITESTS=ON", 765 ".", 766 ) 767 else: 768 check_call_cmd( 769 "cmake", 770 "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", 771 "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'", 772 ".", 773 ) 774 775 def build(self): 776 check_call_cmd( 777 "cmake", 778 "--build", 779 ".", 780 "--", 781 "-j", 782 str(multiprocessing.cpu_count()), 783 ) 784 785 def install(self): 786 check_call_cmd("sudo", "cmake", "--install", ".") 787 check_call_cmd("sudo", "-n", "--", "ldconfig") 788 789 def test(self): 790 if make_target_exists("test"): 791 check_call_cmd("ctest", ".") 792 793 def analyze(self): 794 if os.path.isfile(".clang-tidy"): 795 with TemporaryDirectory(prefix="build", dir=".") as build_dir: 796 # clang-tidy needs to run on a clang-specific build 797 check_call_cmd( 798 "cmake", 799 "-DCMAKE_C_COMPILER=clang", 800 "-DCMAKE_CXX_COMPILER=clang++", 801 "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", 802 "-H.", 803 "-B" + build_dir, 804 ) 805 806 check_call_cmd( 807 "run-clang-tidy", "-header-filter=.*", "-p", build_dir 808 ) 809 810 maybe_make_valgrind() 811 maybe_make_coverage() 812 run_cppcheck() 813 814 815class Meson(BuildSystem): 816 @staticmethod 817 def _project_name(path): 818 doc = subprocess.check_output( 819 ["meson", "introspect", "--projectinfo", path], 820 stderr=subprocess.STDOUT, 821 ).decode("utf-8") 822 return json.loads(doc)["descriptive_name"] 823 824 def __init__(self, package=None, path=None): 825 super(Meson, self).__init__(package, path) 826 827 def probe(self): 828 return os.path.isfile(os.path.join(self.path, "meson.build")) 829 830 def dependencies(self): 831 meson_build = os.path.join(self.path, "meson.build") 832 if not os.path.exists(meson_build): 833 return [] 834 835 found_deps = [] 836 for root, dirs, files in os.walk(self.path): 837 if "meson.build" not in files: 838 continue 839 with open(os.path.join(root, "meson.build"), "rt") as f: 840 build_contents = f.read() 841 pattern = r"dependency\('([^']*)'.*?\),?\n" 842 for match in re.finditer(pattern, build_contents): 843 group = match.group(1) 844 maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group) 845 if maybe_dep is not None: 846 found_deps.append(maybe_dep) 847 848 return found_deps 849 850 def _parse_options(self, options_file): 851 """ 852 Returns a set of options defined in the provides meson_options.txt file 853 854 Parameters: 855 options_file The file containing options 856 """ 857 store = OptionStore() 858 oi = optinterpreter.OptionInterpreter(store, "") 859 oi.process(options_file) 860 return oi.options 861 862 def _configure_boolean(self, val): 863 """ 864 Returns the meson flag which signifies the value 865 866 True is true which requires the boolean. 867 False is false which disables the boolean. 868 869 Parameters: 870 val The value being converted 871 """ 872 if val is True: 873 return "true" 874 elif val is False: 875 return "false" 876 else: 877 raise Exception("Bad meson boolean value") 878 879 def _configure_feature(self, val): 880 """ 881 Returns the meson flag which signifies the value 882 883 True is enabled which requires the feature. 884 False is disabled which disables the feature. 885 None is auto which autodetects the feature. 886 887 Parameters: 888 val The value being converted 889 """ 890 if val is True: 891 return "enabled" 892 elif val is False: 893 return "disabled" 894 elif val is None: 895 return "auto" 896 else: 897 raise Exception("Bad meson feature value") 898 899 def _configure_option(self, opts, key, val): 900 """ 901 Returns the meson flag which signifies the value 902 based on the type of the opt 903 904 Parameters: 905 opt The meson option which we are setting 906 val The value being converted 907 """ 908 if isinstance(opts[key], options.UserBooleanOption): 909 str_val = self._configure_boolean(val) 910 elif isinstance(opts[key], options.UserFeatureOption): 911 str_val = self._configure_feature(val) 912 else: 913 raise Exception("Unknown meson option type") 914 return "-D{}={}".format(key, str_val) 915 916 def get_configure_flags(self, build_for_testing): 917 self.build_for_testing = build_for_testing 918 meson_options = {} 919 if os.path.exists("meson.options"): 920 meson_options = self._parse_options("meson.options") 921 elif os.path.exists("meson_options.txt"): 922 meson_options = self._parse_options("meson_options.txt") 923 meson_flags = [ 924 "-Db_colorout=never", 925 "-Dwerror=true", 926 "-Dwarning_level=3", 927 "-Dcpp_args='-DBOOST_USE_VALGRIND'", 928 ] 929 if build_for_testing: 930 # -Ddebug=true -Doptimization=g is helpful for abi-dumper but isn't a combination that 931 # is supported by meson's build types. Configure it manually. 932 meson_flags.append("-Ddebug=true") 933 meson_flags.append("-Doptimization=g") 934 else: 935 meson_flags.append("--buildtype=debugoptimized") 936 if OptionKey("tests") in meson_options: 937 meson_flags.append( 938 self._configure_option( 939 meson_options, OptionKey("tests"), build_for_testing 940 ) 941 ) 942 if OptionKey("examples") in meson_options: 943 meson_flags.append( 944 self._configure_option( 945 meson_options, OptionKey("examples"), build_for_testing 946 ) 947 ) 948 if OptionKey("itests") in meson_options: 949 meson_flags.append( 950 self._configure_option( 951 meson_options, OptionKey("itests"), INTEGRATION_TEST 952 ) 953 ) 954 if MESON_FLAGS.get(self.package) is not None: 955 meson_flags.extend(MESON_FLAGS.get(self.package)) 956 return meson_flags 957 958 def configure(self, build_for_testing): 959 meson_flags = self.get_configure_flags(build_for_testing) 960 try: 961 check_call_cmd( 962 "meson", "setup", "--reconfigure", "build", *meson_flags 963 ) 964 except Exception: 965 shutil.rmtree("build", ignore_errors=True) 966 check_call_cmd("meson", "setup", "build", *meson_flags) 967 968 self.package = Meson._project_name("build") 969 970 def build(self): 971 check_call_cmd("ninja", "-C", "build") 972 973 def install(self): 974 check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install") 975 check_call_cmd("sudo", "-n", "--", "ldconfig") 976 977 def test(self): 978 # It is useful to check various settings of the meson.build file 979 # for compatibility, such as meson_version checks. We shouldn't 980 # do this in the configure path though because it affects subprojects 981 # and dependencies as well, but we only want this applied to the 982 # project-under-test (otherwise an upstream dependency could fail 983 # this check without our control). 984 self._extra_meson_checks() 985 986 try: 987 test_args = ("--repeat", str(args.repeat), "-C", "build") 988 check_call_cmd("meson", "test", "--print-errorlogs", *test_args) 989 990 except CalledProcessError: 991 raise Exception("Unit tests failed") 992 993 def _setup_exists(self, setup): 994 """ 995 Returns whether the meson build supports the named test setup. 996 997 Parameter descriptions: 998 setup The setup target to check 999 """ 1000 try: 1001 with open(os.devnull, "w"): 1002 output = subprocess.check_output( 1003 [ 1004 "meson", 1005 "test", 1006 "-C", 1007 "build", 1008 "--setup", 1009 "{}:{}".format(self.package, setup), 1010 "__likely_not_a_test__", 1011 ], 1012 stderr=subprocess.STDOUT, 1013 ) 1014 except CalledProcessError as e: 1015 output = e.output 1016 output = output.decode("utf-8") 1017 return not re.search("Unknown test setup '[^']+'[.]", output) 1018 1019 def _maybe_valgrind(self): 1020 """ 1021 Potentially runs the unit tests through valgrind for the package 1022 via `meson test`. The package can specify custom valgrind 1023 configurations by utilizing add_test_setup() in a meson.build 1024 """ 1025 if not is_valgrind_safe(): 1026 sys.stderr.write("###### Skipping valgrind ######\n") 1027 return 1028 try: 1029 if self._setup_exists("valgrind"): 1030 check_call_cmd( 1031 "meson", 1032 "test", 1033 "-t", 1034 "10", 1035 "-C", 1036 "build", 1037 "--print-errorlogs", 1038 "--setup", 1039 "{}:valgrind".format(self.package), 1040 ) 1041 else: 1042 check_call_cmd( 1043 "meson", 1044 "test", 1045 "-t", 1046 "10", 1047 "-C", 1048 "build", 1049 "--print-errorlogs", 1050 "--wrapper", 1051 "valgrind --error-exitcode=1", 1052 ) 1053 except CalledProcessError: 1054 raise Exception("Valgrind tests failed") 1055 1056 def analyze(self): 1057 self._maybe_valgrind() 1058 1059 # Run clang-tidy only if the project has a configuration 1060 if os.path.isfile(".clang-tidy"): 1061 os.environ["CXX"] = "clang++" 1062 with TemporaryDirectory(prefix="build", dir=".") as build_dir: 1063 check_call_cmd("meson", "setup", build_dir) 1064 if not os.path.isfile(".openbmc-no-clang"): 1065 check_call_cmd("meson", "compile", "-C", build_dir) 1066 try: 1067 check_call_cmd("ninja", "-C", build_dir, "clang-tidy-fix") 1068 except subprocess.CalledProcessError: 1069 check_call_cmd( 1070 "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff" 1071 ) 1072 raise 1073 # Run the basic clang static analyzer otherwise 1074 else: 1075 check_call_cmd("ninja", "-C", "build", "scan-build") 1076 1077 # Run tests through sanitizers 1078 # b_lundef is needed if clang++ is CXX since it resolves the 1079 # asan symbols at runtime only. We don't want to set it earlier 1080 # in the build process to ensure we don't have undefined 1081 # runtime code. 1082 if is_sanitize_safe(): 1083 meson_flags = self.get_configure_flags(self.build_for_testing) 1084 meson_flags.append("-Db_sanitize=address,undefined") 1085 try: 1086 check_call_cmd( 1087 "meson", "setup", "--reconfigure", "build", *meson_flags 1088 ) 1089 except Exception: 1090 shutil.rmtree("build", ignore_errors=True) 1091 check_call_cmd("meson", "setup", "build", *meson_flags) 1092 check_call_cmd( 1093 "meson", 1094 "test", 1095 "-C", 1096 "build", 1097 "--print-errorlogs", 1098 "--logbase", 1099 "testlog-ubasan", 1100 env=os.environ | {"UBSAN_OPTIONS": "halt_on_error=1"}, 1101 ) 1102 # TODO: Fix memory sanitizer 1103 # check_call_cmd('meson', 'configure', 'build', 1104 # '-Db_sanitize=memory') 1105 # check_call_cmd('meson', 'test', '-C', 'build' 1106 # '--logbase', 'testlog-msan') 1107 meson_flags.remove("-Db_sanitize=address,undefined") 1108 try: 1109 check_call_cmd( 1110 "meson", "setup", "--reconfigure", "build", *meson_flags 1111 ) 1112 except Exception: 1113 shutil.rmtree("build", ignore_errors=True) 1114 check_call_cmd("meson", "setup", "build", *meson_flags) 1115 else: 1116 sys.stderr.write("###### Skipping sanitizers ######\n") 1117 1118 # Run coverage checks 1119 check_call_cmd("meson", "configure", "build", "-Db_coverage=true") 1120 self.test() 1121 # Only build coverage HTML if coverage files were produced 1122 for root, dirs, files in os.walk("build"): 1123 if any([f.endswith(".gcda") for f in files]): 1124 check_call_cmd("ninja", "-C", "build", "coverage-html") 1125 break 1126 check_call_cmd("meson", "configure", "build", "-Db_coverage=false") 1127 run_cppcheck() 1128 1129 def _extra_meson_checks(self): 1130 with open(os.path.join(self.path, "meson.build"), "rt") as f: 1131 build_contents = f.read() 1132 1133 # Find project's specified meson_version. 1134 meson_version = None 1135 pattern = r"meson_version:[^']*'([^']*)'" 1136 for match in re.finditer(pattern, build_contents): 1137 group = match.group(1) 1138 meson_version = group 1139 1140 # C++20 requires at least Meson 0.57 but Meson itself doesn't 1141 # identify this. Add to our unit-test checks so that we don't 1142 # get a meson.build missing this. 1143 pattern = r"'cpp_std=c\+\+20'" 1144 for match in re.finditer(pattern, build_contents): 1145 if not meson_version or not meson_version_compare( 1146 meson_version, ">=0.57" 1147 ): 1148 raise Exception( 1149 "C++20 support requires specifying in meson.build: " 1150 + "meson_version: '>=0.57'" 1151 ) 1152 1153 # C++23 requires at least Meson 1.1.1 but Meson itself doesn't 1154 # identify this. Add to our unit-test checks so that we don't 1155 # get a meson.build missing this. 1156 pattern = r"'cpp_std=c\+\+23'" 1157 for match in re.finditer(pattern, build_contents): 1158 if not meson_version or not meson_version_compare( 1159 meson_version, ">=1.1.1" 1160 ): 1161 raise Exception( 1162 "C++23 support requires specifying in meson.build: " 1163 + "meson_version: '>=1.1.1'" 1164 ) 1165 1166 if "get_variable(" in build_contents: 1167 if not meson_version or not meson_version_compare( 1168 meson_version, ">=0.58" 1169 ): 1170 raise Exception( 1171 "dep.get_variable() with positional argument requires " 1172 + "meson_Version: '>=0.58'" 1173 ) 1174 1175 1176class Package(object): 1177 def __init__(self, name=None, path=None): 1178 self.supported = [Meson, Autotools, CMake] 1179 self.name = name 1180 self.path = path 1181 self.test_only = False 1182 1183 def build_systems(self): 1184 instances = (system(self.name, self.path) for system in self.supported) 1185 return (instance for instance in instances if instance.probe()) 1186 1187 def build_system(self, preferred=None): 1188 systems = list(self.build_systems()) 1189 1190 if not systems: 1191 return None 1192 1193 if preferred: 1194 return {type(system): system for system in systems}[preferred] 1195 1196 return next(iter(systems)) 1197 1198 def install(self, system=None): 1199 if not system: 1200 system = self.build_system() 1201 1202 system.configure(False) 1203 system.build() 1204 system.install() 1205 1206 def _test_one(self, system): 1207 system.configure(True) 1208 system.build() 1209 system.install() 1210 system.test() 1211 if not TEST_ONLY: 1212 system.analyze() 1213 1214 def test(self): 1215 for system in self.build_systems(): 1216 self._test_one(system) 1217 1218 1219def find_file(filename, basedir): 1220 """ 1221 Finds all occurrences of a file (or list of files) in the base 1222 directory and passes them back with their relative paths. 1223 1224 Parameter descriptions: 1225 filename The name of the file (or list of files) to 1226 find 1227 basedir The base directory search in 1228 """ 1229 1230 if not isinstance(filename, list): 1231 filename = [filename] 1232 1233 filepaths = [] 1234 for root, dirs, files in os.walk(basedir): 1235 if os.path.split(root)[-1] == "subprojects": 1236 for f in files: 1237 subproject = ".".join(f.split(".")[0:-1]) 1238 if f.endswith(".wrap") and subproject in dirs: 1239 # don't find files in meson subprojects with wraps 1240 dirs.remove(subproject) 1241 for f in filename: 1242 if f in files: 1243 filepaths.append(os.path.join(root, f)) 1244 return filepaths 1245 1246 1247if __name__ == "__main__": 1248 # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS] 1249 CONFIGURE_FLAGS = { 1250 "phosphor-logging": [ 1251 "--enable-metadata-processing", 1252 "--enable-openpower-pel-extension", 1253 "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml", 1254 ] 1255 } 1256 1257 # MESON_FLAGS = [GIT REPO]:[MESON FLAGS] 1258 MESON_FLAGS = { 1259 "phosphor-dbus-interfaces": [ 1260 "-Ddata_com_ibm=true", 1261 "-Ddata_org_open_power=true", 1262 ], 1263 "phosphor-logging": ["-Dopenpower-pel-extension=enabled"], 1264 } 1265 1266 # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO] 1267 DEPENDENCIES = { 1268 "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"}, 1269 "AC_CHECK_HEADER": { 1270 "host-ipmid": "phosphor-host-ipmid", 1271 "blobs-ipmid": "phosphor-ipmi-blobs", 1272 "sdbusplus": "sdbusplus", 1273 "sdeventplus": "sdeventplus", 1274 "stdplus": "stdplus", 1275 "gpioplus": "gpioplus", 1276 "phosphor-logging/log.hpp": "phosphor-logging", 1277 }, 1278 "AC_PATH_PROG": {"sdbus++": "sdbusplus"}, 1279 "PKG_CHECK_MODULES": { 1280 "phosphor-dbus-interfaces": "phosphor-dbus-interfaces", 1281 "libipmid": "phosphor-host-ipmid", 1282 "libipmid-host": "phosphor-host-ipmid", 1283 "sdbusplus": "sdbusplus", 1284 "sdeventplus": "sdeventplus", 1285 "stdplus": "stdplus", 1286 "gpioplus": "gpioplus", 1287 "phosphor-logging": "phosphor-logging", 1288 "phosphor-snmp": "phosphor-snmp", 1289 "ipmiblob": "ipmi-blob-tool", 1290 "hei": "openpower-libhei", 1291 "phosphor-ipmi-blobs": "phosphor-ipmi-blobs", 1292 "libcr51sign": "google-misc", 1293 }, 1294 } 1295 1296 # Offset into array of macro parameters MACRO(0, 1, ...N) 1297 DEPENDENCIES_OFFSET = { 1298 "AC_CHECK_LIB": 0, 1299 "AC_CHECK_HEADER": 0, 1300 "AC_PATH_PROG": 1, 1301 "PKG_CHECK_MODULES": 1, 1302 } 1303 1304 # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING] 1305 DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"} 1306 1307 # Set command line arguments 1308 parser = argparse.ArgumentParser() 1309 parser.add_argument( 1310 "-w", 1311 "--workspace", 1312 dest="WORKSPACE", 1313 required=True, 1314 help="Workspace directory location(i.e. /home)", 1315 ) 1316 parser.add_argument( 1317 "-p", 1318 "--package", 1319 dest="PACKAGE", 1320 required=True, 1321 help="OpenBMC package to be unit tested", 1322 ) 1323 parser.add_argument( 1324 "-t", 1325 "--test-only", 1326 dest="TEST_ONLY", 1327 action="store_true", 1328 required=False, 1329 default=False, 1330 help="Only run test cases, no other validation", 1331 ) 1332 arg_inttests = parser.add_mutually_exclusive_group() 1333 arg_inttests.add_argument( 1334 "--integration-tests", 1335 dest="INTEGRATION_TEST", 1336 action="store_true", 1337 required=False, 1338 default=True, 1339 help="Enable integration tests [default].", 1340 ) 1341 arg_inttests.add_argument( 1342 "--no-integration-tests", 1343 dest="INTEGRATION_TEST", 1344 action="store_false", 1345 required=False, 1346 help="Disable integration tests.", 1347 ) 1348 parser.add_argument( 1349 "-v", 1350 "--verbose", 1351 action="store_true", 1352 help="Print additional package status messages", 1353 ) 1354 parser.add_argument( 1355 "-r", "--repeat", help="Repeat tests N times", type=int, default=1 1356 ) 1357 parser.add_argument( 1358 "-b", 1359 "--branch", 1360 dest="BRANCH", 1361 required=False, 1362 help="Branch to target for dependent repositories", 1363 default="master", 1364 ) 1365 parser.add_argument( 1366 "-n", 1367 "--noformat", 1368 dest="FORMAT", 1369 action="store_false", 1370 required=False, 1371 help="Whether or not to run format code", 1372 ) 1373 args = parser.parse_args(sys.argv[1:]) 1374 WORKSPACE = args.WORKSPACE 1375 UNIT_TEST_PKG = args.PACKAGE 1376 TEST_ONLY = args.TEST_ONLY 1377 INTEGRATION_TEST = args.INTEGRATION_TEST 1378 BRANCH = args.BRANCH 1379 FORMAT_CODE = args.FORMAT 1380 if args.verbose: 1381 1382 def printline(*line): 1383 for arg in line: 1384 print(arg, end=" ") 1385 print() 1386 1387 else: 1388 1389 def printline(*line): 1390 pass 1391 1392 CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG) 1393 1394 # Run format-code.sh, which will in turn call any repo-level formatters. 1395 if FORMAT_CODE: 1396 check_call_cmd( 1397 os.path.join( 1398 WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh" 1399 ), 1400 CODE_SCAN_DIR, 1401 ) 1402 1403 # Check to see if any files changed 1404 check_call_cmd( 1405 "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code" 1406 ) 1407 1408 # Check if this repo has a supported make infrastructure 1409 pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR) 1410 if not pkg.build_system(): 1411 print("No valid build system, exit") 1412 sys.exit(0) 1413 1414 prev_umask = os.umask(000) 1415 1416 # Determine dependencies and add them 1417 dep_added = dict() 1418 dep_added[UNIT_TEST_PKG] = False 1419 1420 # Create dependency tree 1421 dep_tree = DepTree(UNIT_TEST_PKG) 1422 build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH) 1423 1424 # Reorder Dependency Tree 1425 for pkg_name, regex_str in DEPENDENCIES_REGEX.items(): 1426 dep_tree.ReorderDeps(pkg_name, regex_str) 1427 if args.verbose: 1428 dep_tree.PrintTree() 1429 1430 install_list = dep_tree.GetInstallList() 1431 1432 # We don't want to treat our package as a dependency 1433 install_list.remove(UNIT_TEST_PKG) 1434 1435 # Install reordered dependencies 1436 for dep in install_list: 1437 build_and_install(dep, False) 1438 1439 # Run package unit tests 1440 build_and_install(UNIT_TEST_PKG, True) 1441 1442 os.umask(prev_umask) 1443 1444 # Run any custom CI scripts the repo has, of which there can be 1445 # multiple of and anywhere in the repository. 1446 ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR) 1447 if ci_scripts: 1448 os.chdir(CODE_SCAN_DIR) 1449 for ci_script in ci_scripts: 1450 check_call_cmd(ci_script) 1451