xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 0774452e7c75a6030442ab03a5ad7b90ad022e69)
1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10import argparse
11import json
12import multiprocessing
13import os
14import platform
15import re
16import resource
17import shutil
18import subprocess
19import sys
20import tempfile
21from subprocess import CalledProcessError, check_call
22from tempfile import TemporaryDirectory
23from urllib.parse import urljoin
24
25from git import Repo
26
27# interpreter is not used directly but this resolves dependency ordering
28# that would be broken if we didn't include it.
29from mesonbuild import interpreter  # noqa: F401
30from mesonbuild import optinterpreter, options
31from mesonbuild.mesonlib import version_compare as meson_version_compare
32from mesonbuild.options import OptionKey, OptionStore
33
34
class DepTree:
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or match:
            new_paths.append(path + [self.name])
        # Collect every matching path from the subtrees as well.
        for child in self.children:
            full_path = path + [self.name]
            for found in child.GetPathRegex(name, regex_str, full_path):
                new_paths.append(found)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends at 'name' itself; nothing to do if
        # the regex matches never include it.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Everything matched after 'name' (and not already under it)
        # becomes a child of 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(" " * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
228
229
def check_call_cmd(*cmd, **kwargs):
    """
    Echo the working directory and the command about to run, then execute
    it via check_call, raising CalledProcessError on failure.

    Parameter descriptions:
    cmd                 Command and arguments, one element each
    kwargs              Extra keyword arguments forwarded to check_call
    """
    location = os.getcwd()
    printline(location, ">", " ".join(cmd))
    check_call(cmd, **kwargs)
241
242
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location, reusing an existing checkout when present.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Already cloned on a previous run; nothing to do.
    if os.path.exists(os.path.join(pkg_dir, ".git")):
        return pkg_dir
    pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        repo = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        printline("Input branch not found, default to master")
        repo = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return repo.working_dir
267
268
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        # `make -n` dry-runs the target; it fails if the target is unknown.
        # Use subprocess.DEVNULL (as the rest of this file does) instead of
        # manually opening os.devnull.
        check_call(
            ["make", "-n", target],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return True
    except CalledProcessError:
        return False
284
285
# Base `make` invocation shared by all autotools build/test steps:
# parallelism is bounded by both job count and system load average.
make_parallel = [
    "make",
    # Run enough jobs to saturate all the cpus
    "-j",
    str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    "-l",
    str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    "-O",
]
297
298
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd("sudo", "-n", "--", "ldconfig")

    pkg = Package()
    # Test builds run the full test phase; dependency builds just install.
    action = pkg.test if build_for_testing else pkg.install
    action()
318
319
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict; this package's entry is set True
    once all of its dependencies have been processed.
    """
    if not dep_tree:
        dep_tree = head

    # Only the first line of the cache file is consulted; it holds the names
    # of dependencies that are already available in the environment.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if not build:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): this is a substring test against the cache line, so a
        # dependency whose name is contained in another cached name also
        # matches — confirm that is intended.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(
                dep, dep_pkgdir, dep_added, head, branch, new_child
            )
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure: we re-entered a package whose
                # processing has started (False) but not finished.
                raise Exception("Cyclic dependencies found in " + name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
375
376
def run_cppcheck():
    """
    Run cppcheck over the compile database in ./build, printing a message
    if it reports errors. Skipped when cppcheck is disabled or no compile
    database exists.
    """
    compile_db = os.path.join("build", "compile_commands.json")
    if not os.path.exists(compile_db) or NO_CPPCHECK:
        return None

    with TemporaryDirectory() as cpp_dir:
        # http://cppcheck.sourceforge.net/manual.pdf
        cppcheck_args = [
            "cppcheck",
            "-j",
            str(multiprocessing.cpu_count()),
            "--enable=style,performance,portability,missingInclude",
            "--inline-suppr",
            "--suppress=useStlAlgorithm",
            "--suppress=unusedStructMember",
            "--suppress=postfixOperator",
            "--suppress=unreadVariable",
            "--suppress=knownConditionTrueFalse",
            "--library=googletest",
            "--project=build/compile_commands.json",
            f"--cppcheck-build-dir={cpp_dir}",
        ]
        try:
            check_call_cmd(*cppcheck_args)
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
404
405
def valgrind_rlimit_nofile(soft=2048, hard=4096):
    """Set this process's RLIMIT_NOFILE soft/hard limits (used as a
    preexec_fn so valgrind children get bounded fd limits)."""
    limits = (soft, hard)
    resource.setrlimit(resource.RLIMIT_NOFILE, limits)
408
409
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform
    """
    # A tiny probe program: a heap strcmp plus strerror_r/printf, which
    # exercise the libc paths known to trip valgrind on some platforms.
    probe_lines = [
        "#include <errno.h>",
        "#include <stdio.h>",
        "#include <stdlib.h>",
        "#include <string.h>",
        "int main() {",
        "char *heap_str = malloc(16);",
        'strcpy(heap_str, "RandString");',
        'int res = strcmp("RandString", heap_str);',
        "free(heap_str);",
        "char errstr[64];",
        "strerror_r(EINVAL, errstr, sizeof(errstr));",
        'printf("%s\\n", errstr);',
        "return res;",
        "}",
    ]
    with tempfile.TemporaryDirectory() as temp:
        src = os.path.join(temp, "unit-test-vg.c")
        exe = os.path.join(temp, "unit-test-vg")
        with open(src, "w") as handle:
            handle.write("\n".join(probe_lines) + "\n")
        check_call(
            ["gcc", "-O2", "-o", exe, src],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        try:
            check_call(
                ["valgrind", "--error-exitcode=99", exe],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                preexec_fn=valgrind_rlimit_nofile,
            )
        except CalledProcessError:
            sys.stderr.write("###### Platform is not valgrind safe ######\n")
            return False
        return True
448
449
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASAN/UBSAN enabled; any
    failure (missing compiler, unsupported platform, runtime error) is
    treated as "not safe".
    """
    src = "unit-test-sanitize.c"
    exe = "./unit-test-sanitize"
    with open(src, "w") as h:
        h.write("int main() { return 0; }\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                [
                    "gcc",
                    "-O2",
                    "-fsanitize=address",
                    "-fsanitize=undefined",
                    "-o",
                    exe,
                    src,
                ],
                stdout=devnull,
                stderr=devnull,
            )
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == "ppc64le":
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The exe is never created when compilation fails; an unguarded
        # os.remove(exe) would then raise FileNotFoundError out of this
        # finally block and mask the intended `return False`.
        for scratch in (src, exe):
            if os.path.exists(scratch):
                os.remove(scratch)
488
489
def maybe_make_valgrind():
    """
    Run the package's unit tests under valgrind via `make check-valgrind`
    when the platform supports it and the target exists; skip otherwise.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists("check-valgrind"):
        return

    try:
        check_call_cmd(
            *(make_parallel + ["check-valgrind"]),
            preexec_fn=valgrind_rlimit_nofile,
        )
    except CalledProcessError:
        # Dump any per-suite logs to aid debugging before failing.
        log_re = re.compile("test-suite-[a-z]+.log")
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                if log_re.search(f):
                    check_call_cmd("cat", os.path.join(root, f))
        raise Exception("Valgrind tests failed")
516
517
def maybe_make_coverage():
    """
    Run `make check-code-coverage` when the package provides that target;
    skip silently when it does not.
    """
    if not make_target_exists("check-code-coverage"):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ["check-code-coverage"]))
    except CalledProcessError:
        raise Exception("Code coverage failed")
533
534
class BuildSystem(object):
    """
    Common interface for build-system drivers (Autotools, Meson, CMake, ...).

    Each driver knows how to configure, build, install, test and analyze a
    package. Keeping these phases separate lets callers decide whether a
    package is merely installed as a dependency or also tested and analyzed.
    """

    def __init__(self, package, path):
        """Set up the build-system-independent package properties.

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = path if path else "."
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Report whether this driver can handle the package's build system.

        Returns True when it can, False otherwise. Implementations usually
        check for the presence of the build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies.

        Returns a list of dependency names; an empty list when the package
        requires none. Implementations typically extract this information
        from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building.

        Raises an exception on configuration failure.

        Keyword arguments:
        build_for_testing: When True, configure for testing rather than for
                           installation as a dependency of the package under
                           test — generally debug info, low optimisation and
                           possibly sanitizers.

        Implementations usually invoke the build system tooling to generate
        Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing.

        Raises an exception if the build fails. Usually implemented by
        invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use.

        Raises an exception if installation fails. Usually implemented by
        invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the package's test suite.

        Raises an exception if the build or the tests fail. Usually
        implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase.

        Raises an exception if analysis fails. Tools needing build-system
        injection (e.g. scan-build) hook in here; build-system-independent
        analyzers may also live here at the cost of some duplication across
        driver implementations.
        """
        raise NotImplementedError
638
639
class Autotools(BuildSystem):
    # BuildSystem driver for packages built with GNU Autotools
    # (configure.ac + generated ./configure script).

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An Autotools package is identified by configure.ac at its root.
        return os.path.isfile(os.path.join(self.path, "configure.ac"))

    def dependencies(self):
        """Extract openbmc dependencies by running autoconf over a copy of
        configure.ac in which the dependency-declaring macros are redefined
        to emit greppable <MACRO>_START ... <MACRO>_END markers around the
        macro argument that names the dependencies.
        """
        configure_ac = os.path.join(self.path, "configure.ac")

        contents = ""
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # m4 argument indices are 1-based, hence the +1 on the offset.
            contents += (
                "m4_define(["
                + macro
                + "], ["
                + macro
                + "_START$"
                + str(DEPENDENCIES_OFFSET[macro] + 1)
                + macro
                + "_END])\n"
            )
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Feed the augmented document to autoconf on stdin ("-").
        autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"]
        autoconf_process = subprocess.Popen(
            autoconf_cmdline,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        document = contents.encode("utf-8")
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = "(" + macro + ")_START(.*?)" + macro + "_END"
            for match in re.compile(pattern).finditer(stdout.decode("utf-8")):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            # Any whitespace-separated word that starts with a known
            # dependency name maps to the corresponding openbmc package.
            for potential_dep in deptext.split(" "):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return "--" + ("enable" if enabled else "disable") + "-" + flag

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature("silent-rules", False),
            self._configure_feature("examples", build_for_testing),
            self._configure_feature("tests", build_for_testing),
            self._configure_feature("itests", INTEGRATION_TEST),
        ]
        conf_flags.extend(
            [
                self._configure_feature("code-coverage", False),
                self._configure_feature("valgrind", build_for_testing),
            ]
        )
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found to generate ./configure.
        for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]:
            if os.path.exists(bootstrap):
                check_call_cmd("./" + bootstrap)
                break
        check_call_cmd("./configure", *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        # Install needs root; refresh the linker cache afterwards.
        check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"]))
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        try:
            cmd = make_parallel + ["check"]
            # Repeat the suite to flush out intermittent failures.
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            # Surface the automake test-suite log before failing.
            for root, _, files in os.walk(os.getcwd()):
                if "test-suite.log" not in files:
                    continue
                check_call_cmd("cat", os.path.join(root, "test-suite.log"))
            raise Exception("Unit tests failed")

    def analyze(self):
        run_cppcheck()
754
755
class CMake(BuildSystem):
    """BuildSystem driver for packages built with CMake."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A CMake package is identified by a top-level CMakeLists.txt.
        return os.path.isfile(os.path.join(self.path, "CMakeLists.txt"))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        cmake_args = [
            "cmake",
            "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
            "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'",
        ]
        if INTEGRATION_TEST:
            cmake_args.append("-DITESTS=ON")
        cmake_args.append(".")
        check_call_cmd(*cmake_args)

    def build(self):
        jobs = str(multiprocessing.cpu_count())
        check_call_cmd("cmake", "--build", ".", "--", "-j", jobs)

    def install(self):
        check_call_cmd("sudo", "cmake", "--install", ".")
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        # ctest only makes sense when the package registered a "test" target.
        if make_target_exists("test"):
            check_call_cmd("ctest", ".")

    def analyze(self):
        if os.path.isfile(".clang-tidy"):
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                # clang-tidy needs to run on a clang-specific build
                check_call_cmd(
                    "cmake",
                    "-DCMAKE_C_COMPILER=clang",
                    "-DCMAKE_CXX_COMPILER=clang++",
                    "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
                    "-H.",
                    "-B" + build_dir,
                )
                check_call_cmd(
                    "run-clang-tidy", "-header-filter=.*", "-p", build_dir
                )

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
822
823
824class Meson(BuildSystem):
825    @staticmethod
826    def _project_name(path):
827        doc = subprocess.check_output(
828            ["meson", "introspect", "--projectinfo", path],
829            stderr=subprocess.STDOUT,
830        ).decode("utf-8")
831        return json.loads(doc)["descriptive_name"]
832
    def __init__(self, package=None, path=None):
        # All property setup lives in the common BuildSystem initializer.
        super(Meson, self).__init__(package, path)
835
836    def probe(self):
837        return os.path.isfile(os.path.join(self.path, "meson.build"))
838
839    def dependencies(self):
840        meson_build = os.path.join(self.path, "meson.build")
841        if not os.path.exists(meson_build):
842            return []
843
844        found_deps = []
845        for root, dirs, files in os.walk(self.path):
846            if "meson.build" not in files:
847                continue
848            with open(os.path.join(root, "meson.build"), "rt") as f:
849                build_contents = f.read()
850            pattern = r"dependency\('([^']*)'.*?\),?"
851            for match in re.finditer(pattern, build_contents):
852                group = match.group(1)
853                maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group)
854                if maybe_dep is not None:
855                    found_deps.append(maybe_dep)
856
857        return found_deps
858
859    def _parse_options(self, options_file):
860        """
861        Returns a set of options defined in the provides meson_options.txt file
862
863        Parameters:
864        options_file        The file containing options
865        """
866        store = OptionStore(is_cross=False)
867        oi = optinterpreter.OptionInterpreter(store, "")
868        oi.process(options_file)
869        return oi.options
870
871    def _configure_boolean(self, val):
872        """
873        Returns the meson flag which signifies the value
874
875        True is true which requires the boolean.
876        False is false which disables the boolean.
877
878        Parameters:
879        val                 The value being converted
880        """
881        if val is True:
882            return "true"
883        elif val is False:
884            return "false"
885        else:
886            raise Exception("Bad meson boolean value")
887
888    def _configure_feature(self, val):
889        """
890        Returns the meson flag which signifies the value
891
892        True is enabled which requires the feature.
893        False is disabled which disables the feature.
894        None is auto which autodetects the feature.
895
896        Parameters:
897        val                 The value being converted
898        """
899        if val is True:
900            return "enabled"
901        elif val is False:
902            return "disabled"
903        elif val is None:
904            return "auto"
905        else:
906            raise Exception("Bad meson feature value")
907
908    def _configure_option(self, opts, key, val):
909        """
910        Returns the meson flag which signifies the value
911        based on the type of the opt
912
913        Parameters:
914        opt                 The meson option which we are setting
915        val                 The value being converted
916        """
917        if isinstance(opts[key], options.UserBooleanOption):
918            str_val = self._configure_boolean(val)
919        elif isinstance(opts[key], options.UserFeatureOption):
920            str_val = self._configure_feature(val)
921        else:
922            raise Exception("Unknown meson option type")
923        return "-D{}={}".format(key, str_val)
924
925    def get_configure_flags(self, build_for_testing):
926        self.build_for_testing = build_for_testing
927        meson_options = {}
928        if os.path.exists("meson.options"):
929            meson_options = self._parse_options("meson.options")
930        elif os.path.exists("meson_options.txt"):
931            meson_options = self._parse_options("meson_options.txt")
932        meson_flags = [
933            "-Db_colorout=never",
934            "-Dwerror=true",
935            "-Dwarning_level=3",
936            "-Dcpp_args='-DBOOST_USE_VALGRIND'",
937        ]
938        if build_for_testing:
939            # -Ddebug=true -Doptimization=g is helpful for abi-dumper but isn't a combination that
940            # is supported by meson's build types. Configure it manually.
941            meson_flags.append("-Ddebug=true")
942            meson_flags.append("-Doptimization=g")
943        else:
944            meson_flags.append("--buildtype=debugoptimized")
945        if OptionKey("tests") in meson_options:
946            meson_flags.append(
947                self._configure_option(
948                    meson_options, OptionKey("tests"), build_for_testing
949                )
950            )
951        if OptionKey("examples") in meson_options:
952            meson_flags.append(
953                self._configure_option(
954                    meson_options, OptionKey("examples"), build_for_testing
955                )
956            )
957        if OptionKey("itests") in meson_options:
958            meson_flags.append(
959                self._configure_option(
960                    meson_options, OptionKey("itests"), INTEGRATION_TEST
961                )
962            )
963        if MESON_FLAGS.get(self.package) is not None:
964            meson_flags.extend(MESON_FLAGS.get(self.package))
965        return meson_flags
966
967    def configure(self, build_for_testing):
968        meson_flags = self.get_configure_flags(build_for_testing)
969        try:
970            check_call_cmd(
971                "meson", "setup", "--reconfigure", "build", *meson_flags
972            )
973        except Exception:
974            shutil.rmtree("build", ignore_errors=True)
975            check_call_cmd("meson", "setup", "build", *meson_flags)
976
977        self.package = Meson._project_name("build")
978
979    def build(self):
980        check_call_cmd("ninja", "-C", "build")
981
982    def install(self):
983        check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install")
984        check_call_cmd("sudo", "-n", "--", "ldconfig")
985
986    def test(self):
987        # It is useful to check various settings of the meson.build file
988        # for compatibility, such as meson_version checks.  We shouldn't
989        # do this in the configure path though because it affects subprojects
990        # and dependencies as well, but we only want this applied to the
991        # project-under-test (otherwise an upstream dependency could fail
992        # this check without our control).
993        self._extra_meson_checks()
994
995        try:
996            test_args = ("--repeat", str(args.repeat), "-C", "build")
997            check_call_cmd("meson", "test", "--print-errorlogs", *test_args)
998
999        except CalledProcessError:
1000            raise Exception("Unit tests failed")
1001
1002    def _setup_exists(self, setup):
1003        """
1004        Returns whether the meson build supports the named test setup.
1005
1006        Parameter descriptions:
1007        setup              The setup target to check
1008        """
1009        try:
1010            with open(os.devnull, "w"):
1011                output = subprocess.check_output(
1012                    [
1013                        "meson",
1014                        "test",
1015                        "-C",
1016                        "build",
1017                        "--setup",
1018                        "{}:{}".format(self.package, setup),
1019                        "__likely_not_a_test__",
1020                    ],
1021                    stderr=subprocess.STDOUT,
1022                )
1023        except CalledProcessError as e:
1024            output = e.output
1025        output = output.decode("utf-8")
1026        return not re.search("Unknown test setup '[^']+'[.]", output)
1027
1028    def _maybe_valgrind(self):
1029        """
1030        Potentially runs the unit tests through valgrind for the package
1031        via `meson test`. The package can specify custom valgrind
1032        configurations by utilizing add_test_setup() in a meson.build
1033        """
1034        if not is_valgrind_safe():
1035            sys.stderr.write("###### Skipping valgrind ######\n")
1036            return
1037        try:
1038            if self._setup_exists("valgrind"):
1039                check_call_cmd(
1040                    "meson",
1041                    "test",
1042                    "-t",
1043                    "10",
1044                    "-C",
1045                    "build",
1046                    "--print-errorlogs",
1047                    "--setup",
1048                    "{}:valgrind".format(self.package),
1049                    preexec_fn=valgrind_rlimit_nofile,
1050                )
1051            else:
1052                check_call_cmd(
1053                    "meson",
1054                    "test",
1055                    "-t",
1056                    "10",
1057                    "-C",
1058                    "build",
1059                    "--print-errorlogs",
1060                    "--wrapper",
1061                    "valgrind --error-exitcode=1",
1062                    preexec_fn=valgrind_rlimit_nofile,
1063                )
1064        except CalledProcessError:
1065            raise Exception("Valgrind tests failed")
1066
    def analyze(self):
        """
        Run the post-test analysis suite for the package: valgrind,
        clang-tidy (or clang's scan-build), address/UB sanitizers,
        code coverage, and cppcheck.
        """
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile(".clang-tidy"):
            clang_env = os.environ.copy()
            clang_env["CC"] = "clang"
            clang_env["CXX"] = "clang++"
            # Clang-20 currently has some issue with libstdcpp's
            # std::forward_like which results in a bunch of compile errors.
            # Adding -fno-builtin-std-forward_like causes them to go away.
            clang_env["CXXFLAGS"] = "-fno-builtin-std-forward_like"
            clang_env["CC_LD"] = "lld"
            clang_env["CXX_LD"] = "lld"
            # Use a throw-away build directory so the clang configuration
            # doesn't disturb the main gcc "build" tree used below.
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                check_call_cmd("meson", "setup", build_dir, env=clang_env)
                if not os.path.isfile(".openbmc-no-clang"):
                    check_call_cmd(
                        "meson", "compile", "-C", build_dir, env=clang_env
                    )
                try:
                    check_call_cmd(
                        "ninja",
                        "-C",
                        build_dir,
                        "clang-tidy-fix",
                        env=clang_env,
                    )
                except subprocess.CalledProcessError:
                    # clang-tidy-fix may have rewritten sources; dump the
                    # diff into the CI log before propagating the failure.
                    check_call_cmd(
                        "git",
                        "-C",
                        CODE_SCAN_DIR,
                        "--no-pager",
                        "diff",
                        env=clang_env,
                    )
                    raise
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd("ninja", "-C", "build", "scan-build")

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            meson_flags = self.get_configure_flags(self.build_for_testing)
            meson_flags.append("-Db_sanitize=address,undefined")
            try:
                check_call_cmd(
                    "meson", "setup", "--reconfigure", "build", *meson_flags
                )
            except Exception:
                # Reconfigure can fail on a stale tree; start clean.
                shutil.rmtree("build", ignore_errors=True)
                check_call_cmd("meson", "setup", "build", *meson_flags)
            check_call_cmd(
                "meson",
                "test",
                "-C",
                "build",
                "--print-errorlogs",
                "--logbase",
                "testlog-ubasan",
            )
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Return the tree to a non-sanitized configuration for the
            # coverage run below.
            meson_flags = [
                s.replace(
                    "-Db_sanitize=address,undefined", "-Db_sanitize=none"
                )
                for s in meson_flags
            ]
            try:
                check_call_cmd(
                    "meson", "setup", "--reconfigure", "build", *meson_flags
                )
            except Exception:
                shutil.rmtree("build", ignore_errors=True)
                check_call_cmd("meson", "setup", "build", *meson_flags)
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd("meson", "configure", "build", "-Db_coverage=true")
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk("build"):
            if any([f.endswith(".gcda") for f in files]):
                check_call_cmd("ninja", "-C", "build", "coverage-html")
                break
        check_call_cmd("meson", "configure", "build", "-Db_coverage=false")
        run_cppcheck()
1164
1165    def _extra_meson_checks(self):
1166        with open(os.path.join(self.path, "meson.build"), "rt") as f:
1167            build_contents = f.read()
1168
1169        # Find project's specified meson_version.
1170        meson_version = None
1171        pattern = r"meson_version:[^']*'([^']*)'"
1172        for match in re.finditer(pattern, build_contents):
1173            group = match.group(1)
1174            meson_version = group
1175
1176        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1177        # identify this.  Add to our unit-test checks so that we don't
1178        # get a meson.build missing this.
1179        pattern = r"'cpp_std=c\+\+20'"
1180        for match in re.finditer(pattern, build_contents):
1181            if not meson_version or not meson_version_compare(
1182                meson_version, ">=0.57"
1183            ):
1184                raise Exception(
1185                    "C++20 support requires specifying in meson.build: "
1186                    + "meson_version: '>=0.57'"
1187                )
1188
1189        # C++23 requires at least Meson 1.1.1 but Meson itself doesn't
1190        # identify this.  Add to our unit-test checks so that we don't
1191        # get a meson.build missing this.
1192        pattern = r"'cpp_std=c\+\+23'"
1193        for match in re.finditer(pattern, build_contents):
1194            if not meson_version or not meson_version_compare(
1195                meson_version, ">=1.1.1"
1196            ):
1197                raise Exception(
1198                    "C++23 support requires specifying in meson.build: "
1199                    + "meson_version: '>=1.1.1'"
1200                )
1201
1202        if "get_variable(" in build_contents:
1203            if not meson_version or not meson_version_compare(
1204                meson_version, ">=0.58"
1205            ):
1206                raise Exception(
1207                    "dep.get_variable() with positional argument requires "
1208                    + "meson_version: '>=0.58'"
1209                )
1210
1211        if "relative_to(" in build_contents:
1212            if not meson_version or not meson_version_compare(
1213                meson_version, ">=1.3.0"
1214            ):
1215                raise Exception(
1216                    "fs.relative_to() requires meson_version: '>=1.3.0'"
1217                )
1218
1219
class Package(object):
    """
    Wraps a repository checkout and dispatches build/test work to
    whichever supported build system(s) the repository uses.
    """

    def __init__(self, name=None, path=None):
        # Probe order also defines preference order for build_system().
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of each supported build system whose probe()
        succeeds for this repository."""
        for system in self.supported:
            candidate = system(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the build system instance of type `preferred`, or the
        first probed one when no preference is given; None if none apply."""
        systems = list(self.build_systems())

        if not systems:
            return None

        if preferred:
            by_type = {type(system): system for system in systems}
            return by_type[preferred]

        return systems[0]

    def install(self, system=None):
        """Configure (non-test mode), build, and install the package."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full configure/build/install/test pipeline against a
        single build system, plus analysis unless TEST_ONLY is set."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Test the package with every applicable build system."""
        for system in self.build_systems():
            self._test_one(system)
1261
1262
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    wanted = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        # Prune meson subproject directories that are backed by a .wrap
        # file: files inside them belong to the wrapped project, not to
        # the repository under test.
        if os.path.split(root)[-1] == "subprojects":
            for entry in files:
                if not entry.endswith(".wrap"):
                    continue
                wrapped = ".".join(entry.split(".")[0:-1])
                if wrapped in dirs:
                    dirs.remove(wrapped)
        for name in wanted:
            if name in files:
                matches.append(os.path.join(root, name))
    return matches
1289
1290
if __name__ == "__main__":
    # Repo-specific build configuration tables.  Keys are git repository
    # names; consumers elsewhere in this script look packages up here by
    # name when configuring them.

    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra ./configure arguments for autotools-based repositories.
    CONFIGURE_FLAGS = {
        "phosphor-logging": [
            "--enable-metadata-processing",
            "--enable-openpower-pel-extension",
            "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml",
        ]
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra -D arguments appended to `meson setup` for meson-based
    # repositories (see Meson.get_configure_flags).
    MESON_FLAGS = {
        "phosphor-dbus-interfaces": [
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
        "phosphor-logging": ["-Dopenpower-pel-extension=enabled"],
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macro arguments (and meson dependency() names via
    # PKG_CHECK_MODULES) to the OpenBMC repository that provides them.
    DEPENDENCIES = {
        "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"},
        "AC_CHECK_HEADER": {
            "host-ipmid": "phosphor-host-ipmid",
            "blobs-ipmid": "phosphor-ipmi-blobs",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging/log.hpp": "phosphor-logging",
        },
        "AC_PATH_PROG": {"sdbus++": "sdbusplus"},
        "PKG_CHECK_MODULES": {
            "phosphor-dbus-interfaces": "phosphor-dbus-interfaces",
            "libipmid": "phosphor-host-ipmid",
            "libipmid-host": "phosphor-host-ipmid",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging": "phosphor-logging",
            "phosphor-snmp": "phosphor-snmp",
            "ipmiblob": "ipmi-blob-tool",
            "hei": "openpower-libhei",
            "phosphor-ipmi-blobs": "phosphor-ipmi-blobs",
            "libcr51sign": "google-misc",
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which positional argument of each macro carries the
    # dependency name to look up in DEPENDENCIES.
    DEPENDENCIES_OFFSET = {
        "AC_CHECK_LIB": 0,
        "AC_CHECK_HEADER": 0,
        "AC_PATH_PROG": 1,
        "PKG_CHECK_MODULES": 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Used below to reorder the dependency tree (see ReorderDeps) so
    # that deps matching the regex are handled before the named repo.
    DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"}
1350
    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-w",
        "--workspace",
        dest="WORKSPACE",
        required=True,
        help="Workspace directory location(i.e. /home)",
    )
    parser.add_argument(
        "-p",
        "--package",
        dest="PACKAGE",
        required=True,
        help="OpenBMC package to be unit tested",
    )
    parser.add_argument(
        "-t",
        "--test-only",
        dest="TEST_ONLY",
        action="store_true",
        required=False,
        default=False,
        help="Only run test cases, no other validation",
    )
    parser.add_argument(
        "--no-cppcheck",
        dest="NO_CPPCHECK",
        action="store_true",
        required=False,
        default=False,
        help="Do not run cppcheck",
    )
    # --integration-tests / --no-integration-tests toggle the same
    # INTEGRATION_TEST flag; make them mutually exclusive (default on).
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument(
        "--integration-tests",
        dest="INTEGRATION_TEST",
        action="store_true",
        required=False,
        default=True,
        help="Enable integration tests [default].",
    )
    arg_inttests.add_argument(
        "--no-integration-tests",
        dest="INTEGRATION_TEST",
        action="store_false",
        required=False,
        help="Disable integration tests.",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Print additional package status messages",
    )
    parser.add_argument(
        "-r", "--repeat", help="Repeat tests N times", type=int, default=1
    )
    parser.add_argument(
        "-b",
        "--branch",
        dest="BRANCH",
        required=False,
        help="Branch to target for dependent repositories",
        default="master",
    )
    parser.add_argument(
        "-n",
        "--noformat",
        dest="FORMAT",
        action="store_false",
        required=False,
        help="Whether or not to run format code",
    )
    args = parser.parse_args(sys.argv[1:])
    # Promote the parsed arguments to the module-level names that the
    # classes and helpers above read as globals.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    NO_CPPCHECK = args.NO_CPPCHECK
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() prints space-separated status messages when --verbose
    # was given and is a no-op otherwise.
    if args.verbose:

        def printline(*line):
            """Print each argument separated by a space, then a newline."""
            for arg in line:
                print(arg, end=" ")
            print()

    else:

        def printline(*line):
            """Verbose output disabled: discard the message."""
            pass
1444
    # Absolute path of the package checkout inside the workspace.
    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # Run format-code.sh, which will in turn call any repo-level formatters.
    if FORMAT_CODE:
        check_call_cmd(
            os.path.join(
                WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh"
            ),
            CODE_SCAN_DIR,
        )

        # Check to see if any files changed; a non-empty diff means the
        # repo was not formatted, and --exit-code makes git fail the build.
        check_call_cmd(
            "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code"
        )

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask so files created during the build/install steps
    # are not permission-masked; restored after the build below.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree (build_dep_tree is defined earlier in this
    # file) and populate it by scanning the package's build files.
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1504