xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision ed4733918d2d5c22d78d315274e49e77395f075c)
1#!/usr/bin/env python3
2
3"""
This script determines the given package's openbmc dependencies from its
configure.ac file, then downloads, configures, builds, and installs each of
those dependencies. Finally, the given package itself is configured, built,
and installed prior to executing its unit tests.
8"""
9
10import argparse
11import json
12import multiprocessing
13import os
14import platform
15import re
16import resource
17import shutil
18import subprocess
19import sys
20import tempfile
21from subprocess import CalledProcessError, check_call
22from tempfile import TemporaryDirectory
23from urllib.parse import urljoin
24
25from git import Repo
26from git.exc import GitCommandError
27
28# interpreter is not used directly but this resolves dependency ordering
29# that would be broken if we didn't include it.
30from mesonbuild import interpreter  # noqa: F401
31from mesonbuild import optinterpreter, options
32from mesonbuild.mesonlib import version_compare as meson_version_compare
33from mesonbuild.options import OptionKey, OptionStore
34
35
class DepTree:
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first child node with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        # Delete by index instead of calling list.remove() while iterating.
        for i, child in enumerate(self.children):
            if child.name == name:
                del self.children[i]
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if path is None:
            path = []
        if self.name == name:
            # Build a fresh list rather than appending to the caller's list.
            return path + [self.name]
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if path is None:
            path = []
        new_paths = []
        full_path = path + [self.name]
        if self.name == name or re.match(regex_str, self.name):
            new_paths.append(full_path)
        for child in self.children:
            # Accumulate every matching path found in the subtree.
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        from_node = self.GetNode(from_name)
        # Detach from the old parent before re-attaching under the new one.
        self.GetParentNode(from_name).RemoveChild(from_name)
        self.GetNode(to_name).AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends at 'name' itself.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Every match to the right of 'name' is re-parented under it, unless
        # 'name' is already an ancestor on that path.
        for path in paths[name_index + 1:]:
            if name not in path:
                self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names (dependencies before dependents).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(" " * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
229
230
def check_call_cmd(*cmd, **kwargs):
    """
    Print the current working directory and the command being run, then
    execute the command with subprocess.check_call.

    Parameter descriptions:
    cmd                 Strings forming the complete command
    kwargs              Extra keyword arguments forwarded to check_call
    """
    joined = " ".join(cmd)
    printline(os.getcwd(), ">", joined)
    check_call(cmd, **kwargs)
242
243
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Reuse an existing checkout rather than cloning again.
    if os.path.exists(os.path.join(pkg_dir, ".git")):
        return pkg_dir
    pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # Prefer the requested branch when it exists on the remote.
        repo = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except GitCommandError:
        # Fall back to master when the requested branch is absent.
        printline("Input branch not found, default to master")
        repo = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return repo.working_dir
268
269
def make_target_exists(target):
    """
    Check the makefile in the current directory for the given target so we
    know whether it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when `make -n <target>` exits successfully, False otherwise.
    """
    try:
        # `make -n` dry-runs the target; a non-zero exit means the target
        # does not exist (or its recipe cannot be resolved). Use
        # subprocess.DEVNULL (as elsewhere in this file) instead of manually
        # opening os.devnull.
        check_call(
            ["make", "-n", target],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return True
    except CalledProcessError:
        return False
285
286
# Base `make` invocation shared by all autotools phases: one job per CPU,
# backing off when the load average exceeds the CPU count, with output
# synchronized so logs aren't intermixed in stdout / stderr.
_ncpus = str(multiprocessing.cpu_count())
make_parallel = [
    "make",
    "-j",  # Run enough jobs to saturate all the cpus
    _ncpus,
    "-l",  # Don't start more jobs if the load avg is too high
    _ncpus,
    "-O",  # Synchronize the output
]
298
299
def build_and_install(name, build_for_testing=False):
    """
    Build and install the named package in the environment, optionally
    building its examples and test cases instead of installing.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd("sudo", "-n", "--", "ldconfig")

    pkg = Package()
    if build_for_testing:
        pkg.test()
        return
    pkg.install()
319
320
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict; also mutates the tree rooted at head.
    """
    # The first call passes no node, so start at the head of the tree.
    if not dep_tree:
        dep_tree = head

    # NOTE(review): /tmp/depcache is assumed to hold a single line listing
    # already-satisfied dependencies, presumably written earlier in the CI
    # pipeline — not visible in this file chunk; confirm.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if not build:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): substring containment against the raw cache line, so
        # a cached "foo" also matches a dependency named "foobar" — confirm
        # the cache entry format guarantees this cannot misfire.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            # False marks the dependency as "in progress"; it flips to True
            # only after its own subtree has been fully processed below.
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(
                dep, dep_pkgdir, dep_added, head, branch, new_child
            )
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure: we re-entered a package whose
                # subtree is still being processed (still marked False).
                raise Exception("Cyclic dependencies found in " + name)

    # Mark this package complete once all its dependencies are processed.
    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
376
377
def run_cppcheck():
    """Run cppcheck over build/compile_commands.json, unless disabled or the
    compilation database is missing. Prints a message on analysis errors."""
    if NO_CPPCHECK or not os.path.exists(
        os.path.join("build", "compile_commands.json")
    ):
        return None

    with TemporaryDirectory() as cpp_dir:
        # http://cppcheck.sourceforge.net/manual.pdf
        cmd = [
            "cppcheck",
            "-j",
            str(multiprocessing.cpu_count()),
            "--enable=style,performance,portability,missingInclude",
            "--inline-suppr",
            "--suppress=useStlAlgorithm",
            "--suppress=unusedStructMember",
            "--suppress=postfixOperator",
            "--suppress=unreadVariable",
            "--suppress=knownConditionTrueFalse",
            "--library=googletest",
            "--project=build/compile_commands.json",
            f"--cppcheck-build-dir={cpp_dir}",
        ]
        try:
            check_call_cmd(*cmd)
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
405
406
def valgrind_rlimit_nofile(soft=2048, hard=4096):
    """Set the process's open-file limits; used as a preexec_fn when
    launching valgrind elsewhere in this script."""
    limits = (soft, hard)
    resource.setrlimit(resource.RLIMIT_NOFILE, limits)
409
410
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform
    """
    with tempfile.TemporaryDirectory() as temp:
        src = os.path.join(temp, "unit-test-vg.c")
        exe = os.path.join(temp, "unit-test-vg")
        # A small probe program: heap allocate, copy/compare strings, and
        # call strerror_r, then run it under valgrind to see if valgrind
        # itself reports errors on this platform.
        program = (
            "#include <errno.h>\n"
            "#include <stdio.h>\n"
            "#include <stdlib.h>\n"
            "#include <string.h>\n"
            "int main() {\n"
            "char *heap_str = malloc(16);\n"
            'strcpy(heap_str, "RandString");\n'
            'int res = strcmp("RandString", heap_str);\n'
            "free(heap_str);\n"
            "char errstr[64];\n"
            "strerror_r(EINVAL, errstr, sizeof(errstr));\n"
            'printf("%s\\n", errstr);\n'
            "return res;\n"
            "}\n"
        )
        with open(src, "w") as h:
            h.write(program)
        check_call(
            ["gcc", "-O2", "-o", exe, src],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        try:
            check_call(
                ["valgrind", "--error-exitcode=99", exe],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                preexec_fn=valgrind_rlimit_nofile,
            )
        except CalledProcessError:
            sys.stderr.write("###### Platform is not valgrind safe ######\n")
            return False
        return True
449
450
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; if either
    step fails, or we are on ppc64le where the sanitizers are known broken,
    report sanitizers as unsafe.
    """
    src = "unit-test-sanitize.c"
    exe = "./unit-test-sanitize"
    with open(src, "w") as h:
        h.write("int main() { return 0; }\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                [
                    "gcc",
                    "-O2",
                    "-fsanitize=address",
                    "-fsanitize=undefined",
                    "-o",
                    exe,
                    src,
                ],
                stdout=devnull,
                stderr=devnull,
            )
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO: Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == "ppc64le":
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # Bug fix: if gcc failed, exe was never created and the unguarded
        # os.remove(exe) would raise FileNotFoundError from the finally
        # block, masking the intended `return False` above.
        for artifact in (src, exe):
            if os.path.exists(artifact):
                os.remove(artifact)
489
490
def maybe_make_valgrind():
    """
    Run the unit tests through valgrind via `make check-valgrind` when the
    package provides that target; skip silently otherwise.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists("check-valgrind"):
        return

    try:
        check_call_cmd(
            *(make_parallel + ["check-valgrind"]),
            preexec_fn=valgrind_rlimit_nofile,
        )
    except CalledProcessError:
        # Dump any valgrind test-suite logs to aid debugging before failing.
        for root, _, files in os.walk(os.getcwd()):
            logs = [f for f in files if re.search("test-suite-[a-z]+.log", f)]
            for f in logs:
                check_call_cmd("cat", os.path.join(root, f))
        raise Exception("Valgrind tests failed")
517
518
def maybe_make_coverage():
    """
    Run the unit tests under code coverage via `make check-code-coverage`
    when the package provides that target; skip silently otherwise.
    """
    if not make_target_exists("check-code-coverage"):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ["check-code-coverage"]))
    except CalledProcessError:
        raise Exception("Code coverage failed")
534
535
class BuildSystem(object):
    """
    Abstract driver for a package's build system.

    Build systems generally provide the means to configure, build, install
    and test software. Autotools, Meson, CMake and other concrete drivers
    implement these phases on top of this interface, separating them out so
    callers can control whether a package is merely installed or also
    tested and analyzed.
    """

    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build
        system.

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = path if path else "."
        real = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(real)
        self.build_for_testing = False

    def probe(self):
        """Report whether this driver can build the package.

        Return True if the driver can drive the package's build system,
        otherwise False. Typically implemented by checking for the presence
        of the build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Return the package's dependencies as a list.

        An empty list must be returned when no dependencies are required.
        Typically implemented by analysing and extracting data from the
        build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source tree ready for building.

        Must raise an exception if configuration fails.

        Keyword arguments:
        build_for_testing: When True, configure the package for testing
                           (generally debug information, low optimisation
                           and possibly sanitizers) rather than as an
                           installable dependency of the package under test.

        Typically implemented by invoking the build system tooling to
        generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Compile the software ready for installation and/or testing.

        Must raise an exception if the build fails. Typically implemented by
        invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the built software ready for use.

        Must raise an exception if installation fails. Like build(), this is
        generally implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package.

        Must raise an exception if building or testing fails. Like
        install(), this is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase.

        Must raise an exception if analysis fails. Analysis tools needing
        build-system injection (such as scan-build) hook in here; analyzers
        independent of the build system can also be invoked here, at the
        cost of possible duplication across driver implementations.
        """
        raise NotImplementedError
639
640
class Autotools(BuildSystem):
    def __init__(self, package=None, path=None):
        """Create an Autotools build-system driver."""
        super().__init__(package, path)

    def probe(self):
        """A package is autotools-based when it ships a configure.ac."""
        return os.path.isfile(os.path.join(self.path, "configure.ac"))

    def dependencies(self):
        """Extract openbmc dependencies by macro-expanding configure.ac.

        Each known dependency macro is overridden with an m4_define that
        wraps its interesting argument in START/END markers, autoconf is
        run over the result, and the marked text is mapped back to package
        names via the DEPENDENCIES table.
        """
        configure_ac = os.path.join(self.path, "configure.ac")

        # Prepend overrides so each macro's dependency argument appears in
        # the expanded output between recognizable START/END markers.
        contents = ""
        for macro in DEPENDENCIES.keys():
            contents += (
                f"m4_define([{macro}], "
                f"[{macro}_START${DEPENDENCIES_OFFSET[macro] + 1}"
                f"{macro}_END])\n"
            )
        with open(configure_ac, "rt") as f:
            contents += f.read()

        result = subprocess.run(
            ["autoconf", "-Wno-undefined", "-"],
            input=contents.encode("utf-8"),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        if not result.stdout:
            print(result.stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Collect (macro, dependency-text) pairs from the expanded output.
        expanded = result.stdout.decode("utf-8")
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = "(" + macro + ")_START(.*?)" + macro + "_END"
            for match in re.compile(pattern).finditer(expanded):
                matches.append((match.group(1), match.group(2)))

        # Map each referenced module to the openbmc package providing it.
        found_deps = []
        for macro, deptext in matches:
            for token in deptext.split(" "):
                for known_dep in DEPENDENCIES[macro].keys():
                    if token.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Return a ./configure flag string for the given feature.

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        state = "enable" if enabled else "disable"
        return f"--{state}-{flag}"

    def configure(self, build_for_testing):
        """Bootstrap (if a script exists) and run ./configure."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature("silent-rules", False),
            self._configure_feature("examples", build_for_testing),
            self._configure_feature("tests", build_for_testing),
            self._configure_feature("itests", INTEGRATION_TEST),
            self._configure_feature("code-coverage", False),
            self._configure_feature("valgrind", build_for_testing),
        ]
        # Add any necessary configure flags for package
        extra_flags = CONFIGURE_FLAGS.get(self.package)
        if extra_flags is not None:
            conf_flags.extend(extra_flags)
        for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]:
            if os.path.exists(bootstrap):
                check_call_cmd("./" + bootstrap)
                break
        check_call_cmd("./configure", *conf_flags)

    def build(self):
        """Build with make, parallelized across the available CPUs."""
        check_call_cmd(*make_parallel)

    def install(self):
        """Install as root and refresh the dynamic linker cache."""
        check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"]))
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        """Run `make check` args.repeat times, then valgrind and coverage.

        On failure, dump any test-suite.log files found under the current
        tree before raising.
        """
        try:
            check_cmd = make_parallel + ["check"]
            for _ in range(args.repeat):
                check_call_cmd(*check_cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if "test-suite.log" not in files:
                    continue
                check_call_cmd("cat", os.path.join(root, "test-suite.log"))
            raise Exception("Unit tests failed")

    def analyze(self):
        """Static analysis for autotools packages is cppcheck only."""
        run_cppcheck()
755
756
class CMake(BuildSystem):
    def __init__(self, package=None, path=None):
        """Create a CMake build-system driver."""
        super().__init__(package, path)

    def probe(self):
        """A package is cmake-based when it ships a top-level
        CMakeLists.txt."""
        return os.path.isfile(os.path.join(self.path, "CMakeLists.txt"))

    def dependencies(self):
        """CMake packages declare no openbmc dependencies we can parse."""
        return []

    def configure(self, build_for_testing):
        """Generate the build tree in-place; enable integration tests when
        requested."""
        self.build_for_testing = build_for_testing
        cmd = [
            "cmake",
            "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
            "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'",
        ]
        if INTEGRATION_TEST:
            cmd.append("-DITESTS=ON")
        cmd.append(".")
        check_call_cmd(*cmd)

    def build(self):
        """Drive the generated build with one job per CPU."""
        jobs = str(multiprocessing.cpu_count())
        check_call_cmd("cmake", "--build", ".", "--", "-j", jobs)

    def install(self):
        """Install the built artifacts and refresh the linker cache."""
        check_call_cmd("sudo", "cmake", "--install", ".")
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        """Run ctest when the package defines a `test` target."""
        if make_target_exists("test"):
            check_call_cmd("ctest", ".")

    def analyze(self):
        """Run clang-tidy (when configured), then valgrind, coverage and
        cppcheck."""
        if os.path.isfile(".clang-tidy"):
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                # clang-tidy needs to run on a clang-specific build
                check_call_cmd(
                    "cmake",
                    "-DCMAKE_C_COMPILER=clang",
                    "-DCMAKE_CXX_COMPILER=clang++",
                    "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
                    "-H.",
                    "-B" + build_dir,
                )
                check_call_cmd(
                    "run-clang-tidy", "-header-filter=.*", "-p", build_dir
                )

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
823
824
825class Meson(BuildSystem):
826    @staticmethod
827    def _project_name(path):
828        doc = subprocess.check_output(
829            ["meson", "introspect", "--projectinfo", path],
830            stderr=subprocess.STDOUT,
831        ).decode("utf-8")
832        return json.loads(doc)["descriptive_name"]
833
834    def __init__(self, package=None, path=None):
835        super(Meson, self).__init__(package, path)
836
837    def probe(self):
838        return os.path.isfile(os.path.join(self.path, "meson.build"))
839
840    def dependencies(self):
841        meson_build = os.path.join(self.path, "meson.build")
842        if not os.path.exists(meson_build):
843            return []
844
845        found_deps = []
846        for root, dirs, files in os.walk(self.path):
847            if "meson.build" not in files:
848                continue
849            with open(os.path.join(root, "meson.build"), "rt") as f:
850                build_contents = f.read()
851            pattern = r"dependency\('([^']*)'.*?\),?"
852            for match in re.finditer(pattern, build_contents):
853                group = match.group(1)
854                maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group)
855                if maybe_dep is not None:
856                    found_deps.append(maybe_dep)
857
858        return found_deps
859
860    def _parse_options(self, options_file):
861        """
862        Returns a set of options defined in the provides meson_options.txt file
863
864        Parameters:
865        options_file        The file containing options
866        """
867        store = OptionStore(is_cross=False)
868        oi = optinterpreter.OptionInterpreter(store, "")
869        oi.process(options_file)
870        return oi.options
871
872    def _configure_boolean(self, val):
873        """
874        Returns the meson flag which signifies the value
875
876        True is true which requires the boolean.
877        False is false which disables the boolean.
878
879        Parameters:
880        val                 The value being converted
881        """
882        if val is True:
883            return "true"
884        elif val is False:
885            return "false"
886        else:
887            raise Exception("Bad meson boolean value")
888
889    def _configure_feature(self, val):
890        """
891        Returns the meson flag which signifies the value
892
893        True is enabled which requires the feature.
894        False is disabled which disables the feature.
895        None is auto which autodetects the feature.
896
897        Parameters:
898        val                 The value being converted
899        """
900        if val is True:
901            return "enabled"
902        elif val is False:
903            return "disabled"
904        elif val is None:
905            return "auto"
906        else:
907            raise Exception("Bad meson feature value")
908
909    def _configure_option(self, opts, key, val):
910        """
911        Returns the meson flag which signifies the value
912        based on the type of the opt
913
914        Parameters:
915        opt                 The meson option which we are setting
916        val                 The value being converted
917        """
918        if isinstance(opts[key], options.UserBooleanOption):
919            str_val = self._configure_boolean(val)
920        elif isinstance(opts[key], options.UserFeatureOption):
921            str_val = self._configure_feature(val)
922        else:
923            raise Exception("Unknown meson option type")
924        return "-D{}={}".format(key, str_val)
925
926    def get_configure_flags(self, build_for_testing):
927        self.build_for_testing = build_for_testing
928        meson_options = {}
929        if os.path.exists("meson.options"):
930            meson_options = self._parse_options("meson.options")
931        elif os.path.exists("meson_options.txt"):
932            meson_options = self._parse_options("meson_options.txt")
933        meson_flags = [
934            "-Db_colorout=never",
935            "-Dwerror=true",
936            "-Dwarning_level=3",
937            "-Dcpp_args='-DBOOST_USE_VALGRIND'",
938        ]
939        if build_for_testing:
940            # -Ddebug=true -Doptimization=g is helpful for abi-dumper but isn't a combination that
941            # is supported by meson's build types. Configure it manually.
942            meson_flags.append("-Ddebug=true")
943            meson_flags.append("-Doptimization=g")
944        else:
945            meson_flags.append("--buildtype=debugoptimized")
946        if OptionKey("tests") in meson_options:
947            meson_flags.append(
948                self._configure_option(
949                    meson_options, OptionKey("tests"), build_for_testing
950                )
951            )
952        if OptionKey("examples") in meson_options:
953            meson_flags.append(
954                self._configure_option(
955                    meson_options, OptionKey("examples"), build_for_testing
956                )
957            )
958        if OptionKey("itests") in meson_options:
959            meson_flags.append(
960                self._configure_option(
961                    meson_options, OptionKey("itests"), INTEGRATION_TEST
962                )
963            )
964        if MESON_FLAGS.get(self.package) is not None:
965            meson_flags.extend(MESON_FLAGS.get(self.package))
966        return meson_flags
967
968    def configure(self, build_for_testing):
969        meson_flags = self.get_configure_flags(build_for_testing)
970        try:
971            check_call_cmd(
972                "meson", "setup", "--reconfigure", "build", *meson_flags
973            )
974        except Exception:
975            shutil.rmtree("build", ignore_errors=True)
976            check_call_cmd("meson", "setup", "build", *meson_flags)
977
978        self.package = Meson._project_name("build")
979
980    def build(self):
981        check_call_cmd("ninja", "-C", "build")
982
983    def install(self):
984        check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install")
985        check_call_cmd("sudo", "-n", "--", "ldconfig")
986
987    def test(self):
988        # It is useful to check various settings of the meson.build file
989        # for compatibility, such as meson_version checks.  We shouldn't
990        # do this in the configure path though because it affects subprojects
991        # and dependencies as well, but we only want this applied to the
992        # project-under-test (otherwise an upstream dependency could fail
993        # this check without our control).
994        self._extra_meson_checks()
995
996        try:
997            test_args = ("--repeat", str(args.repeat), "-C", "build")
998            check_call_cmd("meson", "test", "--print-errorlogs", *test_args)
999
1000        except CalledProcessError:
1001            raise Exception("Unit tests failed")
1002
1003    def _setup_exists(self, setup):
1004        """
1005        Returns whether the meson build supports the named test setup.
1006
1007        Parameter descriptions:
1008        setup              The setup target to check
1009        """
1010        try:
1011            with open(os.devnull, "w"):
1012                output = subprocess.check_output(
1013                    [
1014                        "meson",
1015                        "test",
1016                        "-C",
1017                        "build",
1018                        "--setup",
1019                        "{}:{}".format(self.package, setup),
1020                        "__likely_not_a_test__",
1021                    ],
1022                    stderr=subprocess.STDOUT,
1023                )
1024        except CalledProcessError as e:
1025            output = e.output
1026        output = output.decode("utf-8")
1027        return not re.search("Unknown test setup '[^']+'[.]", output)
1028
1029    def _maybe_valgrind(self):
1030        """
1031        Potentially runs the unit tests through valgrind for the package
1032        via `meson test`. The package can specify custom valgrind
1033        configurations by utilizing add_test_setup() in a meson.build
1034        """
1035        if not is_valgrind_safe():
1036            sys.stderr.write("###### Skipping valgrind ######\n")
1037            return
1038        try:
1039            if self._setup_exists("valgrind"):
1040                check_call_cmd(
1041                    "meson",
1042                    "test",
1043                    "-t",
1044                    "10",
1045                    "-C",
1046                    "build",
1047                    "--print-errorlogs",
1048                    "--setup",
1049                    "{}:valgrind".format(self.package),
1050                    preexec_fn=valgrind_rlimit_nofile,
1051                )
1052            else:
1053                check_call_cmd(
1054                    "meson",
1055                    "test",
1056                    "-t",
1057                    "10",
1058                    "-C",
1059                    "build",
1060                    "--print-errorlogs",
1061                    "--wrapper",
1062                    "valgrind --error-exitcode=1",
1063                    preexec_fn=valgrind_rlimit_nofile,
1064                )
1065        except CalledProcessError:
1066            raise Exception("Valgrind tests failed")
1067
1068    def analyze(self):
1069        self._maybe_valgrind()
1070
1071        # Run clang-tidy only if the project has a configuration
1072        if os.path.isfile(".clang-tidy"):
1073            clang_env = os.environ.copy()
1074            clang_env["CC"] = "clang"
1075            clang_env["CXX"] = "clang++"
1076            # Clang-20 currently has some issue with libstdcpp's
1077            # std::forward_like which results in a bunch of compile errors.
1078            # Adding -fno-builtin-std-forward_like causes them to go away.
1079            clang_env["CXXFLAGS"] = "-fno-builtin-std-forward_like"
1080            clang_env["CC_LD"] = "lld"
1081            clang_env["CXX_LD"] = "lld"
1082            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
1083                check_call_cmd("meson", "setup", build_dir, env=clang_env)
1084                if not os.path.isfile(".openbmc-no-clang"):
1085                    check_call_cmd(
1086                        "meson", "compile", "-C", build_dir, env=clang_env
1087                    )
1088                try:
1089                    check_call_cmd(
1090                        "ninja",
1091                        "-C",
1092                        build_dir,
1093                        "clang-tidy-fix",
1094                        env=clang_env,
1095                    )
1096                except subprocess.CalledProcessError:
1097                    check_call_cmd(
1098                        "git",
1099                        "-C",
1100                        CODE_SCAN_DIR,
1101                        "--no-pager",
1102                        "diff",
1103                        env=clang_env,
1104                    )
1105                    raise
1106        # Run the basic clang static analyzer otherwise
1107        else:
1108            check_call_cmd("ninja", "-C", "build", "scan-build")
1109
1110        # Run tests through sanitizers
1111        # b_lundef is needed if clang++ is CXX since it resolves the
1112        # asan symbols at runtime only. We don't want to set it earlier
1113        # in the build process to ensure we don't have undefined
1114        # runtime code.
1115        if is_sanitize_safe():
1116            meson_flags = self.get_configure_flags(self.build_for_testing)
1117            meson_flags.append("-Db_sanitize=address,undefined")
1118            try:
1119                check_call_cmd(
1120                    "meson", "setup", "--reconfigure", "build", *meson_flags
1121                )
1122            except Exception:
1123                shutil.rmtree("build", ignore_errors=True)
1124                check_call_cmd("meson", "setup", "build", *meson_flags)
1125            check_call_cmd(
1126                "meson",
1127                "test",
1128                "-C",
1129                "build",
1130                "--print-errorlogs",
1131                "--logbase",
1132                "testlog-ubasan",
1133            )
1134            # TODO: Fix memory sanitizer
1135            # check_call_cmd('meson', 'configure', 'build',
1136            #                '-Db_sanitize=memory')
1137            # check_call_cmd('meson', 'test', '-C', 'build'
1138            #                '--logbase', 'testlog-msan')
1139            meson_flags = [
1140                s.replace(
1141                    "-Db_sanitize=address,undefined", "-Db_sanitize=none"
1142                )
1143                for s in meson_flags
1144            ]
1145            try:
1146                check_call_cmd(
1147                    "meson", "setup", "--reconfigure", "build", *meson_flags
1148                )
1149            except Exception:
1150                shutil.rmtree("build", ignore_errors=True)
1151                check_call_cmd("meson", "setup", "build", *meson_flags)
1152        else:
1153            sys.stderr.write("###### Skipping sanitizers ######\n")
1154
1155        # Run coverage checks
1156        check_call_cmd("meson", "configure", "build", "-Db_coverage=true")
1157        self.test()
1158        # Only build coverage HTML if coverage files were produced
1159        for root, dirs, files in os.walk("build"):
1160            if any([f.endswith(".gcda") for f in files]):
1161                check_call_cmd("ninja", "-C", "build", "coverage-html")
1162                break
1163        check_call_cmd("meson", "configure", "build", "-Db_coverage=false")
1164        run_cppcheck()
1165
1166    def _extra_meson_checks(self):
1167        with open(os.path.join(self.path, "meson.build"), "rt") as f:
1168            build_contents = f.read()
1169
1170        # Find project's specified meson_version.
1171        meson_version = None
1172        pattern = r"meson_version:[^']*'([^']*)'"
1173        for match in re.finditer(pattern, build_contents):
1174            group = match.group(1)
1175            meson_version = group
1176
1177        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1178        # identify this.  Add to our unit-test checks so that we don't
1179        # get a meson.build missing this.
1180        pattern = r"'cpp_std=c\+\+20'"
1181        for match in re.finditer(pattern, build_contents):
1182            if not meson_version or not meson_version_compare(
1183                meson_version, ">=0.57"
1184            ):
1185                raise Exception(
1186                    "C++20 support requires specifying in meson.build: "
1187                    + "meson_version: '>=0.57'"
1188                )
1189
1190        # C++23 requires at least Meson 1.1.1 but Meson itself doesn't
1191        # identify this.  Add to our unit-test checks so that we don't
1192        # get a meson.build missing this.
1193        pattern = r"'cpp_std=c\+\+23'"
1194        for match in re.finditer(pattern, build_contents):
1195            if not meson_version or not meson_version_compare(
1196                meson_version, ">=1.1.1"
1197            ):
1198                raise Exception(
1199                    "C++23 support requires specifying in meson.build: "
1200                    + "meson_version: '>=1.1.1'"
1201                )
1202
1203        if "get_variable(" in build_contents:
1204            if not meson_version or not meson_version_compare(
1205                meson_version, ">=0.58"
1206            ):
1207                raise Exception(
1208                    "dep.get_variable() with positional argument requires "
1209                    + "meson_version: '>=0.58'"
1210                )
1211
1212        if "relative_to(" in build_contents:
1213            if not meson_version or not meson_version_compare(
1214                meson_version, ">=1.3.0"
1215            ):
1216                raise Exception(
1217                    "fs.relative_to() requires meson_version: '>=1.3.0'"
1218                )
1219
1220
class Package(object):
    """
    Wraps one repository checkout and dispatches to whichever supported
    build system (Meson, Autotools, CMake) the repository actually uses.
    """

    def __init__(self, name=None, path=None):
        # Probe order doubles as preference order when several match.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield instantiated build systems whose probe() matches this repo."""
        for system_cls in self.supported:
            candidate = system_cls(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """
        Return the matching build system, or None when nothing probes.

        When `preferred` (a build-system class) is given, return the
        matching instance of that class specifically.
        """
        systems = list(self.build_systems())
        if not systems:
            return None
        if preferred:
            by_type = {type(system): system for system in systems}
            return by_type[preferred]
        return systems[0]

    def install(self, system=None):
        """Configure, build, and install the package (non-test build)."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full configure/build/install/test pipeline on one system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Run the test pipeline against every matching build system."""
        for system in self.build_systems():
            self._test_one(system)
1262
1263
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Directories that are meson subprojects backed by a .wrap file are
    skipped, so vendored sources are never reported.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    names = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        if os.path.basename(root) == "subprojects":
            # don't find files in meson subprojects with wraps: prune the
            # subproject directory whenever a sibling <name>.wrap exists.
            for entry in files:
                if not entry.endswith(".wrap"):
                    continue
                subproject = os.path.splitext(entry)[0]
                if subproject in dirs:
                    dirs.remove(subproject)
        matches.extend(
            os.path.join(root, name) for name in names if name in files
        )
    return matches
1290
1291
if __name__ == "__main__":
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        "phosphor-logging": [
            "--enable-metadata-processing",
            "--enable-openpower-pel-extension",
            "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml",
        ]
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
        "phosphor-dbus-interfaces": [
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
        "phosphor-logging": ["-Dopenpower-pel-extension=enabled"],
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps names found by the build-system scanners (autotools macros,
    # meson dependency() calls) onto the OpenBMC repository providing them.
    DEPENDENCIES = {
        "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"},
        "AC_CHECK_HEADER": {
            "host-ipmid": "phosphor-host-ipmid",
            "blobs-ipmid": "phosphor-ipmi-blobs",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging/log.hpp": "phosphor-logging",
        },
        "AC_PATH_PROG": {"sdbus++": "sdbusplus"},
        "PKG_CHECK_MODULES": {
            "phosphor-dbus-interfaces": "phosphor-dbus-interfaces",
            "libipmid": "phosphor-host-ipmid",
            "libipmid-host": "phosphor-host-ipmid",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging": "phosphor-logging",
            "phosphor-snmp": "phosphor-snmp",
            "ipmiblob": "ipmi-blob-tool",
            "hei": "openpower-libhei",
            "phosphor-ipmi-blobs": "phosphor-ipmi-blobs",
            "libcr51sign": "google-misc",
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        "AC_CHECK_LIB": 0,
        "AC_CHECK_HEADER": 0,
        "AC_PATH_PROG": 1,
        "PKG_CHECK_MODULES": 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"}

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-w",
        "--workspace",
        dest="WORKSPACE",
        required=True,
        help="Workspace directory location(i.e. /home)",
    )
    parser.add_argument(
        "-p",
        "--package",
        dest="PACKAGE",
        required=True,
        help="OpenBMC package to be unit tested",
    )
    parser.add_argument(
        "-t",
        "--test-only",
        dest="TEST_ONLY",
        action="store_true",
        required=False,
        default=False,
        help="Only run test cases, no other validation",
    )
    parser.add_argument(
        "--no-cppcheck",
        dest="NO_CPPCHECK",
        action="store_true",
        required=False,
        default=False,
        help="Do not run cppcheck",
    )
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument(
        "--integration-tests",
        dest="INTEGRATION_TEST",
        action="store_true",
        required=False,
        default=True,
        help="Enable integration tests [default].",
    )
    arg_inttests.add_argument(
        "--no-integration-tests",
        dest="INTEGRATION_TEST",
        action="store_false",
        required=False,
        help="Disable integration tests.",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Print additional package status messages",
    )
    parser.add_argument(
        "-r", "--repeat", help="Repeat tests N times", type=int, default=1
    )
    parser.add_argument(
        "-b",
        "--branch",
        dest="BRANCH",
        required=False,
        help="Branch to target for dependent repositories",
        default="master",
    )
    parser.add_argument(
        "-n",
        "--noformat",
        dest="FORMAT",
        action="store_false",
        required=False,
        help="Whether or not to run format code",
    )
    args = parser.parse_args(sys.argv[1:])
    # Mirror parsed arguments into the module-level globals that the
    # build-system classes above consume.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    NO_CPPCHECK = args.NO_CPPCHECK
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() is a no-op unless --verbose was given.
    if args.verbose:

        def printline(*line):
            for arg in line:
                print(arg, end=" ")
            print()

    else:

        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # Run format-code.sh, which will in turn call any repo-level formatters.
    if FORMAT_CODE:
        check_call_cmd(
            os.path.join(
                WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh"
            ),
            CODE_SCAN_DIR,
        )

        # Check to see if any files changed
        check_call_cmd(
            "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code"
        )

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask while building/installing; restored after the
    # builds below.  NOTE(review): `000` is the int zero (Python has no
    # bare-octal literals), i.e. no permission bits are masked.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1505