xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision d8e150a50c3daed8530bfc0846afb3c94f25bb2b)
1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5build configuration (configure.ac or meson.build), then downloads, configures,
6builds, and installs each of those dependencies. Finally, the given package is
7configured, built, and installed prior to executing its unit tests.
8"""
9
10import argparse
11import json
12import multiprocessing
13import os
14import platform
15import re
16import shutil
17import subprocess
18import sys
19from subprocess import CalledProcessError, check_call
20from tempfile import TemporaryDirectory
21from urllib.parse import urljoin
22
23from git import Repo
24
25# interpreter is not used directly but this resolves dependency ordering
26# that would be broken if we didn't include it.
27from mesonbuild import interpreter  # noqa: F401
28from mesonbuild import optinterpreter, options
29from mesonbuild.mesonlib import version_compare as meson_version_compare
30from mesonbuild.options import OptionKey, OptionStore
31
32
33class DepTree:
34    """
35    Represents a package dependency tree, where each node is a DepTree with a
36    name and DepTree children. A usage sketch follows the class definition.
37    """
38
39    def __init__(self, name):
40        """
41        Create new DepTree.
42
43        Parameter descriptions:
44        name               Name of new tree node.
45        """
46        self.name = name
47        self.children = list()
48
49    def AddChild(self, name):
50        """
51        Add new child node to current node.
52
53        Parameter descriptions:
54        name               Name of new child
55        """
56        new_child = DepTree(name)
57        self.children.append(new_child)
58        return new_child
59
60    def AddChildNode(self, node):
61        """
62        Add existing child node to current node.
63
64        Parameter descriptions:
65        node               Tree node to add
66        """
67        self.children.append(node)
68
69    def RemoveChild(self, name):
70        """
71        Remove child node.
72
73        Parameter descriptions:
74        name               Name of child to remove
75        """
76        for child in self.children:
77            if child.name == name:
78                self.children.remove(child)
79                return
80
81    def GetNode(self, name):
82        """
83        Return node with matching name. Return None if not found.
84
85        Parameter descriptions:
86        name               Name of node to return
87        """
88        if self.name == name:
89            return self
90        for child in self.children:
91            node = child.GetNode(name)
92            if node:
93                return node
94        return None
95
96    def GetParentNode(self, name, parent_node=None):
97        """
98        Return parent of node with matching name. Return None if not found.
99
100        Parameter descriptions:
101        name               Name of node to get parent of
102        parent_node        Parent of current node
103        """
104        if self.name == name:
105            return parent_node
106        for child in self.children:
107            found_node = child.GetParentNode(name, self)
108            if found_node:
109                return found_node
110        return None
111
112    def GetPath(self, name, path=None):
113        """
114        Return list of node names from head to matching name.
115        Return None if not found.
116
117        Parameter descriptions:
118        name               Name of node
119        path               List of node names from head to current node
120        """
121        if not path:
122            path = []
123        if self.name == name:
124            path.append(self.name)
125            return path
126        for child in self.children:
127            match = child.GetPath(name, path + [self.name])
128            if match:
129                return match
130        return None
131
132    def GetPathRegex(self, name, regex_str, path=None):
133        """
134        Return a list of paths to nodes whose name equals 'name' or matches
135        regex_str. Return an empty list if none are found.
136
137        Parameter descriptions:
138        name               Name of node to search for
139        regex_str          Regex string to match node names
140        path               Path of node names from head to current node
141        """
142        new_paths = []
143        if not path:
144            path = []
145        match = re.match(regex_str, self.name)
146        if (self.name == name) or (match):
147            new_paths.append(path + [self.name])
148        for child in self.children:
149            full_path = path + [self.name]
150            return_paths = child.GetPathRegex(name, regex_str, full_path)
151            new_paths.extend(return_paths)
154        return new_paths
155
156    def MoveNode(self, from_name, to_name):
157        """
158        Move the existing from_name node to become a child of the to_name node.
159
160        Parameter descriptions:
161        from_name          Name of node to make a child of to_name
162        to_name            Name of node to make parent of from_name
163        """
164        parent_from_node = self.GetParentNode(from_name)
165        from_node = self.GetNode(from_name)
166        parent_from_node.RemoveChild(from_name)
167        to_node = self.GetNode(to_name)
168        to_node.AddChildNode(from_node)
169
170    def ReorderDeps(self, name, regex_str):
171        """
172        Reorder dependency tree.  If tree contains nodes with names that
173        match 'name' and 'regex_str', move 'regex_str' nodes that are
174        to the right of 'name' node, so that they become children of the
175        'name' node.
176
177        Parameter descriptions:
178        name               Name of node to look for
179        regex_str          Regex string to match names to
180        """
181        name_path = self.GetPath(name)
182        if not name_path:
183            return
184        paths = self.GetPathRegex(name, regex_str)
185        is_name_in_paths = False
186        name_index = 0
187        for i in range(len(paths)):
188            path = paths[i]
189            if path[-1] == name:
190                is_name_in_paths = True
191                name_index = i
192                break
193        if not is_name_in_paths:
194            return
195        for i in range(name_index + 1, len(paths)):
196            path = paths[i]
197            if name in path:
198                continue
199            from_name = path[-1]
200            self.MoveNode(from_name, name)
201
202    def GetInstallList(self):
203        """
204        Return a post-order list of node names, i.e. each node's
205        dependencies appear before the node itself.
206
207        """
208        install_list = []
209        for child in self.children:
210            child_install_list = child.GetInstallList()
211            install_list.extend(child_install_list)
212        install_list.append(self.name)
213        return install_list
214
215    def PrintTree(self, level=0):
216        """
217        Print pre-order node names with indentation denoting node depth level.
218
219        Parameter descriptions:
220        level              Current depth level
221        """
222        INDENT_PER_LEVEL = 4
223        print(" " * (level * INDENT_PER_LEVEL) + self.name)
224        for child in self.children:
225            child.PrintTree(level + 1)
226
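# An illustrative sketch of the DepTree API above (comments only, not executed;
# the package names and nesting are hypothetical):
#
#   head = DepTree("my-package")
#   child = head.AddChild("sdbusplus")
#   child.AddChild("googletest")
#   head.GetPath("googletest")    # ["my-package", "sdbusplus", "googletest"]
#   head.GetInstallList()         # ["googletest", "sdbusplus", "my-package"]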
227
228def check_call_cmd(*cmd, **kwargs):
229    """
230    Prints the directory the given command is called from and the command
231    itself (when verbose output is enabled), then executes the command using
232    check_call.
233
234    Parameter descriptions:
235    cmd                 Arguments constructing the complete command
236    """
237    printline(os.getcwd(), ">", " ".join(cmd))
238    check_call(cmd, **kwargs)
239
240
241def clone_pkg(pkg, branch):
242    """
243    Clone the given openbmc package's git repository from gerrit into
244    the WORKSPACE location.
245
246    Parameter descriptions:
247    pkg                 Name of the package to clone
248    branch              Branch to clone from pkg
249    """
250    pkg_dir = os.path.join(WORKSPACE, pkg)
251    if os.path.exists(os.path.join(pkg_dir, ".git")):
252        return pkg_dir
253    pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg)
254    os.mkdir(pkg_dir)
255    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
256    try:
257        # first try the branch
258        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
259        repo_inst = clone.working_dir
260    except Exception:
261        printline("Input branch not found, defaulting to master")
262        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
263        repo_inst = clone.working_dir
264    return repo_inst
265
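# For example, clone_pkg("sdbusplus", "master") clones
# https://gerrit.openbmc.org/openbmc/sdbusplus into ${WORKSPACE}/sdbusplus and
# returns that working directory (an illustrative call, not executed here).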
266
267def make_target_exists(target):
268    """
269    Runs a check against the makefile in the current directory to determine
270    if the target exists so that it can be built.
271
272    Parameter descriptions:
273    target              The make target we are checking
274    """
275    try:
276        cmd = ["make", "-n", target]
277        with open(os.devnull, "w") as devnull:
278            check_call(cmd, stdout=devnull, stderr=devnull)
279        return True
280    except CalledProcessError:
281        return False
282
283
284make_parallel = [
285    "make",
286    # Run enough jobs to saturate all the cpus
287    "-j",
288    str(multiprocessing.cpu_count()),
289    # Don't start more jobs if the load avg is too high
290    "-l",
291    str(multiprocessing.cpu_count()),
292    # Synchronize the output so logs aren't intermixed in stdout / stderr
293    "-O",
294]
295
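# On a hypothetical 8-CPU machine the list above expands to:
#   ["make", "-j", "8", "-l", "8", "-O"]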
296
297def build_and_install(name, build_for_testing=False):
298    """
299    Builds and installs the package in the environment. Optionally
300    builds the examples and test cases for the package.
301
302    Parameter descriptions:
303    name                The name of the package we are building
304    build_for_testing   Whether to enable testing options for the package
305    """
306    os.chdir(os.path.join(WORKSPACE, name))
307
308    # Refresh dynamic linker run time bindings for dependencies
309    check_call_cmd("sudo", "-n", "--", "ldconfig")
310
311    pkg = Package()
312    if build_for_testing:
313        pkg.test()
314    else:
315        pkg.install()
316
317
318def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
319    """
320    For each package (name), starting with the package to be unit tested,
321    extract its dependencies. For each package dependency defined, recursively
322    apply the same strategy.
323
324    Parameter descriptions:
325    name                Name of the package
326    pkgdir              Directory where package source is located
327    dep_added           Current dict of dependencies and added status
328    head                Head node of the dependency tree
329    branch              Branch to clone from pkg
330    dep_tree            Current dependency tree node
331    """
332    if not dep_tree:
333        dep_tree = head
334
335    with open("/tmp/depcache", "r") as depcache:
336        cache = depcache.readline()
337
338    # Read out pkg dependencies
339    pkg = Package(name, pkgdir)
340
341    build = pkg.build_system()
342    if not build:
343        raise Exception(f"Unable to find build system for {name}.")
344
345    for dep in set(build.dependencies()):
346        if dep in cache:
347            continue
348        # Dependency package not already known
349        if dep_added.get(dep) is None:
350            print(f"Adding {dep} dependency to {name}.")
351            # Dependency package not added
352            new_child = dep_tree.AddChild(dep)
353            dep_added[dep] = False
354            dep_pkgdir = clone_pkg(dep, branch)
355            # Determine this dependency package's
356            # dependencies and add them before
357            # returning to add this package
358            dep_added = build_dep_tree(
359                dep, dep_pkgdir, dep_added, head, branch, new_child
360            )
361        else:
362            # Dependency package known and added
363            if dep_added[dep]:
364                continue
365            else:
366                # Cyclic dependency failure
367                raise Exception("Cyclic dependencies found in " + name)
368
369    if not dep_added[name]:
370        dep_added[name] = True
371
372    return dep_added
373
374
375def run_cppcheck():
376    if (
377        not os.path.exists(os.path.join("build", "compile_commands.json"))
378        or NO_CPPCHECK
379    ):
380        return None
381
382    with TemporaryDirectory() as cpp_dir:
383        # http://cppcheck.sourceforge.net/manual.pdf
384        try:
385            check_call_cmd(
386                "cppcheck",
387                "-j",
388                str(multiprocessing.cpu_count()),
389                "--enable=style,performance,portability,missingInclude",
390                "--inline-suppr",
391                "--suppress=useStlAlgorithm",
392                "--suppress=unusedStructMember",
393                "--suppress=postfixOperator",
394                "--suppress=unreadVariable",
395                "--suppress=knownConditionTrueFalse",
396                "--library=googletest",
397                "--project=build/compile_commands.json",
398                f"--cppcheck-build-dir={cpp_dir}",
399            )
400        except subprocess.CalledProcessError:
401            print("cppcheck found errors")
402
403
404def is_valgrind_safe():
405    """
406    Returns whether it is safe to run valgrind on our platform
407    """
408    src = "unit-test-vg.c"
409    exe = "./unit-test-vg"
410    with open(src, "w") as h:
411        h.write("#include <errno.h>\n")
412        h.write("#include <stdio.h>\n")
413        h.write("#include <stdlib.h>\n")
414        h.write("#include <string.h>\n")
415        h.write("int main() {\n")
416        h.write("char *heap_str = malloc(16);\n")
417        h.write('strcpy(heap_str, "RandString");\n')
418        h.write('int res = strcmp("RandString", heap_str);\n')
419        h.write("free(heap_str);\n")
420        h.write("char errstr[64];\n")
421        h.write("strerror_r(EINVAL, errstr, sizeof(errstr));\n")
422        h.write('printf("%s\\n", errstr);\n')
423        h.write("return res;\n")
424        h.write("}\n")
425    try:
426        with open(os.devnull, "w") as devnull:
427            check_call(
428                ["gcc", "-O2", "-o", exe, src], stdout=devnull, stderr=devnull
429            )
430            check_call(
431                ["valgrind", "--error-exitcode=99", exe],
432                stdout=devnull,
433                stderr=devnull,
434            )
435        return True
436    except Exception:
437        sys.stderr.write("###### Platform is not valgrind safe ######\n")
438        return False
439    finally:
440        os.remove(src)
441        os.remove(exe)
442
443
444def is_sanitize_safe():
445    """
446    Returns whether it is safe to run sanitizers on our platform
447    """
448    src = "unit-test-sanitize.c"
449    exe = "./unit-test-sanitize"
450    with open(src, "w") as h:
451        h.write("int main() { return 0; }\n")
452    try:
453        with open(os.devnull, "w") as devnull:
454            check_call(
455                [
456                    "gcc",
457                    "-O2",
458                    "-fsanitize=address",
459                    "-fsanitize=undefined",
460                    "-o",
461                    exe,
462                    src,
463                ],
464                stdout=devnull,
465                stderr=devnull,
466            )
467            check_call([exe], stdout=devnull, stderr=devnull)
468
469        # TODO - Sanitizer not working on ppc64le
470        # https://github.com/openbmc/openbmc-build-scripts/issues/31
471        if platform.processor() == "ppc64le":
472            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
473            return False
474        else:
475            return True
476    except Exception:
477        sys.stderr.write("###### Platform is not sanitize safe ######\n")
478        return False
479    finally:
480        os.remove(src)
481        os.remove(exe)
482
483
484def maybe_make_valgrind():
485    """
486    Potentially runs the unit tests through valgrind for the package
487    via `make check-valgrind`. If the package does not have valgrind testing
488    then it just skips over this.
489    """
490    # Valgrind testing is currently broken by an aggressive strcmp optimization
491    # that is inlined into optimized code for POWER by gcc 7+. Until we find
492    # a workaround, just don't run valgrind tests on POWER.
493    # https://github.com/openbmc/openbmc/issues/3315
494    if not is_valgrind_safe():
495        sys.stderr.write("###### Skipping valgrind ######\n")
496        return
497    if not make_target_exists("check-valgrind"):
498        return
499
500    try:
501        cmd = make_parallel + ["check-valgrind"]
502        check_call_cmd(*cmd)
503    except CalledProcessError:
504        for root, _, files in os.walk(os.getcwd()):
505            for f in files:
506                if re.search(r"test-suite-[a-z]+\.log", f) is None:
507                    continue
508                check_call_cmd("cat", os.path.join(root, f))
509        raise Exception("Valgrind tests failed")
510
511
512def maybe_make_coverage():
513    """
514    Potentially runs the unit tests through code coverage for the package
515    via `make check-code-coverage`. If the package does not have code coverage
516    testing then it just skips over this.
517    """
518    if not make_target_exists("check-code-coverage"):
519        return
520
521    # Actually run code coverage
522    try:
523        cmd = make_parallel + ["check-code-coverage"]
524        check_call_cmd(*cmd)
525    except CalledProcessError:
526        raise Exception("Code coverage failed")
527
528
529class BuildSystem(object):
530    """
531    Build systems generally provide the means to configure, build, install and
532    test software. The BuildSystem class defines a set of interfaces on top of
533    which Autotools, Meson, CMake and possibly other build system drivers can
534    be implemented, separating out the phases to control whether a package
535    should merely be installed or also tested and analyzed.
536    """
537
538    def __init__(self, package, path):
539        """Initialise the driver with properties independent of the build
540        system
541
542        Keyword arguments:
543        package: The name of the package. Derived from the path if None
544        path: The path to the package. Set to the working directory if None
545        """
546        self.path = "." if not path else path
547        realpath = os.path.realpath(self.path)
548        self.package = package if package else os.path.basename(realpath)
549        self.build_for_testing = False
550
551    def probe(self):
552        """Test if the build system driver can be applied to the package
553
554        Return True if the driver can drive the package's build system,
555        otherwise False.
556
557        Generally probe() is implemented by testing for the presence of the
558        build system's configuration file(s).
559        """
560        raise NotImplementedError
561
562    def dependencies(self):
563        """Provide the package's dependencies
564
565        Returns a list of dependencies. If no dependencies are required then an
566        empty list must be returned.
567
568        Generally dependencies() is implemented by analysing and extracting the
569        data from the build system configuration.
570        """
571        raise NotImplementedError
572
573    def configure(self, build_for_testing):
574        """Configure the source ready for building
575
576        Should raise an exception if configuration failed.
577
578        Keyword arguments:
579        build_for_testing: Mark the package as being built for testing rather
580                           than for installation as a dependency for the
581                           package under test. Setting to True generally
582                           implies that the package will be configured to build
583                           with debug information, at a low level of
584                           optimisation and possibly with sanitizers enabled.
585
586        Generally configure() is implemented by invoking the build system
587        tooling to generate Makefiles or equivalent.
588        """
589        raise NotImplementedError
590
591    def build(self):
592        """Build the software ready for installation and/or testing
593
594        Should raise an exception if the build fails
595
596        Generally build() is implemented by invoking `make` or `ninja`.
597        """
598        raise NotImplementedError
599
600    def install(self):
601        """Install the software ready for use
602
603        Should raise an exception if installation fails
604
605        Like build(), install() is generally implemented by invoking `make` or
606        `ninja`.
607        """
608        raise NotImplementedError
609
610    def test(self):
611        """Build and run the test suite associated with the package
612
613        Should raise an exception if the build or testing fails.
614
615        Like install(), test() is generally implemented by invoking `make` or
616        `ninja`.
617        """
618        raise NotImplementedError
619
620    def analyze(self):
621        """Run any supported analysis tools over the codebase
622
623        Should raise an exception if analysis fails.
624
625        Some analysis tools such as scan-build need injection into the build
626        system. analyze() provides the necessary hook to implement such
627        behaviour. Analyzers independent of the build system can also be
628        specified here but at the cost of possible duplication of code between
629        the build system driver implementations.
630        """
631        raise NotImplementedError
632
633
634class Autotools(BuildSystem):
635    def __init__(self, package=None, path=None):
636        super(Autotools, self).__init__(package, path)
637
638    def probe(self):
639        return os.path.isfile(os.path.join(self.path, "configure.ac"))
640
641    def dependencies(self):
642        configure_ac = os.path.join(self.path, "configure.ac")
643
644        contents = ""
645        # Prepend some special function overrides so we can parse out
646        # dependencies
647        for macro in DEPENDENCIES.keys():
648            contents += (
649                "m4_define(["
650                + macro
651                + "], ["
652                + macro
653                + "_START$"
654                + str(DEPENDENCIES_OFFSET[macro] + 1)
655                + macro
656                + "_END])\n"
657            )
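        # For example, with DEPENDENCIES_OFFSET["PKG_CHECK_MODULES"] == 1 the
        # loop above emits (shown wrapped here for readability):
        #   m4_define([PKG_CHECK_MODULES],
        #             [PKG_CHECK_MODULES_START$2PKG_CHECK_MODULES_END])
        # so autoconf expands each macro into markers bracketing the
        # dependency argument we want to extract.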
658        with open(configure_ac, "rt") as f:
659            contents += f.read()
660
661        autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"]
662        autoconf_process = subprocess.Popen(
663            autoconf_cmdline,
664            stdin=subprocess.PIPE,
665            stdout=subprocess.PIPE,
666            stderr=subprocess.PIPE,
667        )
668        document = contents.encode("utf-8")
669        (stdout, stderr) = autoconf_process.communicate(input=document)
670        if not stdout:
671            print(stderr)
672            raise Exception("Failed to run autoconf for parsing dependencies")
673
674        # Parse out all of the dependency text
675        matches = []
676        for macro in DEPENDENCIES.keys():
677            pattern = "(" + macro + ")_START(.*?)" + macro + "_END"
678            for match in re.compile(pattern).finditer(stdout.decode("utf-8")):
679                matches.append((match.group(1), match.group(2)))
680
681        # Look up dependencies from the text
682        found_deps = []
683        for macro, deptext in matches:
684            for potential_dep in deptext.split(" "):
685                for known_dep in DEPENDENCIES[macro].keys():
686                    if potential_dep.startswith(known_dep):
687                        found_deps.append(DEPENDENCIES[macro][known_dep])
688
689        return found_deps
690
691    def _configure_feature(self, flag, enabled):
692        """
693        Returns a configure flag as a string
694
695        Parameters:
696        flag                The name of the flag
697        enabled             Whether the flag is enabled or disabled
698        """
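        # Illustrative: _configure_feature("tests", True) returns
        # "--enable-tests"; with enabled=False it returns "--disable-tests".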
699        return "--" + ("enable" if enabled else "disable") + "-" + flag
700
701    def configure(self, build_for_testing):
702        self.build_for_testing = build_for_testing
703        conf_flags = [
704            self._configure_feature("silent-rules", False),
705            self._configure_feature("examples", build_for_testing),
706            self._configure_feature("tests", build_for_testing),
707            self._configure_feature("itests", INTEGRATION_TEST),
708        ]
709        conf_flags.extend(
710            [
711                self._configure_feature("code-coverage", False),
712                self._configure_feature("valgrind", build_for_testing),
713            ]
714        )
715        # Add any necessary configure flags for package
716        if CONFIGURE_FLAGS.get(self.package) is not None:
717            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
718        for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]:
719            if os.path.exists(bootstrap):
720                check_call_cmd("./" + bootstrap)
721                break
722        check_call_cmd("./configure", *conf_flags)
723
724    def build(self):
725        check_call_cmd(*make_parallel)
726
727    def install(self):
728        check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"]))
729        check_call_cmd("sudo", "-n", "--", "ldconfig")
730
731    def test(self):
732        try:
733            cmd = make_parallel + ["check"]
734            for i in range(0, args.repeat):
735                check_call_cmd(*cmd)
736
737            maybe_make_valgrind()
738            maybe_make_coverage()
739        except CalledProcessError:
740            for root, _, files in os.walk(os.getcwd()):
741                if "test-suite.log" not in files:
742                    continue
743                check_call_cmd("cat", os.path.join(root, "test-suite.log"))
744            raise Exception("Unit tests failed")
745
746    def analyze(self):
747        run_cppcheck()
748
749
750class CMake(BuildSystem):
751    def __init__(self, package=None, path=None):
752        super(CMake, self).__init__(package, path)
753
754    def probe(self):
755        return os.path.isfile(os.path.join(self.path, "CMakeLists.txt"))
756
757    def dependencies(self):
758        return []
759
760    def configure(self, build_for_testing):
761        self.build_for_testing = build_for_testing
762        if INTEGRATION_TEST:
763            check_call_cmd(
764                "cmake",
765                "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
766                "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'",
767                "-DITESTS=ON",
768                ".",
769            )
770        else:
771            check_call_cmd(
772                "cmake",
773                "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
774                "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'",
775                ".",
776            )
777
778    def build(self):
779        check_call_cmd(
780            "cmake",
781            "--build",
782            ".",
783            "--",
784            "-j",
785            str(multiprocessing.cpu_count()),
786        )
787
788    def install(self):
789        check_call_cmd("sudo", "cmake", "--install", ".")
790        check_call_cmd("sudo", "-n", "--", "ldconfig")
791
792    def test(self):
793        if make_target_exists("test"):
794            check_call_cmd("ctest", ".")
795
796    def analyze(self):
797        if os.path.isfile(".clang-tidy"):
798            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
799                # clang-tidy needs to run on a clang-specific build
800                check_call_cmd(
801                    "cmake",
802                    "-DCMAKE_C_COMPILER=clang",
803                    "-DCMAKE_CXX_COMPILER=clang++",
804                    "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
805                    "-H.",
806                    "-B" + build_dir,
807                )
808
809                check_call_cmd(
810                    "run-clang-tidy", "-header-filter=.*", "-p", build_dir
811                )
812
813        maybe_make_valgrind()
814        maybe_make_coverage()
815        run_cppcheck()
816
817
818class Meson(BuildSystem):
819    @staticmethod
820    def _project_name(path):
821        doc = subprocess.check_output(
822            ["meson", "introspect", "--projectinfo", path],
823            stderr=subprocess.STDOUT,
824        ).decode("utf-8")
825        return json.loads(doc)["descriptive_name"]
826
827    def __init__(self, package=None, path=None):
828        super(Meson, self).__init__(package, path)
829
830    def probe(self):
831        return os.path.isfile(os.path.join(self.path, "meson.build"))
832
833    def dependencies(self):
834        meson_build = os.path.join(self.path, "meson.build")
835        if not os.path.exists(meson_build):
836            return []
837
838        found_deps = []
839        for root, dirs, files in os.walk(self.path):
840            if "meson.build" not in files:
841                continue
842            with open(os.path.join(root, "meson.build"), "rt") as f:
843                build_contents = f.read()
844            pattern = r"dependency\('([^']*)'.*?\),?\n"
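            # Illustrative match: a meson.build line such as
            #   sdbusplus_dep = dependency('sdbusplus'),
            # (the variable name is hypothetical) captures "sdbusplus", which
            # DEPENDENCIES["PKG_CHECK_MODULES"] maps to the sdbusplus repo.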
845            for match in re.finditer(pattern, build_contents):
846                group = match.group(1)
847                maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group)
848                if maybe_dep is not None:
849                    found_deps.append(maybe_dep)
850
851        return found_deps
852
853    def _parse_options(self, options_file):
854        """
855        Returns the options defined in the provided meson options file
856
857        Parameters:
858        options_file        The file containing options
859        """
860        store = OptionStore()
861        oi = optinterpreter.OptionInterpreter(store, "")
862        oi.process(options_file)
863        return oi.options
864
865    def _configure_boolean(self, val):
866        """
867        Returns the meson boolean string which signifies the value
868
869        True maps to "true".
870        False maps to "false".
871
872        Parameters:
873        val                 The value being converted
874        """
875        if val is True:
876            return "true"
877        elif val is False:
878            return "false"
879        else:
880            raise Exception("Bad meson boolean value")
881
882    def _configure_feature(self, val):
883        """
884        Returns the meson feature string which signifies the value
885
886        True maps to "enabled" (the feature is required).
887        False maps to "disabled" (the feature is turned off).
888        None maps to "auto" (the feature is autodetected).
889
890        Parameters:
891        val                 The value being converted
892        """
893        if val is True:
894            return "enabled"
895        elif val is False:
896            return "disabled"
897        elif val is None:
898            return "auto"
899        else:
900            raise Exception("Bad meson feature value")
901
902    def _configure_option(self, opts, key, val):
903        """
904        Returns the meson flag which signifies the value, based on the type
905        of the option opts[key]
906
907        Parameters:
908        opts, key           The parsed meson options and the OptionKey being set
909        val                 The value being converted
910        """
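        # Illustrative: if "tests" is a feature-typed option, val=True yields
        # "-Dtests=enabled"; if it is boolean-typed, val=True yields
        # "-Dtests=true".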
911        if isinstance(opts[key], options.UserBooleanOption):
912            str_val = self._configure_boolean(val)
913        elif isinstance(opts[key], options.UserFeatureOption):
914            str_val = self._configure_feature(val)
915        else:
916            raise Exception("Unknown meson option type")
917        return "-D{}={}".format(key, str_val)
918
919    def get_configure_flags(self, build_for_testing):
920        self.build_for_testing = build_for_testing
921        meson_options = {}
922        if os.path.exists("meson.options"):
923            meson_options = self._parse_options("meson.options")
924        elif os.path.exists("meson_options.txt"):
925            meson_options = self._parse_options("meson_options.txt")
926        meson_flags = [
927            "-Db_colorout=never",
928            "-Dwerror=true",
929            "-Dwarning_level=3",
930            "-Dcpp_args='-DBOOST_USE_VALGRIND'",
931        ]
932        if build_for_testing:
933            # -Ddebug=true -Doptimization=g is helpful for abi-dumper but isn't a combination that
934            # is supported by meson's build types. Configure it manually.
935            meson_flags.append("-Ddebug=true")
936            meson_flags.append("-Doptimization=g")
937        else:
938            meson_flags.append("--buildtype=debugoptimized")
939        if OptionKey("tests") in meson_options:
940            meson_flags.append(
941                self._configure_option(
942                    meson_options, OptionKey("tests"), build_for_testing
943                )
944            )
945        if OptionKey("examples") in meson_options:
946            meson_flags.append(
947                self._configure_option(
948                    meson_options, OptionKey("examples"), build_for_testing
949                )
950            )
951        if OptionKey("itests") in meson_options:
952            meson_flags.append(
953                self._configure_option(
954                    meson_options, OptionKey("itests"), INTEGRATION_TEST
955                )
956            )
957        if MESON_FLAGS.get(self.package) is not None:
958            meson_flags.extend(MESON_FLAGS.get(self.package))
959        return meson_flags
960
961    def configure(self, build_for_testing):
962        meson_flags = self.get_configure_flags(build_for_testing)
963        try:
964            check_call_cmd(
965                "meson", "setup", "--reconfigure", "build", *meson_flags
966            )
967        except Exception:
968            shutil.rmtree("build", ignore_errors=True)
969            check_call_cmd("meson", "setup", "build", *meson_flags)
970
971        self.package = Meson._project_name("build")
972
973    def build(self):
974        check_call_cmd("ninja", "-C", "build")
975
976    def install(self):
977        check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install")
978        check_call_cmd("sudo", "-n", "--", "ldconfig")
979
980    def test(self):
981        # It is useful to check various settings of the meson.build file
982        # for compatibility, such as meson_version checks.  We shouldn't
983        # do this in the configure path though because it affects subprojects
984        # and dependencies as well, but we only want this applied to the
985        # project-under-test (otherwise an upstream dependency could fail
986        # this check without our control).
987        self._extra_meson_checks()
988
989        try:
990            test_args = ("--repeat", str(args.repeat), "-C", "build")
991            check_call_cmd("meson", "test", "--print-errorlogs", *test_args)
992
993        except CalledProcessError:
994            raise Exception("Unit tests failed")
995
996    def _setup_exists(self, setup):
997        """
998        Returns whether the meson build supports the named test setup.
999
1000        Parameter descriptions:
1001        setup              The setup target to check
1002        """
1003        try:
1004            with open(os.devnull, "w"):
1005                output = subprocess.check_output(
1006                    [
1007                        "meson",
1008                        "test",
1009                        "-C",
1010                        "build",
1011                        "--setup",
1012                        "{}:{}".format(self.package, setup),
1013                        "__likely_not_a_test__",
1014                    ],
1015                    stderr=subprocess.STDOUT,
1016                )
1017        except CalledProcessError as e:
1018            output = e.output
1019        output = output.decode("utf-8")
1020        return not re.search("Unknown test setup '[^']+'[.]", output)
1021
1022    def _maybe_valgrind(self):
1023        """
1024        Potentially runs the unit tests through valgrind for the package
1025        via `meson test`. The package can specify custom valgrind
1026        configurations by utilizing add_test_setup() in a meson.build
1027        """
1028        if not is_valgrind_safe():
1029            sys.stderr.write("###### Skipping valgrind ######\n")
1030            return
1031        try:
1032            if self._setup_exists("valgrind"):
1033                check_call_cmd(
1034                    "meson",
1035                    "test",
1036                    "-t",
1037                    "10",
1038                    "-C",
1039                    "build",
1040                    "--print-errorlogs",
1041                    "--setup",
1042                    "{}:valgrind".format(self.package),
1043                )
1044            else:
1045                check_call_cmd(
1046                    "meson",
1047                    "test",
1048                    "-t",
1049                    "10",
1050                    "-C",
1051                    "build",
1052                    "--print-errorlogs",
1053                    "--wrapper",
1054                    "valgrind --error-exitcode=1",
1055                )
1056        except CalledProcessError:
1057            raise Exception("Valgrind tests failed")
1058
1059    def analyze(self):
1060        self._maybe_valgrind()
1061
1062        # Run clang-tidy only if the project has a configuration
1063        if os.path.isfile(".clang-tidy"):
1064            os.environ["CXX"] = "clang++"
1065            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
1066                check_call_cmd("meson", "setup", build_dir)
1067                if not os.path.isfile(".openbmc-no-clang"):
1068                    check_call_cmd("meson", "compile", "-C", build_dir)
1069                try:
1070                    check_call_cmd("ninja", "-C", build_dir, "clang-tidy-fix")
1071                except subprocess.CalledProcessError:
1072                    check_call_cmd(
1073                        "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff"
1074                    )
1075                    raise
1076        # Run the basic clang static analyzer otherwise
1077        else:
1078            check_call_cmd("ninja", "-C", "build", "scan-build")
1079
1080        # Run tests through sanitizers
1081        # b_lundef is needed if clang++ is CXX since it resolves the
1082        # asan symbols at runtime only. We don't want to set it earlier
1083        # in the build process to ensure we don't have undefined
1084        # runtime code.
1085        if is_sanitize_safe():
1086            meson_flags = self.get_configure_flags(self.build_for_testing)
1087            meson_flags.append("-Db_sanitize=address,undefined")
1088            try:
1089                check_call_cmd(
1090                    "meson", "setup", "--reconfigure", "build", *meson_flags
1091                )
1092            except Exception:
1093                shutil.rmtree("build", ignore_errors=True)
1094                check_call_cmd("meson", "setup", "build", *meson_flags)
1095            check_call_cmd(
1096                "meson",
1097                "test",
1098                "-C",
1099                "build",
1100                "--print-errorlogs",
1101                "--logbase",
1102                "testlog-ubasan",
1103                env=os.environ | {"UBSAN_OPTIONS": "halt_on_error=1"},
1104            )
1105            # TODO: Fix memory sanitizer
1106            # check_call_cmd('meson', 'configure', 'build',
1107            #                '-Db_sanitize=memory')
1108            # check_call_cmd('meson', 'test', '-C', 'build'
1109            #                '--logbase', 'testlog-msan')
1110            meson_flags.remove("-Db_sanitize=address,undefined")
1111            try:
1112                check_call_cmd(
1113                    "meson", "setup", "--reconfigure", "build", *meson_flags
1114                )
1115            except Exception:
1116                shutil.rmtree("build", ignore_errors=True)
1117                check_call_cmd("meson", "setup", "build", *meson_flags)
1118        else:
1119            sys.stderr.write("###### Skipping sanitizers ######\n")
1120
1121        # Run coverage checks
1122        check_call_cmd("meson", "configure", "build", "-Db_coverage=true")
1123        self.test()
1124        # Only build coverage HTML if coverage files were produced
1125        for root, dirs, files in os.walk("build"):
1126            if any([f.endswith(".gcda") for f in files]):
1127                check_call_cmd("ninja", "-C", "build", "coverage-html")
1128                break
1129        check_call_cmd("meson", "configure", "build", "-Db_coverage=false")
1130        run_cppcheck()
1131
1132    def _extra_meson_checks(self):
1133        with open(os.path.join(self.path, "meson.build"), "rt") as f:
1134            build_contents = f.read()
1135
1136        # Find project's specified meson_version.
1137        meson_version = None
1138        pattern = r"meson_version:[^']*'([^']*)'"
1139        for match in re.finditer(pattern, build_contents):
1140            group = match.group(1)
1141            meson_version = group
1142
1143        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1144        # identify this.  Add to our unit-test checks so that we don't
1145        # get a meson.build missing this.
1146        pattern = r"'cpp_std=c\+\+20'"
1147        for match in re.finditer(pattern, build_contents):
1148            if not meson_version or not meson_version_compare(
1149                meson_version, ">=0.57"
1150            ):
1151                raise Exception(
1152                    "C++20 support requires specifying in meson.build: "
1153                    + "meson_version: '>=0.57'"
1154                )
1155
1156        # C++23 requires at least Meson 1.1.1 but Meson itself doesn't
1157        # identify this.  Add to our unit-test checks so that we don't
1158        # get a meson.build missing this.
1159        pattern = r"'cpp_std=c\+\+23'"
1160        for match in re.finditer(pattern, build_contents):
1161            if not meson_version or not meson_version_compare(
1162                meson_version, ">=1.1.1"
1163            ):
1164                raise Exception(
1165                    "C++23 support requires specifying in meson.build: "
1166                    + "meson_version: '>=1.1.1'"
1167                )
1168
1169        if "get_variable(" in build_contents:
1170            if not meson_version or not meson_version_compare(
1171                meson_version, ">=0.58"
1172            ):
1173                raise Exception(
1174                    "dep.get_variable() with positional argument requires "
1175                    + "meson_version: '>=0.58'"
1176                )
1177
1178
1179class Package(object):
1180    def __init__(self, name=None, path=None):
1181        self.supported = [Meson, Autotools, CMake]
1182        self.name = name
1183        self.path = path
1184        self.test_only = False
1185
1186    def build_systems(self):
1187        instances = (system(self.name, self.path) for system in self.supported)
1188        return (instance for instance in instances if instance.probe())
1189
1190    def build_system(self, preferred=None):
1191        systems = list(self.build_systems())
1192
1193        if not systems:
1194            return None
1195
1196        if preferred:
1197            return {type(system): system for system in systems}[preferred]
1198
1199        return next(iter(systems))
1200
1201    def install(self, system=None):
1202        if not system:
1203            system = self.build_system()
1204
1205        system.configure(False)
1206        system.build()
1207        system.install()
1208
1209    def _test_one(self, system):
1210        system.configure(True)
1211        system.build()
1212        system.install()
1213        system.test()
1214        if not TEST_ONLY:
1215            system.analyze()
1216
1217    def test(self):
1218        for system in self.build_systems():
1219            self._test_one(system)
1220
1221
1222def find_file(filename, basedir):
1223    """
1224    Finds all occurrences of a file (or list of files) in the base
1225    directory and passes them back with their relative paths.
1226
1227    Parameter descriptions:
1228    filename              The name of the file (or list of files) to
1229                          find
1230    basedir               The base directory to search in
1231    """
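    # Illustrative (hypothetical layout): find_file(["run-ci.sh", "run-ci"],
    # basedir) returns paths such as os.path.join(basedir, "ci", "run-ci.sh")
    # for every match outside of wrapped meson subprojects.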
1232
1233    if not isinstance(filename, list):
1234        filename = [filename]
1235
1236    filepaths = []
1237    for root, dirs, files in os.walk(basedir):
1238        if os.path.split(root)[-1] == "subprojects":
1239            for f in files:
1240                subproject = ".".join(f.split(".")[0:-1])
1241                if f.endswith(".wrap") and subproject in dirs:
1242                    # don't find files in meson subprojects with wraps
1243                    dirs.remove(subproject)
1244        for f in filename:
1245            if f in files:
1246                filepaths.append(os.path.join(root, f))
1247    return filepaths
1248
1249
1250if __name__ == "__main__":
1251    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
1252    CONFIGURE_FLAGS = {
1253        "phosphor-logging": [
1254            "--enable-metadata-processing",
1255            "--enable-openpower-pel-extension",
1256            "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml",
1257        ]
1258    }
1259
1260    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
1261    MESON_FLAGS = {
1262        "phosphor-dbus-interfaces": [
1263            "-Ddata_com_ibm=true",
1264            "-Ddata_org_open_power=true",
1265        ],
1266        "phosphor-logging": ["-Dopenpower-pel-extension=enabled"],
1267    }
1268
1269    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
1270    DEPENDENCIES = {
1271        "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"},
1272        "AC_CHECK_HEADER": {
1273            "host-ipmid": "phosphor-host-ipmid",
1274            "blobs-ipmid": "phosphor-ipmi-blobs",
1275            "sdbusplus": "sdbusplus",
1276            "sdeventplus": "sdeventplus",
1277            "stdplus": "stdplus",
1278            "gpioplus": "gpioplus",
1279            "phosphor-logging/log.hpp": "phosphor-logging",
1280        },
1281        "AC_PATH_PROG": {"sdbus++": "sdbusplus"},
1282        "PKG_CHECK_MODULES": {
1283            "phosphor-dbus-interfaces": "phosphor-dbus-interfaces",
1284            "libipmid": "phosphor-host-ipmid",
1285            "libipmid-host": "phosphor-host-ipmid",
1286            "sdbusplus": "sdbusplus",
1287            "sdeventplus": "sdeventplus",
1288            "stdplus": "stdplus",
1289            "gpioplus": "gpioplus",
1290            "phosphor-logging": "phosphor-logging",
1291            "phosphor-snmp": "phosphor-snmp",
1292            "ipmiblob": "ipmi-blob-tool",
1293            "hei": "openpower-libhei",
1294            "phosphor-ipmi-blobs": "phosphor-ipmi-blobs",
1295            "libcr51sign": "google-misc",
1296        },
1297    }
1298
1299    # Offset into array of macro parameters MACRO(0, 1, ...N)
1300    DEPENDENCIES_OFFSET = {
1301        "AC_CHECK_LIB": 0,
1302        "AC_CHECK_HEADER": 0,
1303        "AC_PATH_PROG": 1,
1304        "PKG_CHECK_MODULES": 1,
1305    }
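    # Illustrative: in AC_CHECK_LIB(library, ...) the library name is macro
    # parameter 0, while in PKG_CHECK_MODULES(prefix, list-of-modules, ...)
    # the module list is parameter 1, matching the offsets above.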
1306
1307    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
1308    DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"}
1309
1310    # Set command line arguments
1311    parser = argparse.ArgumentParser()
1312    parser.add_argument(
1313        "-w",
1314        "--workspace",
1315        dest="WORKSPACE",
1316        required=True,
1317        help="Workspace directory location (i.e. /home)",
1318    )
1319    parser.add_argument(
1320        "-p",
1321        "--package",
1322        dest="PACKAGE",
1323        required=True,
1324        help="OpenBMC package to be unit tested",
1325    )
1326    parser.add_argument(
1327        "-t",
1328        "--test-only",
1329        dest="TEST_ONLY",
1330        action="store_true",
1331        required=False,
1332        default=False,
1333        help="Only run test cases, no other validation",
1334    )
1335    parser.add_argument(
1336        "--no-cppcheck",
1337        dest="NO_CPPCHECK",
1338        action="store_true",
1339        required=False,
1340        default=False,
1341        help="Do not run cppcheck",
1342    )
1343    arg_inttests = parser.add_mutually_exclusive_group()
1344    arg_inttests.add_argument(
1345        "--integration-tests",
1346        dest="INTEGRATION_TEST",
1347        action="store_true",
1348        required=False,
1349        default=True,
1350        help="Enable integration tests [default].",
1351    )
1352    arg_inttests.add_argument(
1353        "--no-integration-tests",
1354        dest="INTEGRATION_TEST",
1355        action="store_false",
1356        required=False,
1357        help="Disable integration tests.",
1358    )
1359    parser.add_argument(
1360        "-v",
1361        "--verbose",
1362        action="store_true",
1363        help="Print additional package status messages",
1364    )
1365    parser.add_argument(
1366        "-r", "--repeat", help="Repeat tests N times", type=int, default=1
1367    )
1368    parser.add_argument(
1369        "-b",
1370        "--branch",
1371        dest="BRANCH",
1372        required=False,
1373        help="Branch to target for dependent repositories",
1374        default="master",
1375    )
1376    parser.add_argument(
1377        "-n",
1378        "--noformat",
1379        dest="FORMAT",
1380        action="store_false",
1381        required=False,
1382        help="Do not run code formatting",
1383    )
1384    args = parser.parse_args(sys.argv[1:])
1385    WORKSPACE = args.WORKSPACE
1386    UNIT_TEST_PKG = args.PACKAGE
1387    TEST_ONLY = args.TEST_ONLY
1388    NO_CPPCHECK = args.NO_CPPCHECK
1389    INTEGRATION_TEST = args.INTEGRATION_TEST
1390    BRANCH = args.BRANCH
1391    FORMAT_CODE = args.FORMAT
1392    if args.verbose:
1393
1394        def printline(*line):
1395            for arg in line:
1396                print(arg, end=" ")
1397            print()
1398
1399    else:
1400
1401        def printline(*line):
1402            pass
1403
1404    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)
1405
1406    # Run format-code.sh, which will in turn call any repo-level formatters.
1407    if FORMAT_CODE:
1408        check_call_cmd(
1409            os.path.join(
1410                WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh"
1411            ),
1412            CODE_SCAN_DIR,
1413        )
1414
1415        # Check to see if any files changed
1416        check_call_cmd(
1417            "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code"
1418        )
1419
1420    # Check if this repo has a supported make infrastructure
1421    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
1422    if not pkg.build_system():
1423        print("No valid build system, exit")
1424        sys.exit(0)
1425
1426    prev_umask = os.umask(000)
1427
1428    # Determine dependencies and add them
1429    dep_added = dict()
1430    dep_added[UNIT_TEST_PKG] = False
1431
1432    # Create dependency tree
1433    dep_tree = DepTree(UNIT_TEST_PKG)
1434    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)
1435
1436    # Reorder Dependency Tree
1437    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
1438        dep_tree.ReorderDeps(pkg_name, regex_str)
1439    if args.verbose:
1440        dep_tree.PrintTree()
1441
1442    install_list = dep_tree.GetInstallList()
1443
1444    # We don't want to treat our package as a dependency
1445    install_list.remove(UNIT_TEST_PKG)
1446
1447    # Install reordered dependencies
1448    for dep in install_list:
1449        build_and_install(dep, False)
1450
1451    # Run package unit tests
1452    build_and_install(UNIT_TEST_PKG, True)
1453
1454    os.umask(prev_umask)
1455
1456    # Run any custom CI scripts the repo has; there can be multiple,
1457    # located anywhere in the repository.
1458    ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR)
1459    if ci_scripts:
1460        os.chdir(CODE_SCAN_DIR)
1461        for ci_script in ci_scripts:
1462            check_call_cmd(ci_script)
1463