xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 87ab57057b1a3b8cc453dcb9352b9ace9cf488cf)
1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10import argparse
11import multiprocessing
12import os
13import platform
14import re
15import shutil
16import subprocess
17import sys
18from subprocess import CalledProcessError, check_call
19from tempfile import TemporaryDirectory
20from urllib.parse import urljoin
21
22from git import Repo
23
24# interpreter is not used directly but this resolves dependency ordering
25# that would be broken if we didn't include it.
26from mesonbuild import interpreter  # noqa: F401
27from mesonbuild import coredata, optinterpreter
28from mesonbuild.mesonlib import OptionKey
29from mesonbuild.mesonlib import version_compare as meson_version_compare
30
31
class DepTree:
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child

        Returns the newly created child node.
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with a matching name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found
        (or when the matching node is the head of the tree).

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        # Children see the path extended with this node's name.
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends at 'name' itself.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every later match that is not already under 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names, so each node's dependencies
        appear before the node itself.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(" " * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
225
226
def check_call_cmd(*cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Raises CalledProcessError if the command exits non-zero.

    Parameter descriptions:
    cmd                 Parameters constructing the complete command
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)
238
239
def clone_pkg(pkg, branch):
    """Clone an openbmc package from gerrit into the WORKSPACE location.

    Returns the working directory of the cloned (or already-present)
    repository. Falls back to the 'master' branch when the requested
    branch cannot be cloned.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    target_dir = os.path.join(WORKSPACE, pkg)
    # An existing .git directory means the package was already cloned.
    if os.path.exists(os.path.join(target_dir, ".git")):
        return target_dir
    remote_url = urljoin("https://gerrit.openbmc.org/openbmc/", pkg)
    os.mkdir(target_dir)
    printline(target_dir, "> git clone", remote_url, branch, "./")
    try:
        # Prefer the requested branch when the remote has it.
        repository = Repo.clone_from(remote_url, target_dir, branch=branch)
    except Exception:
        printline("Input branch not found, default to master")
        repository = Repo.clone_from(remote_url, target_dir, branch="master")
    return repository.working_dir
263    return repo_inst
264
265
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when `make -n <target>` succeeds, False when make reports
    the target cannot be built.
    """
    try:
        # 'make -n' dry-runs the target; it exits non-zero when the target
        # is unknown. DEVNULL avoids opening (and leaking) a devnull handle.
        check_call(
            ["make", "-n", target],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return True
    except CalledProcessError:
        return False
281
282
# Base `make` invocation shared by all build/test steps: parallel,
# load-limited, and with synchronized output.
make_parallel = [
    "make",
    # Run enough jobs to saturate all the cpus
    "-j",
    str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    "-l",
    str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    "-O",
]
294
295
def build_and_install(name, build_for_testing=False):
    """Build the named package, then either test or install it.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd("sudo", "-n", "--", "ldconfig")

    package = Package()
    run_phase = package.test if build_for_testing else package.install
    run_phase()
315
316
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.

    Raises an Exception when no build system is found for a package or when
    a dependency cycle is detected.
    """
    # On the initial (non-recursive) call, start at the head of the tree.
    if not dep_tree:
        dep_tree = head

    # /tmp/depcache holds a single line of already-available dependencies;
    # presumably written earlier in the CI run — TODO confirm the producer.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if not build:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): substring containment, not an exact-name match — a
        # dependency whose name is a substring of any cached entry is
        # skipped; confirm this is intentional.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(
                dep, dep_pkgdir, dep_added, head, branch, new_child
            )
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + name)

    # Mark this package complete once all of its dependencies are processed.
    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
372
373
def run_cppcheck():
    """Run cppcheck over the project's compile database, if one exists.

    Prints a message (rather than raising) when cppcheck reports errors.
    """
    compile_db = os.path.join("build", "compile_commands.json")
    if not os.path.exists(compile_db):
        return None

    with TemporaryDirectory() as cpp_dir:
        # http://cppcheck.sourceforge.net/manual.pdf
        cppcheck_cmd = [
            "cppcheck",
            "-j",
            str(multiprocessing.cpu_count()),
            "--enable=style,performance,portability,missingInclude",
            "--suppress=useStlAlgorithm",
            "--suppress=unusedStructMember",
            "--suppress=postfixOperator",
            "--suppress=unreadVariable",
            "--suppress=knownConditionTrueFalse",
            "--library=googletest",
            "--project=build/compile_commands.json",
            f"--cppcheck-build-dir={cpp_dir}",
        ]
        try:
            check_call_cmd(*cppcheck_cmd)
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
397
398
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform

    Compiles and runs a small, leak-free C program under valgrind; any
    failure (missing tools, compile error, or a valgrind report) means
    "not safe".
    """
    src = "unit-test-vg.c"
    exe = "./unit-test-vg"
    with open(src, "w") as h:
        h.write("#include <errno.h>\n")
        h.write("#include <stdio.h>\n")
        h.write("#include <stdlib.h>\n")
        h.write("#include <string.h>\n")
        h.write("int main() {\n")
        h.write("char *heap_str = malloc(16);\n")
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write("free(heap_str);\n")
        h.write("char errstr[64];\n")
        h.write("strerror_r(EINVAL, errstr, sizeof(errstr));\n")
        h.write('printf("%s\\n", errstr);\n')
        h.write("return res;\n")
        h.write("}\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                ["gcc", "-O2", "-o", exe, src], stdout=devnull, stderr=devnull
            )
            check_call(
                ["valgrind", "--error-exitcode=99", exe],
                stdout=devnull,
                stderr=devnull,
            )
        return True
    except Exception:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # gcc may have failed before producing the executable; guard the
        # cleanup so a missing file cannot raise out of the finally block
        # and mask the function's real result.
        for artifact in (src, exe):
            if os.path.exists(artifact):
                os.remove(artifact)
437
438
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform

    Compiles and runs a trivial program with ASan/UBSan enabled; any
    failure (missing toolchain support, runtime error) means "not safe".
    """
    src = "unit-test-sanitize.c"
    exe = "./unit-test-sanitize"
    with open(src, "w") as h:
        h.write("int main() { return 0; }\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                [
                    "gcc",
                    "-O2",
                    "-fsanitize=address",
                    "-fsanitize=undefined",
                    "-o",
                    exe,
                    src,
                ],
                stdout=devnull,
                stderr=devnull,
            )
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == "ppc64le":
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # gcc may have failed before creating the executable; guard the
        # cleanup so a missing file cannot raise out of the finally block.
        for artifact in (src, exe):
            if os.path.exists(artifact):
                os.remove(artifact)
477
478
def maybe_make_valgrind():
    """Run `make check-valgrind` when the package provides that target.

    Silently skips when the platform cannot run valgrind or the target
    does not exist; raises when the valgrind tests fail.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists("check-valgrind"):
        return

    try:
        check_call_cmd(*(make_parallel + ["check-valgrind"]))
    except CalledProcessError:
        # Dump every per-suite log so the failure is visible in CI output.
        for dirpath, _, filenames in os.walk(os.getcwd()):
            for filename in filenames:
                if re.search("test-suite-[a-z]+.log", filename):
                    check_call_cmd("cat", os.path.join(dirpath, filename))
        raise Exception("Valgrind tests failed")
505
506
def maybe_make_coverage():
    """Run `make check-code-coverage` when the package provides that target.

    Skips silently when the target does not exist; raises when the
    coverage run fails.
    """
    if not make_target_exists("check-code-coverage"):
        return

    # Actually run code coverage
    coverage_cmd = make_parallel + ["check-code-coverage"]
    try:
        check_call_cmd(*coverage_cmd)
    except CalledProcessError:
        raise Exception("Code coverage failed")
522
523
class BuildSystem(object):
    """
    Abstract driver for a package's build system.

    Concrete drivers (Autotools, Meson, CMake, ...) implement the probe,
    dependency-extraction, configure, build, install, test and analyze
    phases on top of this interface, so callers can treat every package
    uniformly and decide whether it is merely installed or also tested
    and analyzed.
    """

    def __init__(self, package, path):
        """Record the build-system-independent properties of the package.

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = path if path else "."
        resolved = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(resolved)
        self.build_for_testing = False

    def probe(self):
        """Return True when this driver can handle the package.

        Implementations typically test for the presence of the build
        system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Return the package's dependencies as a list.

        An empty list must be returned when the package has no
        dependencies. Implementations usually extract this from the build
        configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building.

        Must raise on failure.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing
                           rather than as an installed dependency of the
                           package under test. True generally implies debug
                           info, low optimisation and possibly sanitizers.

        Implementations usually invoke the build tooling to generate
        Makefiles or their equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing.

        Must raise on failure; usually implemented via `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use.

        Must raise on failure; usually implemented via `make` or `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the package's test suite.

        Must raise when building or testing fails; usually implemented via
        `make` or `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase.

        Must raise when analysis fails. Tools needing build-system
        injection (e.g. scan-build) hook in here; build-system-independent
        analyzers may also live here at the cost of some duplication
        between drivers.
        """
        raise NotImplementedError
627
628
class Autotools(BuildSystem):
    """BuildSystem driver for autotools (configure.ac) packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        """A package is autotools-driven when configure.ac exists."""
        return os.path.isfile(os.path.join(self.path, "configure.ac"))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Overrides the dependency-declaring m4 macros so that, after
        running autoconf, the relevant macro argument is bracketed by
        searchable START/END markers, then maps the captured text back to
        package names via the DEPENDENCIES table.
        """
        configure_ac = os.path.join(self.path, "configure.ac")

        # Prepend some special function overrides so we can parse out
        # dependencies
        contents = ""
        for macro in DEPENDENCIES.keys():
            contents += (
                f"m4_define([{macro}], [{macro}_START$"
                f"{DEPENDENCIES_OFFSET[macro] + 1}{macro}_END])\n"
            )
        with open(configure_ac, "rt") as f:
            contents += f.read()

        autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"]
        autoconf_process = subprocess.Popen(
            autoconf_cmdline,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        document = contents.encode("utf-8")
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = f"({macro})_START(.*?){macro}_END"
            for match in re.compile(pattern).finditer(stdout.decode("utf-8")):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(" "):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return "--" + ("enable" if enabled else "disable") + "-" + flag

    def configure(self, build_for_testing):
        """Run the package's bootstrap script (if any) and ./configure.

        Parameters:
        build_for_testing   Enable tests, examples, coverage and valgrind
        """
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature("silent-rules", False),
            self._configure_feature("examples", build_for_testing),
            self._configure_feature("tests", build_for_testing),
            self._configure_feature("itests", INTEGRATION_TEST),
            self._configure_feature("code-coverage", build_for_testing),
            self._configure_feature("valgrind", build_for_testing),
        ]
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]:
            if os.path.exists(bootstrap):
                check_call_cmd("./" + bootstrap)
                break
        check_call_cmd("./configure", *conf_flags)

    def build(self):
        """Build with parallel make."""
        check_call_cmd(*make_parallel)

    def install(self):
        """Install the package system-wide (non-interactive sudo)."""
        check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"]))

    def test(self):
        """Run `make check` (args.repeat times), then valgrind and coverage.

        On failure, dump every test-suite.log found under the current
        directory so CI output shows the actual failures, then raise.
        """
        try:
            cmd = make_parallel + ["check"]
            for _ in range(args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if "test-suite.log" not in files:
                    continue
                check_call_cmd("cat", os.path.join(root, "test-suite.log"))
            raise Exception("Unit tests failed")

    def analyze(self):
        """Static analysis for autotools packages is cppcheck only."""
        run_cppcheck()
742
743
class CMake(BuildSystem):
    """BuildSystem driver for CMake-based packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        """A package is CMake-driven when a top-level CMakeLists.txt exists."""
        return os.path.isfile(os.path.join(self.path, "CMakeLists.txt"))

    def dependencies(self):
        """CMake packages declare no openbmc dependencies we can parse."""
        return []

    def configure(self, build_for_testing):
        """Configure an in-source CMake build, exporting the compile
        database for the analyzers.

        Parameters:
        build_for_testing   Mark the package as being built for testing
        """
        self.build_for_testing = build_for_testing
        if INTEGRATION_TEST:
            check_call_cmd(
                "cmake",
                "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
                "-DITESTS=ON",
                ".",
            )
        else:
            check_call_cmd("cmake", "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", ".")

    def build(self):
        """Build via cmake, fanning out across all cpus."""
        check_call_cmd(
            "cmake",
            "--build",
            ".",
            "--",
            "-j",
            str(multiprocessing.cpu_count()),
        )

    def install(self):
        """Install the package system-wide.

        Uses non-interactive sudo (-n) for consistency with the other
        drivers, so CI fails fast rather than hanging on a password prompt.
        """
        check_call_cmd("sudo", "-n", "--", "cmake", "--install", ".")

    def test(self):
        """Run the ctest suite when the build defines a `test` target."""
        if make_target_exists("test"):
            check_call_cmd("ctest", ".")

    def analyze(self):
        """Run clang-tidy (when configured) plus valgrind, coverage and
        cppcheck."""
        if os.path.isfile(".clang-tidy"):
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                # clang-tidy needs to run on a clang-specific build
                check_call_cmd(
                    "cmake",
                    "-DCMAKE_C_COMPILER=clang",
                    "-DCMAKE_CXX_COMPILER=clang++",
                    "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
                    "-H.",
                    "-B" + build_dir,
                )

                check_call_cmd(
                    "run-clang-tidy", "-header-filter=.*", "-p", build_dir
                )

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
803
804
805class Meson(BuildSystem):
806    def __init__(self, package=None, path=None):
807        super(Meson, self).__init__(package, path)
808
809    def probe(self):
810        return os.path.isfile(os.path.join(self.path, "meson.build"))
811
812    def dependencies(self):
813        meson_build = os.path.join(self.path, "meson.build")
814        if not os.path.exists(meson_build):
815            return []
816
817        found_deps = []
818        for root, dirs, files in os.walk(self.path):
819            if "meson.build" not in files:
820                continue
821            with open(os.path.join(root, "meson.build"), "rt") as f:
822                build_contents = f.read()
823            pattern = r"dependency\('([^']*)'.*?\),?\n"
824            for match in re.finditer(pattern, build_contents):
825                group = match.group(1)
826                maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group)
827                if maybe_dep is not None:
828                    found_deps.append(maybe_dep)
829
830        return found_deps
831
832    def _parse_options(self, options_file):
833        """
834        Returns a set of options defined in the provides meson_options.txt file
835
836        Parameters:
837        options_file        The file containing options
838        """
839        oi = optinterpreter.OptionInterpreter("")
840        oi.process(options_file)
841        return oi.options
842
843    def _configure_boolean(self, val):
844        """
845        Returns the meson flag which signifies the value
846
847        True is true which requires the boolean.
848        False is false which disables the boolean.
849
850        Parameters:
851        val                 The value being converted
852        """
853        if val is True:
854            return "true"
855        elif val is False:
856            return "false"
857        else:
858            raise Exception("Bad meson boolean value")
859
860    def _configure_feature(self, val):
861        """
862        Returns the meson flag which signifies the value
863
864        True is enabled which requires the feature.
865        False is disabled which disables the feature.
866        None is auto which autodetects the feature.
867
868        Parameters:
869        val                 The value being converted
870        """
871        if val is True:
872            return "enabled"
873        elif val is False:
874            return "disabled"
875        elif val is None:
876            return "auto"
877        else:
878            raise Exception("Bad meson feature value")
879
880    def _configure_option(self, opts, key, val):
881        """
882        Returns the meson flag which signifies the value
883        based on the type of the opt
884
885        Parameters:
886        opt                 The meson option which we are setting
887        val                 The value being converted
888        """
889        if isinstance(opts[key], coredata.UserBooleanOption):
890            str_val = self._configure_boolean(val)
891        elif isinstance(opts[key], coredata.UserFeatureOption):
892            str_val = self._configure_feature(val)
893        else:
894            raise Exception("Unknown meson option type")
895        return "-D{}={}".format(key, str_val)
896
897    def configure(self, build_for_testing):
898        self.build_for_testing = build_for_testing
899        meson_options = {}
900        if os.path.exists("meson_options.txt"):
901            meson_options = self._parse_options("meson_options.txt")
902        meson_flags = [
903            "-Db_colorout=never",
904            "-Dwerror=true",
905            "-Dwarning_level=3",
906        ]
907        if build_for_testing:
908            meson_flags.append("--buildtype=debug")
909        else:
910            meson_flags.append("--buildtype=debugoptimized")
911        if OptionKey("tests") in meson_options:
912            meson_flags.append(
913                self._configure_option(
914                    meson_options, OptionKey("tests"), build_for_testing
915                )
916            )
917        if OptionKey("examples") in meson_options:
918            meson_flags.append(
919                self._configure_option(
920                    meson_options, OptionKey("examples"), build_for_testing
921                )
922            )
923        if OptionKey("itests") in meson_options:
924            meson_flags.append(
925                self._configure_option(
926                    meson_options, OptionKey("itests"), INTEGRATION_TEST
927                )
928            )
929        if MESON_FLAGS.get(self.package) is not None:
930            meson_flags.extend(MESON_FLAGS.get(self.package))
931        try:
932            check_call_cmd(
933                "meson", "setup", "--reconfigure", "build", *meson_flags
934            )
935        except Exception:
936            shutil.rmtree("build", ignore_errors=True)
937            check_call_cmd("meson", "setup", "build", *meson_flags)
938
939    def build(self):
940        check_call_cmd("ninja", "-C", "build")
941
942    def install(self):
943        check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install")
944
945    def test(self):
946        # It is useful to check various settings of the meson.build file
947        # for compatibility, such as meson_version checks.  We shouldn't
948        # do this in the configure path though because it affects subprojects
949        # and dependencies as well, but we only want this applied to the
950        # project-under-test (otherwise an upstream dependency could fail
951        # this check without our control).
952        self._extra_meson_checks()
953
954        try:
955            test_args = ("--repeat", str(args.repeat), "-C", "build")
956            check_call_cmd("meson", "test", "--print-errorlogs", *test_args)
957
958        except CalledProcessError:
959            raise Exception("Unit tests failed")
960
961    def _setup_exists(self, setup):
962        """
963        Returns whether the meson build supports the named test setup.
964
965        Parameter descriptions:
966        setup              The setup target to check
967        """
968        try:
969            with open(os.devnull, "w"):
970                output = subprocess.check_output(
971                    [
972                        "meson",
973                        "test",
974                        "-C",
975                        "build",
976                        "--setup",
977                        setup,
978                        "-t",
979                        "0",
980                    ],
981                    stderr=subprocess.STDOUT,
982                )
983        except CalledProcessError as e:
984            output = e.output
985        output = output.decode("utf-8")
986        return not re.search("Test setup .* not found from project", output)
987
988    def _maybe_valgrind(self):
989        """
990        Potentially runs the unit tests through valgrind for the package
991        via `meson test`. The package can specify custom valgrind
992        configurations by utilizing add_test_setup() in a meson.build
993        """
994        if not is_valgrind_safe():
995            sys.stderr.write("###### Skipping valgrind ######\n")
996            return
997        try:
998            if self._setup_exists("valgrind"):
999                check_call_cmd(
1000                    "meson",
1001                    "test",
1002                    "-t",
1003                    "10",
1004                    "-C",
1005                    "build",
1006                    "--print-errorlogs",
1007                    "--setup",
1008                    "valgrind",
1009                )
1010            else:
1011                check_call_cmd(
1012                    "meson",
1013                    "test",
1014                    "-t",
1015                    "10",
1016                    "-C",
1017                    "build",
1018                    "--print-errorlogs",
1019                    "--wrapper",
1020                    "valgrind",
1021                )
1022        except CalledProcessError:
1023            raise Exception("Valgrind tests failed")
1024
1025    def analyze(self):
1026        self._maybe_valgrind()
1027
1028        # Run clang-tidy only if the project has a configuration
1029        if os.path.isfile(".clang-tidy"):
1030            os.environ["CXX"] = "clang++"
1031            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
1032                check_call_cmd("meson", "setup", build_dir)
1033                if not os.path.isfile(".openbmc-no-clang"):
1034                    check_call_cmd("meson", "compile", "-C", build_dir)
1035                try:
1036                    check_call_cmd(
1037                        "run-clang-tidy", "-fix", "-format", "-p", build_dir
1038                    )
1039                except subprocess.CalledProcessError:
1040                    check_call_cmd(
1041                        "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff"
1042                    )
1043                    raise
1044        # Run the basic clang static analyzer otherwise
1045        else:
1046            check_call_cmd("ninja", "-C", "build", "scan-build")
1047
1048        # Run tests through sanitizers
1049        # b_lundef is needed if clang++ is CXX since it resolves the
1050        # asan symbols at runtime only. We don't want to set it earlier
1051        # in the build process to ensure we don't have undefined
1052        # runtime code.
1053        if is_sanitize_safe():
1054            check_call_cmd(
1055                "meson",
1056                "configure",
1057                "build",
1058                "-Db_sanitize=address,undefined",
1059                "-Db_lundef=false",
1060            )
1061            check_call_cmd(
1062                "meson",
1063                "test",
1064                "-C",
1065                "build",
1066                "--print-errorlogs",
1067                "--logbase",
1068                "testlog-ubasan",
1069            )
1070            # TODO: Fix memory sanitizer
1071            # check_call_cmd('meson', 'configure', 'build',
1072            #                '-Db_sanitize=memory')
1073            # check_call_cmd('meson', 'test', '-C', 'build'
1074            #                '--logbase', 'testlog-msan')
1075            check_call_cmd("meson", "configure", "build", "-Db_sanitize=none")
1076        else:
1077            sys.stderr.write("###### Skipping sanitizers ######\n")
1078
1079        # Run coverage checks
1080        check_call_cmd("meson", "configure", "build", "-Db_coverage=true")
1081        self.test()
1082        # Only build coverage HTML if coverage files were produced
1083        for root, dirs, files in os.walk("build"):
1084            if any([f.endswith(".gcda") for f in files]):
1085                check_call_cmd("ninja", "-C", "build", "coverage-html")
1086                break
1087        check_call_cmd("meson", "configure", "build", "-Db_coverage=false")
1088        run_cppcheck()
1089
1090    def _extra_meson_checks(self):
1091        with open(os.path.join(self.path, "meson.build"), "rt") as f:
1092            build_contents = f.read()
1093
1094        # Find project's specified meson_version.
1095        meson_version = None
1096        pattern = r"meson_version:[^']*'([^']*)'"
1097        for match in re.finditer(pattern, build_contents):
1098            group = match.group(1)
1099            meson_version = group
1100
1101        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1102        # identify this.  Add to our unit-test checks so that we don't
1103        # get a meson.build missing this.
1104        pattern = r"'cpp_std=c\+\+20'"
1105        for match in re.finditer(pattern, build_contents):
1106            if not meson_version or not meson_version_compare(
1107                meson_version, ">=0.57"
1108            ):
1109                raise Exception(
1110                    "C++20 support requires specifying in meson.build: "
1111                    + "meson_version: '>=0.57'"
1112                )
1113
1114
class Package(object):
    """
    A repository under test together with the build systems (Meson,
    Autotools, CMake) that are able to handle it.
    """

    def __init__(self, name=None, path=None):
        # Candidate build-system classes, in probe/preference order.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Lazily yield an instance of each build system whose probe passes."""
        for system in self.supported:
            candidate = system(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """
        Return a build system for this package, or None when none apply.

        Parameter descriptions:
        preferred          Optional build-system class to pick from the
                           detected systems
        """
        systems = list(self.build_systems())

        if not systems:
            return None

        if preferred:
            by_type = {type(system): system for system in systems}
            return by_type[preferred]

        return systems[0]

    def install(self, system=None):
        """Configure, build and install the package (non-test build)."""
        system = system or self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Full build/install/test (and optional analysis) for one system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Run the test flow against every applicable build system."""
        for system in self.build_systems():
            self._test_one(system)
1156
1157
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Directories that are meson subprojects backed by a .wrap file are
    skipped, so vendored code is never searched.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    targets = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        if os.path.basename(root) == "subprojects":
            for entry in files:
                subproject, ext = os.path.splitext(entry)
                if ext == ".wrap" and subproject in dirs:
                    # don't find files in meson subprojects with wraps
                    dirs.remove(subproject)
        matches.extend(
            os.path.join(root, target) for target in targets if target in files
        )
    return matches
1184
1185
if __name__ == "__main__":
    # Per-repository autotools configure flags, appended when building
    # that dependency.
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        "phosphor-logging": [
            "--enable-metadata-processing",
            "--enable-openpower-pel-extension",
            "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml",
        ]
    }

    # Per-repository meson flags, appended after the CI's common flags
    # (consumed by Meson.configure via MESON_FLAGS.get()).
    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
        "phosphor-dbus-interfaces": [
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
        "phosphor-logging": ["-Dopenpower-pel-extension=enabled"],
    }

    # Maps autoconf macro arguments (library/header/program/pkg-config
    # names found in configure.ac) to the OpenBMC repository providing
    # them, so dependencies can be discovered and built first.
    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"},
        "AC_CHECK_HEADER": {
            "host-ipmid": "phosphor-host-ipmid",
            "blobs-ipmid": "phosphor-ipmi-blobs",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging/log.hpp": "phosphor-logging",
        },
        "AC_PATH_PROG": {"sdbus++": "sdbusplus"},
        "PKG_CHECK_MODULES": {
            "phosphor-dbus-interfaces": "phosphor-dbus-interfaces",
            "libipmid": "phosphor-host-ipmid",
            "libipmid-host": "phosphor-host-ipmid",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging": "phosphor-logging",
            "phosphor-snmp": "phosphor-snmp",
            "ipmiblob": "ipmi-blob-tool",
            "hei": "openpower-libhei",
            "phosphor-ipmi-blobs": "phosphor-ipmi-blobs",
            "libcr51sign": "google-misc",
        },
    }

    # Which positional argument of each macro carries the name we key
    # DEPENDENCIES on.
    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        "AC_CHECK_LIB": 0,
        "AC_CHECK_HEADER": 0,
        "AC_PATH_PROG": 1,
        "PKG_CHECK_MODULES": 1,
    }

    # Dependency-tree ordering overrides: dependencies whose name matches
    # the regex are installed before the keyed repository.
    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"}
1245
    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-w",
        "--workspace",
        dest="WORKSPACE",
        required=True,
        help="Workspace directory location(i.e. /home)",
    )
    parser.add_argument(
        "-p",
        "--package",
        dest="PACKAGE",
        required=True,
        help="OpenBMC package to be unit tested",
    )
    parser.add_argument(
        "-t",
        "--test-only",
        dest="TEST_ONLY",
        action="store_true",
        required=False,
        default=False,
        help="Only run test cases, no other validation",
    )
    # --integration-tests / --no-integration-tests toggle one flag;
    # integration tests are enabled by default.
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument(
        "--integration-tests",
        dest="INTEGRATION_TEST",
        action="store_true",
        required=False,
        default=True,
        help="Enable integration tests [default].",
    )
    arg_inttests.add_argument(
        "--no-integration-tests",
        dest="INTEGRATION_TEST",
        action="store_false",
        required=False,
        help="Disable integration tests.",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Print additional package status messages",
    )
    parser.add_argument(
        "-r", "--repeat", help="Repeat tests N times", type=int, default=1
    )
    parser.add_argument(
        "-b",
        "--branch",
        dest="BRANCH",
        required=False,
        help="Branch to target for dependent repositories",
        default="master",
    )
    parser.add_argument(
        "-n",
        "--noformat",
        dest="FORMAT",
        action="store_false",
        required=False,
        help="Whether or not to run format code",
    )
    args = parser.parse_args(sys.argv[1:])
    # Copy parsed arguments into module-level globals; the build-system
    # classes above read these directly (e.g. args.repeat, TEST_ONLY,
    # INTEGRATION_TEST).
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() prints progress messages only in verbose mode;
    # otherwise it is a no-op.
    if args.verbose:

        def printline(*line):
            for arg in line:
                print(arg, end=" ")
            print()

    else:

        def printline(*line):
            pass
1330
    # Absolute path of the repository under test; also read by the
    # analysis code above for git diffs.
    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # Run format-code.sh, which will in turn call any repo-level formatters.
    if FORMAT_CODE:
        check_call_cmd(
            os.path.join(
                WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh"
            ),
            CODE_SCAN_DIR,
        )

        # Check to see if any files changed
        check_call_cmd(
            "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code"
        )

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask (000 is the zero literal) so dependency installs
    # are accessible to all users; restored after the test run below.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1390