xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 6d237f4fa15142b7904c91278efcc9fc1ebdfe74)
1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10import argparse
11import json
12import multiprocessing
13import os
14import platform
15import re
16import shutil
17import subprocess
18import sys
19import tempfile
20from subprocess import CalledProcessError, check_call
21from tempfile import TemporaryDirectory
22from urllib.parse import urljoin
23
24from git import Repo
25
26# interpreter is not used directly but this resolves dependency ordering
27# that would be broken if we didn't include it.
28from mesonbuild import interpreter  # noqa: F401
29from mesonbuild import optinterpreter, options
30from mesonbuild.mesonlib import version_compare as meson_version_compare
31from mesonbuild.options import OptionKey, OptionStore
32
33
class DepTree:
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child

        Returns the newly created child node.
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child node with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        # A node matches on an exact name or when the regex matches its name.
        if self.name == name or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        # Hoist the invariant path prefix out of the child loop.
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            # 'name' is not in the tree; nothing to reorder.
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends at the 'name' node itself.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every later matching node under 'name', unless it is
        # already within the 'name' subtree.
        for path in paths[name_index + 1 :]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names, so children (dependencies)
        precede their parents (dependents).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(" " * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
227
228
def check_call_cmd(*cmd, **kwargs):
    """
    Print the current working directory and the command about to run, then
    execute the command via check_call.

    Parameter descriptions:
    cmd                 Components of the complete command to execute
    kwargs              Extra keyword arguments forwarded to check_call
    """
    cwd = os.getcwd()
    printline(cwd, ">", " ".join(cmd))
    check_call(cmd, **kwargs)
240
241
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory of the (possibly pre-existing) checkout.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, ".git")):
        # Already cloned on a previous run; reuse the existing checkout.
        return pkg_dir
    pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # Prefer the requested branch when it exists on the remote.
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Fall back to master when the requested branch is missing.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return clone.working_dir
266
267
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when `make -n <target>` succeeds, False otherwise.
    """
    try:
        # A dry run (-n) exits non-zero when make has no rule for the target.
        # subprocess.DEVNULL avoids manually opening/closing os.devnull.
        check_call(
            ["make", "-n", target],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return True
    except CalledProcessError:
        return False
283
284
# Shared `make` argument vector: parallelize across every CPU, cap the load
# average at the CPU count, and synchronize output per job.
_cpu_count_str = str(multiprocessing.cpu_count())
make_parallel = [
    "make",
    # Run enough jobs to saturate all the cpus
    "-j",
    _cpu_count_str,
    # Don't start more jobs if the load avg is too high
    "-l",
    _cpu_count_str,
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    "-O",
]
296
297
def build_and_install(name, build_for_testing=False):
    """
    Build and install the package into the environment. Optionally build the
    examples and test cases for the package instead of a plain install.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd("sudo", "-n", "--", "ldconfig")

    pkg = Package()
    # Test builds run the package's test phase; otherwise just install it.
    (pkg.test if build_for_testing else pkg.install)()
317
318
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy.

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict; a package maps to True once it and
    all of its dependencies have been processed.
    """
    if not dep_tree:
        dep_tree = head

    # /tmp/depcache is read as a single line; presumably it lists packages
    # already present in the environment that can be skipped — TODO confirm
    # the producer's format.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if not build:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): substring test against the cache line — a dependency
        # whose name is contained inside another cached name would also be
        # skipped; verify the cache contents make this safe.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(
                dep, dep_pkgdir, dep_added, head, branch, new_child
            )
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure: we re-entered a package whose
                # processing has not finished (still marked False).
                raise Exception("Cyclic dependencies found in " + name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
374
375
def run_cppcheck():
    """
    Run cppcheck against the build's compile database, unless cppcheck was
    disabled or no compile database exists. Prints a message (rather than
    failing) when cppcheck reports errors.
    """
    compile_db = os.path.join("build", "compile_commands.json")
    if NO_CPPCHECK or not os.path.exists(compile_db):
        return None

    cppcheck_args = [
        "cppcheck",
        "-j",
        str(multiprocessing.cpu_count()),
        "--enable=style,performance,portability,missingInclude",
        "--inline-suppr",
        "--suppress=useStlAlgorithm",
        "--suppress=unusedStructMember",
        "--suppress=postfixOperator",
        "--suppress=unreadVariable",
        "--suppress=knownConditionTrueFalse",
        "--library=googletest",
        "--project=build/compile_commands.json",
    ]

    with TemporaryDirectory() as cpp_dir:
        # http://cppcheck.sourceforge.net/manual.pdf
        try:
            check_call_cmd(
                *cppcheck_args, f"--cppcheck-build-dir={cpp_dir}"
            )
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
403
404
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform
    """
    # Small C program exercising malloc/strcmp/strerror_r; if valgrind flags
    # errors on it (exit 99), the platform's valgrind is unreliable.
    test_program = (
        "#include <errno.h>\n"
        "#include <stdio.h>\n"
        "#include <stdlib.h>\n"
        "#include <string.h>\n"
        "int main() {\n"
        "char *heap_str = malloc(16);\n"
        'strcpy(heap_str, "RandString");\n'
        'int res = strcmp("RandString", heap_str);\n'
        "free(heap_str);\n"
        "char errstr[64];\n"
        "strerror_r(EINVAL, errstr, sizeof(errstr));\n"
        'printf("%s\\n", errstr);\n'
        "return res;\n"
        "}\n"
    )
    with tempfile.TemporaryDirectory() as temp:
        src = os.path.join(temp, "unit-test-vg.c")
        exe = os.path.join(temp, "unit-test-vg")
        with open(src, "w") as handle:
            handle.write(test_program)
        check_call(
            ["gcc", "-O2", "-o", exe, src],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        try:
            check_call(
                ["valgrind", "--error-exitcode=99", exe],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )
        except CalledProcessError:
            sys.stderr.write("###### Platform is not valgrind safe ######\n")
            return False
        return True
442
443
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform
    """
    src = "unit-test-sanitize.c"
    exe = "./unit-test-sanitize"
    with open(src, "w") as h:
        h.write("int main() { return 0; }\n")
    try:
        check_call(
            [
                "gcc",
                "-O2",
                "-fsanitize=address",
                "-fsanitize=undefined",
                "-o",
                exe,
                src,
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        check_call([exe], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == "ppc64le":
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The executable never exists when compilation fails; an
        # unconditional os.remove(exe) would then raise FileNotFoundError
        # out of the finally block and mask the intended False return.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
482
483
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Raises Exception when the valgrind tests fail.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists("check-valgrind"):
        return

    try:
        cmd = make_parallel + ["check-valgrind"]
        check_call_cmd(*cmd)
    except CalledProcessError:
        # Dump the test suite logs to aid debugging. The '.' is escaped so
        # only literal ".log" suffixes match (the old unescaped pattern
        # matched any character in that position).
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                if re.search(r"test-suite-[a-z]+\.log", f) is None:
                    continue
                check_call_cmd("cat", os.path.join(root, f))
        raise Exception("Valgrind tests failed")
510
511
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.

    Raises Exception when the coverage run fails.
    """
    if not make_target_exists("check-code-coverage"):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ["check-code-coverage"]))
    except CalledProcessError:
        raise Exception("Code coverage failed")
527
528
class BuildSystem(object):
    """
    Abstract interface over a package's build system.

    Concrete drivers (Autotools, Meson, CMake, ...) implement these hooks so
    a package can be configured, built, installed, tested and analyzed
    without the caller knowing which build system is in use. Keeping the
    phases separate lets a package be merely installed as a dependency, or
    additionally tested and analyzed when it is the package under test.
    """

    def __init__(self, package, path):
        """Initialise state that is independent of the concrete build system.

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = path or "."
        # Fall back to the directory name of the resolved path.
        self.package = package or os.path.basename(
            os.path.realpath(self.path)
        )
        self.build_for_testing = False

    def probe(self):
        """Report whether this driver can handle the package.

        Returns True when the driver recognises the package's build system,
        usually by checking for its configuration file(s); False otherwise.
        """
        raise NotImplementedError

    def dependencies(self):
        """Report the package's dependencies.

        Returns a list of dependency names extracted from the build system
        configuration; an empty list when the package needs nothing.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source tree ready for building.

        Keyword arguments:
        build_for_testing: When True, configure for testing rather than for
                           installation as a dependency — typically debug
                           info, low optimisation and possibly sanitizers.

        Raises an exception on configuration failure. Implementations
        normally invoke the build system tooling to generate Makefiles or
        their equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Compile the software ready for installation and/or testing.

        Raises an exception on build failure; typically implemented by
        invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the built software for use by other packages.

        Raises an exception on installation failure; typically implemented
        by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and execute the package's test suite.

        Raises an exception when building or running the tests fails.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported static/dynamic analysis tools on the codebase.

        Raises an exception on analysis failure. Some analyzers (e.g.
        scan-build) must be injected into the build, which is why this hook
        lives on the build system driver.
        """
        raise NotImplementedError
632
633
class Autotools(BuildSystem):
    """BuildSystem driver for Autotools (configure.ac based) packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An Autotools package is identified by a top-level configure.ac.
        return os.path.isfile(os.path.join(self.path, "configure.ac"))

    def dependencies(self):
        """Extract openbmc dependencies by instrumenting configure.ac.

        Redefines each dependency-declaring m4 macro (the DEPENDENCIES keys)
        so that running autoconf expands it into MACRO_START ... MACRO_END
        markers around the macro argument selected by DEPENDENCIES_OFFSET.
        The marked text is then matched against the DEPENDENCIES table to
        map it back to openbmc package names.
        """
        configure_ac = os.path.join(self.path, "configure.ac")

        contents = ""
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            contents += (
                "m4_define(["
                + macro
                + "], ["
                + macro
                + "_START$"
                + str(DEPENDENCIES_OFFSET[macro] + 1)
                + macro
                + "_END])\n"
            )
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Run autoconf over the instrumented input supplied on stdin ("-").
        autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"]
        autoconf_process = subprocess.Popen(
            autoconf_cmdline,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        document = contents.encode("utf-8")
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = "(" + macro + ")_START(.*?)" + macro + "_END"
            for match in re.compile(pattern).finditer(stdout.decode("utf-8")):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(" "):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return "--" + ("enable" if enabled else "disable") + "-" + flag

    def configure(self, build_for_testing):
        """Bootstrap (if a bootstrap script exists) and run ./configure."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature("silent-rules", False),
            self._configure_feature("examples", build_for_testing),
            self._configure_feature("tests", build_for_testing),
            self._configure_feature("itests", INTEGRATION_TEST),
        ]
        conf_flags.extend(
            [
                self._configure_feature("code-coverage", False),
                self._configure_feature("valgrind", build_for_testing),
            ]
        )
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found, in preference order.
        for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]:
            if os.path.exists(bootstrap):
                check_call_cmd("./" + bootstrap)
                break
        check_call_cmd("./configure", *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"]))
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        """Run `make check` (repeated args.repeat times), then optional
        valgrind and coverage targets; dump logs and raise on failure."""
        try:
            cmd = make_parallel + ["check"]
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            # Surface any test-suite.log files to aid debugging.
            for root, _, files in os.walk(os.getcwd()):
                if "test-suite.log" not in files:
                    continue
                check_call_cmd("cat", os.path.join(root, "test-suite.log"))
            raise Exception("Unit tests failed")

    def analyze(self):
        run_cppcheck()
748
749
class CMake(BuildSystem):
    """BuildSystem driver for CMake (CMakeLists.txt based) packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        # A CMake package is identified by a top-level CMakeLists.txt.
        return os.path.isfile(os.path.join(self.path, "CMakeLists.txt"))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        """Run an in-tree cmake configure, enabling itests when requested."""
        self.build_for_testing = build_for_testing
        flags = [
            "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
            "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'",
        ]
        if INTEGRATION_TEST:
            flags.append("-DITESTS=ON")
        check_call_cmd("cmake", *flags, ".")

    def build(self):
        check_call_cmd(
            "cmake",
            "--build",
            ".",
            "--",
            "-j",
            str(multiprocessing.cpu_count()),
        )

    def install(self):
        check_call_cmd("sudo", "cmake", "--install", ".")
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        # Only run ctest when the generated makefile has a "test" target.
        if make_target_exists("test"):
            check_call_cmd("ctest", ".")

    def analyze(self):
        if os.path.isfile(".clang-tidy"):
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                # clang-tidy needs to run on a clang-specific build
                check_call_cmd(
                    "cmake",
                    "-DCMAKE_C_COMPILER=clang",
                    "-DCMAKE_CXX_COMPILER=clang++",
                    "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
                    "-H.",
                    "-B" + build_dir,
                )
                check_call_cmd(
                    "run-clang-tidy", "-header-filter=.*", "-p", build_dir
                )

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
816
817
818class Meson(BuildSystem):
819    @staticmethod
820    def _project_name(path):
821        doc = subprocess.check_output(
822            ["meson", "introspect", "--projectinfo", path],
823            stderr=subprocess.STDOUT,
824        ).decode("utf-8")
825        return json.loads(doc)["descriptive_name"]
826
827    def __init__(self, package=None, path=None):
828        super(Meson, self).__init__(package, path)
829
830    def probe(self):
831        return os.path.isfile(os.path.join(self.path, "meson.build"))
832
833    def dependencies(self):
834        meson_build = os.path.join(self.path, "meson.build")
835        if not os.path.exists(meson_build):
836            return []
837
838        found_deps = []
839        for root, dirs, files in os.walk(self.path):
840            if "meson.build" not in files:
841                continue
842            with open(os.path.join(root, "meson.build"), "rt") as f:
843                build_contents = f.read()
844            pattern = r"dependency\('([^']*)'.*?\),?"
845            for match in re.finditer(pattern, build_contents):
846                group = match.group(1)
847                maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group)
848                if maybe_dep is not None:
849                    found_deps.append(maybe_dep)
850
851        return found_deps
852
853    def _parse_options(self, options_file):
854        """
855        Returns a set of options defined in the provides meson_options.txt file
856
857        Parameters:
858        options_file        The file containing options
859        """
860        store = OptionStore(is_cross=False)
861        oi = optinterpreter.OptionInterpreter(store, "")
862        oi.process(options_file)
863        return oi.options
864
865    def _configure_boolean(self, val):
866        """
867        Returns the meson flag which signifies the value
868
869        True is true which requires the boolean.
870        False is false which disables the boolean.
871
872        Parameters:
873        val                 The value being converted
874        """
875        if val is True:
876            return "true"
877        elif val is False:
878            return "false"
879        else:
880            raise Exception("Bad meson boolean value")
881
882    def _configure_feature(self, val):
883        """
884        Returns the meson flag which signifies the value
885
886        True is enabled which requires the feature.
887        False is disabled which disables the feature.
888        None is auto which autodetects the feature.
889
890        Parameters:
891        val                 The value being converted
892        """
893        if val is True:
894            return "enabled"
895        elif val is False:
896            return "disabled"
897        elif val is None:
898            return "auto"
899        else:
900            raise Exception("Bad meson feature value")
901
902    def _configure_option(self, opts, key, val):
903        """
904        Returns the meson flag which signifies the value
905        based on the type of the opt
906
907        Parameters:
908        opt                 The meson option which we are setting
909        val                 The value being converted
910        """
911        if isinstance(opts[key], options.UserBooleanOption):
912            str_val = self._configure_boolean(val)
913        elif isinstance(opts[key], options.UserFeatureOption):
914            str_val = self._configure_feature(val)
915        else:
916            raise Exception("Unknown meson option type")
917        return "-D{}={}".format(key, str_val)
918
919    def get_configure_flags(self, build_for_testing):
920        self.build_for_testing = build_for_testing
921        meson_options = {}
922        if os.path.exists("meson.options"):
923            meson_options = self._parse_options("meson.options")
924        elif os.path.exists("meson_options.txt"):
925            meson_options = self._parse_options("meson_options.txt")
926        meson_flags = [
927            "-Db_colorout=never",
928            "-Dwerror=true",
929            "-Dwarning_level=3",
930            "-Dcpp_args='-DBOOST_USE_VALGRIND'",
931        ]
932        if build_for_testing:
933            # -Ddebug=true -Doptimization=g is helpful for abi-dumper but isn't a combination that
934            # is supported by meson's build types. Configure it manually.
935            meson_flags.append("-Ddebug=true")
936            meson_flags.append("-Doptimization=g")
937        else:
938            meson_flags.append("--buildtype=debugoptimized")
939        if OptionKey("tests") in meson_options:
940            meson_flags.append(
941                self._configure_option(
942                    meson_options, OptionKey("tests"), build_for_testing
943                )
944            )
945        if OptionKey("examples") in meson_options:
946            meson_flags.append(
947                self._configure_option(
948                    meson_options, OptionKey("examples"), build_for_testing
949                )
950            )
951        if OptionKey("itests") in meson_options:
952            meson_flags.append(
953                self._configure_option(
954                    meson_options, OptionKey("itests"), INTEGRATION_TEST
955                )
956            )
957        if MESON_FLAGS.get(self.package) is not None:
958            meson_flags.extend(MESON_FLAGS.get(self.package))
959        return meson_flags
960
    def configure(self, build_for_testing):
        """
        Configure the meson 'build' directory.

        A fast "--reconfigure" of any existing build tree is attempted
        first; if that fails, the tree is deleted and set up from scratch.

        Parameter descriptions:
        build_for_testing  Whether to enable test-oriented options
        """
        meson_flags = self.get_configure_flags(build_for_testing)
        try:
            check_call_cmd(
                "meson", "setup", "--reconfigure", "build", *meson_flags
            )
        except Exception:
            shutil.rmtree("build", ignore_errors=True)
            check_call_cmd("meson", "setup", "build", *meson_flags)

        # The project name is read back from the configured build tree,
        # so it can only be recorded after setup succeeds.
        self.package = Meson._project_name("build")
972
    def build(self):
        """Compile the package with ninja in the 'build' directory."""
        check_call_cmd("ninja", "-C", "build")
975
    def install(self):
        """
        Install the built package system-wide and refresh the dynamic
        linker cache; both steps run under passwordless sudo.
        """
        check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install")
        check_call_cmd("sudo", "-n", "--", "ldconfig")
979
980    def test(self):
981        # It is useful to check various settings of the meson.build file
982        # for compatibility, such as meson_version checks.  We shouldn't
983        # do this in the configure path though because it affects subprojects
984        # and dependencies as well, but we only want this applied to the
985        # project-under-test (otherwise an upstream dependency could fail
986        # this check without our control).
987        self._extra_meson_checks()
988
989        try:
990            test_args = ("--repeat", str(args.repeat), "-C", "build")
991            check_call_cmd("meson", "test", "--print-errorlogs", *test_args)
992
993        except CalledProcessError:
994            raise Exception("Unit tests failed")
995
996    def _setup_exists(self, setup):
997        """
998        Returns whether the meson build supports the named test setup.
999
1000        Parameter descriptions:
1001        setup              The setup target to check
1002        """
1003        try:
1004            with open(os.devnull, "w"):
1005                output = subprocess.check_output(
1006                    [
1007                        "meson",
1008                        "test",
1009                        "-C",
1010                        "build",
1011                        "--setup",
1012                        "{}:{}".format(self.package, setup),
1013                        "__likely_not_a_test__",
1014                    ],
1015                    stderr=subprocess.STDOUT,
1016                )
1017        except CalledProcessError as e:
1018            output = e.output
1019        output = output.decode("utf-8")
1020        return not re.search("Unknown test setup '[^']+'[.]", output)
1021
1022    def _maybe_valgrind(self):
1023        """
1024        Potentially runs the unit tests through valgrind for the package
1025        via `meson test`. The package can specify custom valgrind
1026        configurations by utilizing add_test_setup() in a meson.build
1027        """
1028        if not is_valgrind_safe():
1029            sys.stderr.write("###### Skipping valgrind ######\n")
1030            return
1031        try:
1032            if self._setup_exists("valgrind"):
1033                check_call_cmd(
1034                    "meson",
1035                    "test",
1036                    "-t",
1037                    "10",
1038                    "-C",
1039                    "build",
1040                    "--print-errorlogs",
1041                    "--setup",
1042                    "{}:valgrind".format(self.package),
1043                )
1044            else:
1045                check_call_cmd(
1046                    "meson",
1047                    "test",
1048                    "-t",
1049                    "10",
1050                    "-C",
1051                    "build",
1052                    "--print-errorlogs",
1053                    "--wrapper",
1054                    "valgrind --error-exitcode=1",
1055                )
1056        except CalledProcessError:
1057            raise Exception("Valgrind tests failed")
1058
    def analyze(self):
        """
        Run the post-test analysis suite: valgrind, clang-tidy (or
        scan-build), address/UB sanitizers, coverage, and cppcheck.
        """
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile(".clang-tidy"):
            clang_env = os.environ.copy()
            clang_env["CC"] = "clang"
            clang_env["CXX"] = "clang++"
            # Clang-20 currently has some issue with libstdcpp's
            # std::forward_like which results in a bunch of compile errors.
            # Adding -fno-builtin-std-forward_like causes them to go away.
            clang_env["CXXFLAGS"] = "-fno-builtin-std-forward_like"
            clang_env["CC_LD"] = "lld"
            clang_env["CXX_LD"] = "lld"
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                check_call_cmd("meson", "setup", build_dir, env=clang_env)
                if not os.path.isfile(".openbmc-no-clang"):
                    check_call_cmd(
                        "meson", "compile", "-C", build_dir, env=clang_env
                    )
                try:
                    check_call_cmd(
                        "ninja",
                        "-C",
                        build_dir,
                        "clang-tidy-fix",
                        env=clang_env,
                    )
                except subprocess.CalledProcessError:
                    # Show what clang-tidy changed before re-raising, so
                    # the CI log captures the suggested fixes.
                    check_call_cmd(
                        "git",
                        "-C",
                        CODE_SCAN_DIR,
                        "--no-pager",
                        "diff",
                        env=clang_env,
                    )
                    raise
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd("ninja", "-C", "build", "scan-build")

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            meson_flags = self.get_configure_flags(self.build_for_testing)
            meson_flags.append("-Db_sanitize=address,undefined")
            try:
                check_call_cmd(
                    "meson", "setup", "--reconfigure", "build", *meson_flags
                )
            except Exception:
                shutil.rmtree("build", ignore_errors=True)
                check_call_cmd("meson", "setup", "build", *meson_flags)
            check_call_cmd(
                "meson",
                "test",
                "-C",
                "build",
                "--print-errorlogs",
                "--logbase",
                "testlog-ubasan",
            )
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Reconfigure without sanitizers so the later coverage run
            # isn't affected by them.
            meson_flags = [
                s.replace(
                    "-Db_sanitize=address,undefined", "-Db_sanitize=none"
                )
                for s in meson_flags
            ]
            try:
                check_call_cmd(
                    "meson", "setup", "--reconfigure", "build", *meson_flags
                )
            except Exception:
                shutil.rmtree("build", ignore_errors=True)
                check_call_cmd("meson", "setup", "build", *meson_flags)
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd("meson", "configure", "build", "-Db_coverage=true")
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk("build"):
            if any([f.endswith(".gcda") for f in files]):
                check_call_cmd("ninja", "-C", "build", "coverage-html")
                break
        check_call_cmd("meson", "configure", "build", "-Db_coverage=false")
        run_cppcheck()
1156
1157    def _extra_meson_checks(self):
1158        with open(os.path.join(self.path, "meson.build"), "rt") as f:
1159            build_contents = f.read()
1160
1161        # Find project's specified meson_version.
1162        meson_version = None
1163        pattern = r"meson_version:[^']*'([^']*)'"
1164        for match in re.finditer(pattern, build_contents):
1165            group = match.group(1)
1166            meson_version = group
1167
1168        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1169        # identify this.  Add to our unit-test checks so that we don't
1170        # get a meson.build missing this.
1171        pattern = r"'cpp_std=c\+\+20'"
1172        for match in re.finditer(pattern, build_contents):
1173            if not meson_version or not meson_version_compare(
1174                meson_version, ">=0.57"
1175            ):
1176                raise Exception(
1177                    "C++20 support requires specifying in meson.build: "
1178                    + "meson_version: '>=0.57'"
1179                )
1180
1181        # C++23 requires at least Meson 1.1.1 but Meson itself doesn't
1182        # identify this.  Add to our unit-test checks so that we don't
1183        # get a meson.build missing this.
1184        pattern = r"'cpp_std=c\+\+23'"
1185        for match in re.finditer(pattern, build_contents):
1186            if not meson_version or not meson_version_compare(
1187                meson_version, ">=1.1.1"
1188            ):
1189                raise Exception(
1190                    "C++23 support requires specifying in meson.build: "
1191                    + "meson_version: '>=1.1.1'"
1192                )
1193
1194        if "get_variable(" in build_contents:
1195            if not meson_version or not meson_version_compare(
1196                meson_version, ">=0.58"
1197            ):
1198                raise Exception(
1199                    "dep.get_variable() with positional argument requires "
1200                    + "meson_version: '>=0.58'"
1201                )
1202
1203        if "relative_to(" in build_contents:
1204            if not meson_version or not meson_version_compare(
1205                meson_version, ">=1.3.0"
1206            ):
1207                raise Exception(
1208                    "fs.relative_to() requires meson_version: '>=1.3.0'"
1209                )
1210
1211
class Package(object):
    """
    Wraps a repository checkout and dispatches configure/build/install/
    test operations to whichever supported build system(s) the repo uses.
    """

    def __init__(self, name=None, path=None):
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        # Yield an instance of each supported build system that detects
        # itself in this repository.
        for system in self.supported:
            instance = system(self.name, self.path)
            if instance.probe():
                yield instance

    def build_system(self, preferred=None):
        detected = list(self.build_systems())

        if not detected:
            return None

        if preferred:
            # Select the detected instance of the requested class.
            by_type = {type(instance): instance for instance in detected}
            return by_type[preferred]

        return detected[0]

    def install(self, system=None):
        # Default to the first detected build system.
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        # Full verification pass for a single build system.
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        for system in self.build_systems():
            self._test_one(system)
1253
1254
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    names = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        if os.path.basename(root) == "subprojects":
            # Prune meson subprojects that are backed by a wrap file so
            # we don't find files inside them.
            for f in files:
                if not f.endswith(".wrap"):
                    continue
                subproject = f[: -len(".wrap")]
                if subproject in dirs:
                    dirs.remove(subproject)
        matches.extend(
            os.path.join(root, name) for name in names if name in files
        )
    return matches
1281
1282
if __name__ == "__main__":
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        "phosphor-logging": [
            "--enable-metadata-processing",
            "--enable-openpower-pel-extension",
            "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml",
        ]
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
        "phosphor-dbus-interfaces": [
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
        "phosphor-logging": ["-Dopenpower-pel-extension=enabled"],
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"},
        "AC_CHECK_HEADER": {
            "host-ipmid": "phosphor-host-ipmid",
            "blobs-ipmid": "phosphor-ipmi-blobs",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging/log.hpp": "phosphor-logging",
        },
        "AC_PATH_PROG": {"sdbus++": "sdbusplus"},
        "PKG_CHECK_MODULES": {
            "phosphor-dbus-interfaces": "phosphor-dbus-interfaces",
            "libipmid": "phosphor-host-ipmid",
            "libipmid-host": "phosphor-host-ipmid",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging": "phosphor-logging",
            "phosphor-snmp": "phosphor-snmp",
            "ipmiblob": "ipmi-blob-tool",
            "hei": "openpower-libhei",
            "phosphor-ipmi-blobs": "phosphor-ipmi-blobs",
            "libcr51sign": "google-misc",
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        "AC_CHECK_LIB": 0,
        "AC_CHECK_HEADER": 0,
        "AC_PATH_PROG": 1,
        "PKG_CHECK_MODULES": 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"}

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-w",
        "--workspace",
        dest="WORKSPACE",
        required=True,
        help="Workspace directory location(i.e. /home)",
    )
    parser.add_argument(
        "-p",
        "--package",
        dest="PACKAGE",
        required=True,
        help="OpenBMC package to be unit tested",
    )
    parser.add_argument(
        "-t",
        "--test-only",
        dest="TEST_ONLY",
        action="store_true",
        required=False,
        default=False,
        help="Only run test cases, no other validation",
    )
    parser.add_argument(
        "--no-cppcheck",
        dest="NO_CPPCHECK",
        action="store_true",
        required=False,
        default=False,
        help="Do not run cppcheck",
    )
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument(
        "--integration-tests",
        dest="INTEGRATION_TEST",
        action="store_true",
        required=False,
        default=True,
        help="Enable integration tests [default].",
    )
    arg_inttests.add_argument(
        "--no-integration-tests",
        dest="INTEGRATION_TEST",
        action="store_false",
        required=False,
        help="Disable integration tests.",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Print additional package status messages",
    )
    parser.add_argument(
        "-r", "--repeat", help="Repeat tests N times", type=int, default=1
    )
    parser.add_argument(
        "-b",
        "--branch",
        dest="BRANCH",
        required=False,
        help="Branch to target for dependent repositories",
        default="master",
    )
    parser.add_argument(
        "-n",
        "--noformat",
        dest="FORMAT",
        action="store_false",
        required=False,
        help="Whether or not to run format code",
    )
    args = parser.parse_args(sys.argv[1:])
    # Mirror parsed arguments into the module-level names used by the
    # classes above.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    NO_CPPCHECK = args.NO_CPPCHECK
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() prints its arguments space-separated with --verbose,
    # and is a no-op otherwise.
    if args.verbose:

        def printline(*line):
            for arg in line:
                print(arg, end=" ")
            print()

    else:

        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # Run format-code.sh, which will in turn call any repo-level formatters.
    if FORMAT_CODE:
        check_call_cmd(
            os.path.join(
                WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh"
            ),
            CODE_SCAN_DIR,
        )

        # Check to see if any files changed
        check_call_cmd(
            "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code"
        )

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask while dependencies are built/installed; the
    # previous value is restored once the builds complete.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1496