1#!/usr/bin/env python3
2
3"""
This script determines the given package's openbmc dependencies from its
configure.ac file, then downloads, configures, builds, and installs each of
those dependencies. Finally, the given package itself is configured, built,
and installed before its unit tests are executed.
8"""
9
10import argparse
11import multiprocessing
12import os
13import platform
14import re
15import shutil
16import subprocess
17import sys
18from subprocess import CalledProcessError, check_call
19from tempfile import TemporaryDirectory
20from urllib.parse import urljoin
21
22from git import Repo
23# interpreter is not used directly but this resolves dependency ordering
24# that would be broken if we didn't include it.
25from mesonbuild import interpreter  # noqa: F401
26from mesonbuild import coredata, optinterpreter
27from mesonbuild.mesonlib import OptionKey
28from mesonbuild.mesonlib import version_compare as meson_version_compare
29
30
class DepTree:
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and a list of DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if path is None:
            path = []
        if self.name == name:
            # Build a fresh list instead of mutating the caller's list.
            return path + [self.name]
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if path is None:
            path = []
        new_paths = []
        if self.name == name or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        # NOTE: assumes both nodes exist in the tree; a missing node
        # raises AttributeError (same behavior as before).
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends at 'name' itself.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every later match that is not already under 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names (children before parents), so
        dependencies appear before the packages that need them.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(" " * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
224
225
def check_call_cmd(*cmd):
    """
    Print the current working directory and the command being run, then
    execute the command with check_call (raises CalledProcessError on a
    non-zero exit status).

    Parameter descriptions:
    cmd                 Strings forming the complete command
    """
    joined = " ".join(cmd)
    printline(os.getcwd(), ">", joined)
    check_call(cmd)
237
238
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Reuse an existing checkout if one is already present.
    if os.path.exists(os.path.join(pkg_dir, ".git")):
        return pkg_dir
    pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Fall back to master when the requested branch doesn't exist.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return clone.working_dir
263
264
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        # `make -n` performs a dry run; a nonexistent target exits non-zero.
        check_call(
            ["make", "-n", target],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
    except CalledProcessError:
        return False
    return True
280
281
# Base parallel `make` command; callers extend it with a target
# (e.g. "check", "install") before running it.
make_parallel = [
    "make",
    # Run enough jobs to saturate all the cpus
    "-j",
    str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    "-l",
    str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    "-O",
]
293
294
def build_and_install(name, build_for_testing=False):
    """
    Build and install the named package in the environment; when testing
    is requested, build and run its test suite instead of installing.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd("sudo", "-n", "--", "ldconfig")

    pkg = Package()
    action = pkg.test if build_for_testing else pkg.install
    action()
314
315
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # /tmp/depcache is expected to exist; its first line lists packages
    # that are already available and need not be built again.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if not build:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): this is a substring test on the cache line, so a
        # dep whose name is contained in another cached name also matches.
        # Presumably acceptable for the cache format — verify against the
        # code that writes /tmp/depcache.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(
                dep, dep_pkgdir, dep_added, head, branch, new_child
            )
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + name)

    # Mark this package complete once all of its dependencies are handled.
    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
371
372
def run_cppcheck():
    """Run cppcheck over the compile database, if one exists.

    Prints a notice instead of raising when cppcheck reports errors.
    """
    # Without a compile database there is nothing for cppcheck to analyze.
    if not os.path.exists(os.path.join("build", "compile_commands.json")):
        return None

    with TemporaryDirectory() as cpp_dir:
        # http://cppcheck.sourceforge.net/manual.pdf
        cppcheck_args = [
            "cppcheck",
            "-j",
            str(multiprocessing.cpu_count()),
            "--enable=style,performance,portability,missingInclude",
            "--suppress=useStlAlgorithm",
            "--suppress=unusedStructMember",
            "--suppress=postfixOperator",
            "--suppress=unreadVariable",
            "--suppress=knownConditionTrueFalse",
            "--library=googletest",
            "--project=build/compile_commands.json",
            f"--cppcheck-build-dir={cpp_dir}",
        ]
        try:
            check_call_cmd(*cppcheck_args)
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
396
397
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles a small C program exercising malloc/strcmp/strerror_r and runs
    it under valgrind; any failure (missing gcc/valgrind, compile error, or
    valgrind reporting problems via --error-exitcode) means "not safe".
    """
    src = "unit-test-vg.c"
    exe = "./unit-test-vg"
    with open(src, "w") as h:
        h.write("#include <errno.h>\n")
        h.write("#include <stdio.h>\n")
        h.write("#include <stdlib.h>\n")
        h.write("#include <string.h>\n")
        h.write("int main() {\n")
        h.write("char *heap_str = malloc(16);\n")
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write("free(heap_str);\n")
        h.write("char errstr[64];\n")
        h.write("strerror_r(EINVAL, errstr, sizeof(errstr));\n")
        h.write('printf("%s\\n", errstr);\n')
        h.write("return res;\n")
        h.write("}\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                ["gcc", "-O2", "-o", exe, src], stdout=devnull, stderr=devnull
            )
            check_call(
                ["valgrind", "--error-exitcode=99", exe],
                stdout=devnull,
                stderr=devnull,
            )
        return True
    except Exception:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The compile step may fail before the executable is produced, so
        # only remove artifacts that exist; otherwise cleanup itself raises
        # FileNotFoundError and masks the intended False return.
        for artifact in (src, exe):
            if os.path.exists(artifact):
                os.remove(artifact)
436
437
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; any
    failure (missing gcc, unsupported platform, runtime error) means
    "not safe". ppc64le is special-cased as unsafe.
    """
    src = "unit-test-sanitize.c"
    exe = "./unit-test-sanitize"
    with open(src, "w") as h:
        h.write("int main() { return 0; }\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                [
                    "gcc",
                    "-O2",
                    "-fsanitize=address",
                    "-fsanitize=undefined",
                    "-o",
                    exe,
                    src,
                ],
                stdout=devnull,
                stderr=devnull,
            )
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == "ppc64le":
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The compile step may fail before the executable is produced, so
        # only remove artifacts that exist; otherwise cleanup itself raises
        # FileNotFoundError and masks the intended False return.
        for artifact in (src, exe):
            if os.path.exists(artifact):
                os.remove(artifact)
476
477
def maybe_make_valgrind():
    """
    Run the package's unit tests under valgrind via `make check-valgrind`,
    when both the platform supports valgrind and the package provides the
    target; otherwise do nothing.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists("check-valgrind"):
        return

    try:
        check_call_cmd(*(make_parallel + ["check-valgrind"]))
    except CalledProcessError:
        # Dump every test-suite log we can find to aid debugging.
        for root, _, files in os.walk(os.getcwd()):
            for name in files:
                if re.search("test-suite-[a-z]+.log", name):
                    check_call_cmd("cat", os.path.join(root, name))
        raise Exception("Valgrind tests failed")
504
505
def maybe_make_coverage():
    """
    Run the package's code-coverage target via `make check-code-coverage`
    when the package provides it; otherwise do nothing.
    """
    if not make_target_exists("check-code-coverage"):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ["check-code-coverage"]))
    except CalledProcessError:
        raise Exception("Code coverage failed")
521
522
class BuildSystem(object):
    """
    Abstract interface over a package's build tooling.

    Concrete drivers (Autotools, Meson, CMake, ...) implement the
    configure, build, install, test and analyze phases on top of this
    interface, so callers can treat every build system uniformly and
    decide whether a package should merely be installed or also tested
    and analyzed.
    """

    def __init__(self, package, path):
        """Record the package identity and location.

        Keyword arguments:
        package: The package name; derived from the path when None.
        path: The package location; the working directory when None.
        """
        self.path = path or "."
        resolved = os.path.realpath(self.path)
        self.package = package or os.path.basename(resolved)
        self.build_for_testing = False

    def probe(self):
        """Report whether this driver can handle the package.

        Returns True when the driver can drive the package's build
        system, False otherwise. Implementations typically check for
        the build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Report the package's dependencies.

        Returns a list of dependency names; an empty list when the
        package has none. Implementations typically extract this from
        the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Prepare the source tree for building.

        Must raise on failure.

        Keyword arguments:
        build_for_testing: When True, configure for testing rather than
                           for installation as a dependency — generally
                           debug info, low optimisation and possibly
                           sanitizers. When False, configure for install.

        Implementations typically invoke the build system tooling to
        generate Makefiles or their equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Compile the software for installation and/or testing.

        Must raise on failure. Typically invokes `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the built software for use.

        Must raise on failure. Typically invokes `make` or `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and execute the package's test suite.

        Must raise when building or testing fails. Typically invokes
        `make` or `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run supported analysis tools over the codebase.

        Must raise when analysis fails. Tools needing build-system
        injection (e.g. scan-build) hook in here; build-system-agnostic
        analyzers may also live here at the cost of duplication across
        driver implementations.
        """
        raise NotImplementedError
626
627
class Autotools(BuildSystem):
    """BuildSystem driver for GNU Autotools (configure.ac) packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An Autotools package is identified by its configure.ac file.
        return os.path.isfile(os.path.join(self.path, "configure.ac"))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Redefines the macros listed in DEPENDENCIES (a module-level table
        defined elsewhere in this file) so that autoconf expands them into
        START/END markers around the argument of interest, runs autoconf
        over the result, then maps the marked text back to dependency
        package names.
        """
        configure_ac = os.path.join(self.path, "configure.ac")

        contents = ""
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # DEPENDENCIES_OFFSET[macro] selects which macro argument
            # ($N in m4, hence the +1) carries the dependency text.
            contents += (
                "m4_define(["
                + macro
                + "], ["
                + macro
                + "_START$"
                + str(DEPENDENCIES_OFFSET[macro] + 1)
                + macro
                + "_END])\n"
            )
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # "-" makes autoconf read the synthesized document from stdin.
        autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"]
        autoconf_process = subprocess.Popen(
            autoconf_cmdline,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        document = contents.encode("utf-8")
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = "(" + macro + ")_START(.*?)" + macro + "_END"
            for match in re.compile(pattern).finditer(stdout.decode("utf-8")):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(" "):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return "--" + ("enable" if enabled else "disable") + "-" + flag

    def configure(self, build_for_testing):
        """Bootstrap (if needed) and run ./configure with standard flags."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature("silent-rules", False),
            self._configure_feature("examples", build_for_testing),
            self._configure_feature("tests", build_for_testing),
            # INTEGRATION_TEST is a module-level flag defined elsewhere.
            self._configure_feature("itests", INTEGRATION_TEST),
        ]
        conf_flags.extend(
            [
                self._configure_feature("code-coverage", build_for_testing),
                self._configure_feature("valgrind", build_for_testing),
            ]
        )
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script the package provides, if any.
        for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]:
            if os.path.exists(bootstrap):
                check_call_cmd("./" + bootstrap)
                break
        check_call_cmd("./configure", *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"]))

    def test(self):
        """Run `make check` (repeated args.repeat times), then optional
        valgrind and coverage targets; dump logs and raise on failure."""
        try:
            cmd = make_parallel + ["check"]
            # args is the module-level CLI namespace defined elsewhere.
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if "test-suite.log" not in files:
                    continue
                check_call_cmd("cat", os.path.join(root, "test-suite.log"))
            raise Exception("Unit tests failed")

    def analyze(self):
        run_cppcheck()
741
742
class CMake(BuildSystem):
    """BuildSystem driver for CMake (CMakeLists.txt) packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A top-level CMakeLists.txt identifies a CMake project.
        return os.path.isfile(os.path.join(self.path, "CMakeLists.txt"))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        generate = ["cmake", "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON"]
        if INTEGRATION_TEST:
            generate.append("-DITESTS=ON")
        generate.append(".")
        check_call_cmd(*generate)

    def build(self):
        jobs = str(multiprocessing.cpu_count())
        check_call_cmd("cmake", "--build", ".", "--", "-j", jobs)

    def install(self):
        # CMake packages are not installed by this driver.
        pass

    def test(self):
        if make_target_exists("test"):
            check_call_cmd("ctest", ".")

    def analyze(self):
        if os.path.isfile(".clang-tidy"):
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                # clang-tidy needs to run on a clang-specific build
                check_call_cmd(
                    "cmake",
                    "-DCMAKE_C_COMPILER=clang",
                    "-DCMAKE_CXX_COMPILER=clang++",
                    "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
                    "-H.",
                    "-B" + build_dir,
                )
                check_call_cmd(
                    "run-clang-tidy", "-header-filter=.*", "-p", build_dir
                )

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
802
803
804class Meson(BuildSystem):
    def __init__(self, package=None, path=None):
        # Delegate package/path resolution to the BuildSystem base class.
        super(Meson, self).__init__(package, path)
807
    def probe(self):
        # A top-level meson.build identifies a Meson project.
        return os.path.isfile(os.path.join(self.path, "meson.build"))
810
    def dependencies(self):
        """Scan every meson.build in the tree for dependency() calls and
        map the names through DEPENDENCIES["PKG_CHECK_MODULES"] (a
        module-level table defined elsewhere in this file)."""
        meson_build = os.path.join(self.path, "meson.build")
        if not os.path.exists(meson_build):
            return []

        found_deps = []
        for root, dirs, files in os.walk(self.path):
            if "meson.build" not in files:
                continue
            with open(os.path.join(root, "meson.build"), "rt") as f:
                build_contents = f.read()
            # Matches dependency('<name>' ...) invocations.
            pattern = r"dependency\('([^']*)'.*?\),?\n"
            for match in re.finditer(pattern, build_contents):
                group = match.group(1)
                maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group)
                if maybe_dep is not None:
                    found_deps.append(maybe_dep)

        return found_deps
830
    def _parse_options(self, options_file):
        """
        Returns the options defined in the provided meson_options.txt file,
        as parsed by meson's OptionInterpreter (keyed by OptionKey).

        Parameters:
        options_file        The file containing options
        """
        oi = optinterpreter.OptionInterpreter("")
        oi.process(options_file)
        return oi.options
841
    def _configure_boolean(self, val):
        """
        Returns the meson flag value which signifies the boolean.

        True maps to "true"; False maps to "false".

        Parameters:
        val                 The value being converted

        Raises an Exception for anything other than True/False.
        """
        if val is True:
            return "true"
        elif val is False:
            return "false"
        else:
            raise Exception("Bad meson boolean value")
858
    def _configure_feature(self, val):
        """
        Returns the meson feature-option value which signifies the value.

        True is enabled which requires the feature.
        False is disabled which disables the feature.
        None is auto which autodetects the feature.

        Parameters:
        val                 The value being converted

        Raises an Exception for any other value.
        """
        if val is True:
            return "enabled"
        elif val is False:
            return "disabled"
        elif val is None:
            return "auto"
        else:
            raise Exception("Bad meson feature value")
878
    def _configure_option(self, opts, key, val):
        """
        Returns the "-D<key>=<value>" meson flag for the option, converting
        val according to the declared type of the option.

        Parameters:
        opts                Parsed meson options (from _parse_options)
        key                 The OptionKey of the option being set
        val                 The value being converted

        Raises an Exception for option types other than boolean/feature.
        """
        if isinstance(opts[key], coredata.UserBooleanOption):
            str_val = self._configure_boolean(val)
        elif isinstance(opts[key], coredata.UserFeatureOption):
            str_val = self._configure_feature(val)
        else:
            raise Exception("Unknown meson option type")
        return "-D{}={}".format(key, str_val)
895
    def configure(self, build_for_testing):
        """Run `meson setup` for the package with standard flags.

        build_for_testing selects a debug build and drives the package's
        tests/examples/itests options when they are declared in
        meson_options.txt.
        """
        self.build_for_testing = build_for_testing
        meson_options = {}
        if os.path.exists("meson_options.txt"):
            meson_options = self._parse_options("meson_options.txt")
        # Flags applied to every package build.
        meson_flags = [
            "-Db_colorout=never",
            "-Dwerror=true",
            "-Dwarning_level=3",
        ]
        if build_for_testing:
            meson_flags.append("--buildtype=debug")
        else:
            meson_flags.append("--buildtype=debugoptimized")
        # Only set options that the package actually declares.
        if OptionKey("tests") in meson_options:
            meson_flags.append(
                self._configure_option(
                    meson_options, OptionKey("tests"), build_for_testing
                )
            )
        if OptionKey("examples") in meson_options:
            meson_flags.append(
                self._configure_option(
                    meson_options, OptionKey("examples"), build_for_testing
                )
            )
        if OptionKey("itests") in meson_options:
            meson_flags.append(
                self._configure_option(
                    meson_options, OptionKey("itests"), INTEGRATION_TEST
                )
            )
        # MESON_FLAGS holds per-package extras (defined elsewhere in file).
        if MESON_FLAGS.get(self.package) is not None:
            meson_flags.extend(MESON_FLAGS.get(self.package))
        try:
            # Prefer reconfiguring an existing build dir; fall back to a
            # clean setup when reconfigure fails.
            check_call_cmd(
                "meson", "setup", "--reconfigure", "build", *meson_flags
            )
        except Exception:
            shutil.rmtree("build", ignore_errors=True)
            check_call_cmd("meson", "setup", "build", *meson_flags)
937
938    def build(self):
939        check_call_cmd("ninja", "-C", "build")
940
941    def install(self):
942        check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install")
943
944    def test(self):
945        # It is useful to check various settings of the meson.build file
946        # for compatibility, such as meson_version checks.  We shouldn't
947        # do this in the configure path though because it affects subprojects
948        # and dependencies as well, but we only want this applied to the
949        # project-under-test (otherwise an upstream dependency could fail
950        # this check without our control).
951        self._extra_meson_checks()
952
953        try:
954            test_args = ("--repeat", str(args.repeat), "-C", "build")
955            check_call_cmd("meson", "test", "--print-errorlogs", *test_args)
956
957        except CalledProcessError:
958            raise Exception("Unit tests failed")
959
960    def _setup_exists(self, setup):
961        """
962        Returns whether the meson build supports the named test setup.
963
964        Parameter descriptions:
965        setup              The setup target to check
966        """
967        try:
968            with open(os.devnull, "w"):
969                output = subprocess.check_output(
970                    [
971                        "meson",
972                        "test",
973                        "-C",
974                        "build",
975                        "--setup",
976                        setup,
977                        "-t",
978                        "0",
979                    ],
980                    stderr=subprocess.STDOUT,
981                )
982        except CalledProcessError as e:
983            output = e.output
984        output = output.decode("utf-8")
985        return not re.search("Test setup .* not found from project", output)
986
987    def _maybe_valgrind(self):
988        """
989        Potentially runs the unit tests through valgrind for the package
990        via `meson test`. The package can specify custom valgrind
991        configurations by utilizing add_test_setup() in a meson.build
992        """
993        if not is_valgrind_safe():
994            sys.stderr.write("###### Skipping valgrind ######\n")
995            return
996        try:
997            if self._setup_exists("valgrind"):
998                check_call_cmd(
999                    "meson",
1000                    "test",
1001                    "-t",
1002                    "10",
1003                    "-C",
1004                    "build",
1005                    "--print-errorlogs",
1006                    "--setup",
1007                    "valgrind",
1008                )
1009            else:
1010                check_call_cmd(
1011                    "meson",
1012                    "test",
1013                    "-t",
1014                    "10",
1015                    "-C",
1016                    "build",
1017                    "--print-errorlogs",
1018                    "--wrapper",
1019                    "valgrind",
1020                )
1021        except CalledProcessError:
1022            raise Exception("Valgrind tests failed")
1023
1024    def analyze(self):
1025        self._maybe_valgrind()
1026
1027        # Run clang-tidy only if the project has a configuration
1028        if os.path.isfile(".clang-tidy"):
1029            os.environ["CXX"] = "clang++"
1030            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
1031                check_call_cmd("meson", "setup", build_dir)
1032                try:
1033                    check_call_cmd(
1034                        "run-clang-tidy", "-fix", "-format", "-p", build_dir
1035                    )
1036                except subprocess.CalledProcessError:
1037                    check_call_cmd(
1038                        "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff"
1039                    )
1040                    raise
1041        # Run the basic clang static analyzer otherwise
1042        else:
1043            check_call_cmd("ninja", "-C", "build", "scan-build")
1044
1045        # Run tests through sanitizers
1046        # b_lundef is needed if clang++ is CXX since it resolves the
1047        # asan symbols at runtime only. We don't want to set it earlier
1048        # in the build process to ensure we don't have undefined
1049        # runtime code.
1050        if is_sanitize_safe():
1051            check_call_cmd(
1052                "meson",
1053                "configure",
1054                "build",
1055                "-Db_sanitize=address,undefined",
1056                "-Db_lundef=false",
1057            )
1058            check_call_cmd(
1059                "meson",
1060                "test",
1061                "-C",
1062                "build",
1063                "--print-errorlogs",
1064                "--logbase",
1065                "testlog-ubasan",
1066            )
1067            # TODO: Fix memory sanitizer
1068            # check_call_cmd('meson', 'configure', 'build',
1069            #                '-Db_sanitize=memory')
1070            # check_call_cmd('meson', 'test', '-C', 'build'
1071            #                '--logbase', 'testlog-msan')
1072            check_call_cmd("meson", "configure", "build", "-Db_sanitize=none")
1073        else:
1074            sys.stderr.write("###### Skipping sanitizers ######\n")
1075
1076        # Run coverage checks
1077        check_call_cmd("meson", "configure", "build", "-Db_coverage=true")
1078        self.test()
1079        # Only build coverage HTML if coverage files were produced
1080        for root, dirs, files in os.walk("build"):
1081            if any([f.endswith(".gcda") for f in files]):
1082                check_call_cmd("ninja", "-C", "build", "coverage-html")
1083                break
1084        check_call_cmd("meson", "configure", "build", "-Db_coverage=false")
1085        run_cppcheck()
1086
1087    def _extra_meson_checks(self):
1088        with open(os.path.join(self.path, "meson.build"), "rt") as f:
1089            build_contents = f.read()
1090
1091        # Find project's specified meson_version.
1092        meson_version = None
1093        pattern = r"meson_version:[^']*'([^']*)'"
1094        for match in re.finditer(pattern, build_contents):
1095            group = match.group(1)
1096            meson_version = group
1097
1098        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1099        # identify this.  Add to our unit-test checks so that we don't
1100        # get a meson.build missing this.
1101        pattern = r"'cpp_std=c\+\+20'"
1102        for match in re.finditer(pattern, build_contents):
1103            if not meson_version or not meson_version_compare(
1104                meson_version, ">=0.57"
1105            ):
1106                raise Exception(
1107                    "C++20 support requires specifying in meson.build: "
1108                    + "meson_version: '>=0.57'"
1109                )
1110
1111
class Package(object):
    """
    A repository under test.  Detects which of the supported build
    systems the repository uses and drives install/test through them.
    """

    def __init__(self, name=None, path=None):
        # Probe order doubles as preference order: the first system
        # that probes successfully is the default.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of each supported system that probes true."""
        for system_cls in self.supported:
            candidate = system_cls(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the preferred (or first) applicable build system."""
        detected = list(self.build_systems())

        if not detected:
            return None

        if preferred:
            return {type(found): found for found in detected}[preferred]

        return detected[0]

    def install(self, system=None):
        """Configure (non-test mode), build, and install the package."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full build/test (and optional analysis) flow once."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Test the package against every applicable build system."""
        for system in self.build_systems():
            self._test_one(system)
1153
1154
def find_file(filename, basedir):
    """
    Locate every occurrence of a file (or any of a list of files)
    under the base directory and return their paths.

    Directories that are meson wrap-managed subprojects (a `foo.wrap`
    file alongside a `foo/` directory under `subprojects/`) are pruned
    from the walk so their contents are never matched.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    wanted = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        if os.path.split(root)[-1] == "subprojects":
            for entry in files:
                if entry.endswith(".wrap"):
                    wrapped = ".".join(entry.split(".")[0:-1])
                    if wrapped in dirs:
                        # don't descend into wrap-managed subprojects
                        dirs.remove(wrapped)
        matches.extend(
            os.path.join(root, name) for name in wanted if name in files
        )
    return matches
1181
1182
if __name__ == "__main__":
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra ./configure arguments for specific autotools-based repos.
    CONFIGURE_FLAGS = {
        "phosphor-logging": [
            "--enable-metadata-processing",
            "--enable-openpower-pel-extension",
            "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml",
        ]
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra `meson setup` arguments for specific repos; consumed by
    # Meson.configure() above via MESON_FLAGS.get(self.package).
    MESON_FLAGS = {
        "phosphor-dbus-interfaces": [
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
        "phosphor-logging": ["-Dopenpower-pel-extension=enabled"],
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macro arguments (libraries, headers, programs,
    # pkg-config modules) to the openbmc repo that provides them.
    DEPENDENCIES = {
        "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"},
        "AC_CHECK_HEADER": {
            "host-ipmid": "phosphor-host-ipmid",
            "blobs-ipmid": "phosphor-ipmi-blobs",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging/log.hpp": "phosphor-logging",
        },
        "AC_PATH_PROG": {"sdbus++": "sdbusplus"},
        "PKG_CHECK_MODULES": {
            "phosphor-dbus-interfaces": "phosphor-dbus-interfaces",
            "libipmid": "phosphor-host-ipmid",
            "libipmid-host": "phosphor-host-ipmid",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging": "phosphor-logging",
            "phosphor-snmp": "phosphor-snmp",
            "ipmiblob": "ipmi-blob-tool",
            "hei": "openpower-libhei",
            "phosphor-ipmi-blobs": "phosphor-ipmi-blobs",
            "libcr51sign": "google-misc",
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        "AC_CHECK_LIB": 0,
        "AC_CHECK_HEADER": 0,
        "AC_PATH_PROG": 1,
        "PKG_CHECK_MODULES": 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Dependencies matching the regex are reordered before the named repo.
    DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"}

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-w",
        "--workspace",
        dest="WORKSPACE",
        required=True,
        help="Workspace directory location(i.e. /home)",
    )
    parser.add_argument(
        "-p",
        "--package",
        dest="PACKAGE",
        required=True,
        help="OpenBMC package to be unit tested",
    )
    parser.add_argument(
        "-t",
        "--test-only",
        dest="TEST_ONLY",
        action="store_true",
        required=False,
        default=False,
        help="Only run test cases, no other validation",
    )
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument(
        "--integration-tests",
        dest="INTEGRATION_TEST",
        action="store_true",
        required=False,
        default=True,
        help="Enable integration tests [default].",
    )
    arg_inttests.add_argument(
        "--no-integration-tests",
        dest="INTEGRATION_TEST",
        action="store_false",
        required=False,
        help="Disable integration tests.",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Print additional package status messages",
    )
    parser.add_argument(
        "-r", "--repeat", help="Repeat tests N times", type=int, default=1
    )
    parser.add_argument(
        "-b",
        "--branch",
        dest="BRANCH",
        required=False,
        help="Branch to target for dependent repositories",
        default="master",
    )
    parser.add_argument(
        "-n",
        "--noformat",
        dest="FORMAT",
        action="store_false",
        required=False,
        help="Whether or not to run format code",
    )
    args = parser.parse_args(sys.argv[1:])
    # Module-level globals consumed throughout this script and by the
    # build-system classes above (e.g. TEST_ONLY, INTEGRATION_TEST).
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    if args.verbose:

        def printline(*line):
            # Verbose mode: echo each status argument space-separated.
            for arg in line:
                print(arg, end=" ")
            print()

    else:

        def printline(*line):
            # Quiet mode: status messages are dropped.
            pass

    # Location of the package under test inside the workspace.
    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # Run format-code.sh, which will in turn call any repo-level formatters.
    if FORMAT_CODE:
        check_call_cmd(
            os.path.join(
                WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh"
            ),
            CODE_SCAN_DIR,
        )

        # Check to see if any files changed; a non-empty diff means the
        # repo was not formatted, and --exit-code makes git fail the run.
        check_call_cmd(
            "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code"
        )

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask while building/installing dependencies so created
    # files are not permission-restricted; restored after the builds.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)