1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10import argparse
11import json
12import multiprocessing
13import os
14import platform
15import re
16import shutil
17import subprocess
18import sys
19from subprocess import CalledProcessError, check_call
20from tempfile import TemporaryDirectory
21from urllib.parse import urljoin
22
23from git import Repo
24
25# interpreter is not used directly but this resolves dependency ordering
26# that would be broken if we didn't include it.
27from mesonbuild import interpreter  # noqa: F401
28from mesonbuild import coredata, optinterpreter
29from mesonbuild.mesonlib import OptionKey
30from mesonbuild.mesonlib import version_compare as meson_version_compare
31
32
class DepTree:
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and a list of DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child node with the given name, if any.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (depth-first search from this node).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found
        (the tree head itself has no parent).

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        # Path prefix is the same for every child; compute it once.
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        # Find the position of the 'name' node among the matched paths.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every later match that is not already under 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names (dependencies before the
        packages that need them).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(" " * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
227
def check_call_cmd(*cmd, **kwargs):
    """
    Print the current working directory and the command about to run, then
    execute the command via check_call.

    Parameter descriptions:
    cmd                 Parameters composing the complete command
    kwargs              Keyword arguments forwarded verbatim to check_call
    """
    command = list(cmd)
    printline(os.getcwd(), ">", " ".join(command))
    check_call(command, **kwargs)
239
240
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, ".git")):
        # Already cloned on a previous invocation; reuse the checkout.
        return pkg_dir
    pkg_repo = urljoin("https://gerrit.openbmc.org/openbmc/", pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # Prefer the requested branch when the remote has it.
        repo = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Branch missing (or clone failed); fall back to master.
        printline("Input branch not found, default to master")
        repo = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return repo.working_dir
265
266
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when `make -n <target>` succeeds; False when the target
    (or makefile) doesn't exist, or when `make` itself is not installed.
    """
    try:
        check_call(
            ["make", "-n", target],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return True
    except (CalledProcessError, FileNotFoundError):
        # CalledProcessError: make ran but the target is unknown.
        # FileNotFoundError: no `make` binary on PATH; treat as "no target"
        # rather than crashing the whole test run.
        return False
282
283
# Shared `make` command prefix that parallelizes builds across all CPUs.
_num_cpus = str(multiprocessing.cpu_count())
make_parallel = [
    "make",
    # Run enough jobs to saturate all the cpus
    "-j",
    _num_cpus,
    # Don't start more jobs if the load avg is too high
    "-l",
    _num_cpus,
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    "-O",
]
295
296
def build_and_install(name, build_for_testing=False):
    """
    Build and install the package into the environment. Optionally build
    the examples and test cases for the package instead.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd("sudo", "-n", "--", "ldconfig")

    pkg = Package()
    # Test mode exercises the suite; otherwise install as a dependency.
    action = pkg.test if build_for_testing else pkg.install
    action()
316
317
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict. Raises an Exception when no build
    system can be identified for a package or a dependency cycle is found.
    """
    if not dep_tree:
        dep_tree = head

    # NOTE(review): /tmp/depcache appears to hold a single line naming
    # packages already present in the environment -- presumably written by
    # the surrounding CI tooling; confirm against the caller/container setup.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if not build:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): substring containment, not exact-name match -- a dep
        # whose name is embedded in another cached entry also skips here.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(
                dep, dep_pkgdir, dep_added, head, branch, new_child
            )
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + name)

    # Mark this package complete once all of its dependencies are processed.
    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
373
374
def run_cppcheck():
    """
    Run cppcheck over the generated compile database, if one exists.

    Does nothing (returns None) when build/compile_commands.json is absent;
    prints a notice when cppcheck reports errors.
    """
    if not os.path.exists(os.path.join("build", "compile_commands.json")):
        return None

    with TemporaryDirectory() as cpp_dir:
        # http://cppcheck.sourceforge.net/manual.pdf
        cmd = [
            "cppcheck",
            "-j",
            str(multiprocessing.cpu_count()),
            "--enable=style,performance,portability,missingInclude",
            "--inline-suppr",
            "--suppress=useStlAlgorithm",
            "--suppress=unusedStructMember",
            "--suppress=postfixOperator",
            "--suppress=unreadVariable",
            "--suppress=knownConditionTrueFalse",
            "--library=googletest",
            "--project=build/compile_commands.json",
            f"--cppcheck-build-dir={cpp_dir}",
        ]
        try:
            check_call_cmd(*cmd)
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
399
400
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles and runs a small probe program under valgrind; any failure
    (missing gcc, missing valgrind, or a valgrind-detected error) marks
    the platform as unsafe.
    """
    src = "unit-test-vg.c"
    exe = "./unit-test-vg"
    with open(src, "w") as h:
        h.write("#include <errno.h>\n")
        h.write("#include <stdio.h>\n")
        h.write("#include <stdlib.h>\n")
        h.write("#include <string.h>\n")
        h.write("int main() {\n")
        h.write("char *heap_str = malloc(16);\n")
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write("free(heap_str);\n")
        h.write("char errstr[64];\n")
        h.write("strerror_r(EINVAL, errstr, sizeof(errstr));\n")
        h.write('printf("%s\\n", errstr);\n')
        h.write("return res;\n")
        h.write("}\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                ["gcc", "-O2", "-o", exe, src], stdout=devnull, stderr=devnull
            )
            check_call(
                ["valgrind", "--error-exitcode=99", exe],
                stdout=devnull,
                stderr=devnull,
            )
        return True
    except Exception:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The exe never exists if compilation failed; guard the cleanup so
        # a FileNotFoundError here can't mask the intended False return.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
439
440
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; any
    failure (including a missing compiler) marks the platform unsafe.
    """
    src = "unit-test-sanitize.c"
    exe = "./unit-test-sanitize"
    with open(src, "w") as h:
        h.write("int main() { return 0; }\n")
    try:
        with open(os.devnull, "w") as devnull:
            check_call(
                [
                    "gcc",
                    "-O2",
                    "-fsanitize=address",
                    "-fsanitize=undefined",
                    "-o",
                    exe,
                    src,
                ],
                stdout=devnull,
                stderr=devnull,
            )
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == "ppc64le":
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The exe never exists if compilation failed; guard the cleanup so
        # a FileNotFoundError here can't mask the intended False return.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
479
480
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Raises an Exception (after dumping the test-suite logs) when the
    valgrind run fails.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists("check-valgrind"):
        return

    try:
        cmd = make_parallel + ["check-valgrind"]
        check_call_cmd(*cmd)
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                # Escape the dot so only genuine ".log" files match (the
                # previous pattern's bare "." matched any character).
                if re.search(r"test-suite-[a-z]+\.log", f) is None:
                    continue
                check_call_cmd("cat", os.path.join(root, f))
        raise Exception("Valgrind tests failed")
507
508
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code
    coverage testing then it just skips over this.
    """
    if not make_target_exists("check-code-coverage"):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ["check-code-coverage"]))
    except CalledProcessError:
        raise Exception("Code coverage failed")
524
525
class BuildSystem(object):
    """
    Abstract interface over the configure, build, install, test and analyze
    phases of a build system. Concrete drivers (Autotools, Meson, CMake and
    possibly others) implement these hooks, which lets callers decide whether
    a package should merely be installed or also tested and analyzed.
    """

    def __init__(self, package, path):
        """Initialise driver state that is independent of the build system.

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = path if path else "."
        if package:
            self.package = package
        else:
            # Fall back to the directory name of the (resolved) package path.
            self.package = os.path.basename(os.path.realpath(self.path))
        self.build_for_testing = False

    def probe(self):
        """Test whether this driver can handle the package.

        Returns True if the driver can drive the package's build system,
        otherwise False. Implementations typically check for the presence
        of the build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies.

        Returns a list of dependencies; an empty list when none are
        required. Implementations typically extract this from the build
        system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building.

        Must raise an exception on configuration failure.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing
                           rather than for installation as a dependency of
                           the package under test. True generally implies
                           debug info, low optimisation and possibly
                           sanitizers.

        Implementations typically invoke the build system tooling to
        generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing.

        Must raise an exception if the build fails. Typically invokes
        `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use.

        Must raise an exception if installation fails. Typically invokes
        `make` or `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package.

        Must raise an exception if the build or testing fails. Typically
        invokes `make` or `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase.

        Must raise an exception if analysis fails. Tools such as scan-build
        need injection into the build system, which this hook enables;
        build-system-independent analyzers may also live here at the cost
        of some duplication across drivers.
        """
        raise NotImplementedError
629
630
class Autotools(BuildSystem):
    """Build system driver for autotools (configure.ac) based packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # A package is autotools-driven when it ships a configure.ac.
        return os.path.isfile(os.path.join(self.path, "configure.ac"))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Redefines each macro listed in DEPENDENCIES so that autoconf's
        output wraps the macro argument of interest in MACRO_START/MACRO_END
        markers, then scans the marked text for known dependency names.
        """
        configure_ac = os.path.join(self.path, "configure.ac")

        contents = ""
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # Builds: m4_define([MACRO], [MACRO_START$<n>MACRO_END])
            # where $<n> selects the interesting macro argument
            # (DEPENDENCIES_OFFSET is 0-based, autoconf args are 1-based).
            contents += (
                "m4_define(["
                + macro
                + "], ["
                + macro
                + "_START$"
                + str(DEPENDENCIES_OFFSET[macro] + 1)
                + macro
                + "_END])\n"
            )
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Feed the augmented configure.ac to autoconf on stdin ("-").
        autoconf_cmdline = ["autoconf", "-Wno-undefined", "-"]
        autoconf_process = subprocess.Popen(
            autoconf_cmdline,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        document = contents.encode("utf-8")
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = "(" + macro + ")_START(.*?)" + macro + "_END"
            for match in re.compile(pattern).finditer(stdout.decode("utf-8")):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(" "):
                for known_dep in DEPENDENCIES[macro].keys():
                    # Prefix match so version-suffixed module names resolve.
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns an configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return "--" + ("enable" if enabled else "disable") + "-" + flag

    def configure(self, build_for_testing):
        """Run the package's bootstrap script (if any) and ./configure."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature("silent-rules", False),
            self._configure_feature("examples", build_for_testing),
            self._configure_feature("tests", build_for_testing),
            self._configure_feature("itests", INTEGRATION_TEST),
        ]
        conf_flags.extend(
            [
                self._configure_feature("code-coverage", False),
                self._configure_feature("valgrind", build_for_testing),
            ]
        )
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found, in priority order.
        for bootstrap in ["bootstrap.sh", "bootstrap", "autogen.sh"]:
            if os.path.exists(bootstrap):
                check_call_cmd("./" + bootstrap)
                break
        check_call_cmd("./configure", *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        # Install system-wide, then refresh the dynamic linker cache.
        check_call_cmd("sudo", "-n", "--", *(make_parallel + ["install"]))
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        """Run `make check` (args.repeat times), then valgrind/coverage."""
        try:
            cmd = make_parallel + ["check"]
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            # Dump every test-suite.log found to aid debugging, then fail.
            for root, _, files in os.walk(os.getcwd()):
                if "test-suite.log" not in files:
                    continue
                check_call_cmd("cat", os.path.join(root, "test-suite.log"))
            raise Exception("Unit tests failed")

    def analyze(self):
        run_cppcheck()
745
746
class CMake(BuildSystem):
    """Build system driver for CMake (CMakeLists.txt) based packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A package is CMake-driven when it ships a top-level CMakeLists.txt.
        return os.path.isfile(os.path.join(self.path, "CMakeLists.txt"))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        """Configure an in-source CMake build, optionally with itests."""
        self.build_for_testing = build_for_testing
        cmd = [
            "cmake",
            "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
            "-DCMAKE_CXX_FLAGS='-DBOOST_USE_VALGRIND'",
        ]
        if INTEGRATION_TEST:
            cmd.append("-DITESTS=ON")
        cmd.append(".")
        check_call_cmd(*cmd)

    def build(self):
        check_call_cmd(
            "cmake",
            "--build",
            ".",
            "--",
            "-j",
            str(multiprocessing.cpu_count()),
        )

    def install(self):
        # Install system-wide, then refresh the dynamic linker cache.
        check_call_cmd("sudo", "cmake", "--install", ".")
        check_call_cmd("sudo", "-n", "--", "ldconfig")

    def test(self):
        if make_target_exists("test"):
            check_call_cmd("ctest", ".")

    def analyze(self):
        """Run clang-tidy (when configured), valgrind, coverage, cppcheck."""
        if os.path.isfile(".clang-tidy"):
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                # clang-tidy needs to run on a clang-specific build
                check_call_cmd(
                    "cmake",
                    "-DCMAKE_C_COMPILER=clang",
                    "-DCMAKE_CXX_COMPILER=clang++",
                    "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
                    "-H.",
                    "-B" + build_dir,
                )

                check_call_cmd(
                    "run-clang-tidy", "-header-filter=.*", "-p", build_dir
                )

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
813
814
815class Meson(BuildSystem):
816    @staticmethod
817    def _project_name(path):
818        doc = subprocess.check_output(
819            ["meson", "introspect", "--projectinfo", path],
820            stderr=subprocess.STDOUT,
821        ).decode("utf-8")
822        return json.loads(doc)["descriptive_name"]
823
824    def __init__(self, package=None, path=None):
825        super(Meson, self).__init__(package, path)
826
827    def probe(self):
828        return os.path.isfile(os.path.join(self.path, "meson.build"))
829
830    def dependencies(self):
831        meson_build = os.path.join(self.path, "meson.build")
832        if not os.path.exists(meson_build):
833            return []
834
835        found_deps = []
836        for root, dirs, files in os.walk(self.path):
837            if "meson.build" not in files:
838                continue
839            with open(os.path.join(root, "meson.build"), "rt") as f:
840                build_contents = f.read()
841            pattern = r"dependency\('([^']*)'.*?\),?\n"
842            for match in re.finditer(pattern, build_contents):
843                group = match.group(1)
844                maybe_dep = DEPENDENCIES["PKG_CHECK_MODULES"].get(group)
845                if maybe_dep is not None:
846                    found_deps.append(maybe_dep)
847
848        return found_deps
849
850    def _parse_options(self, options_file):
851        """
852        Returns a set of options defined in the provides meson_options.txt file
853
854        Parameters:
855        options_file        The file containing options
856        """
857        oi = optinterpreter.OptionInterpreter("")
858        oi.process(options_file)
859        return oi.options
860
861    def _configure_boolean(self, val):
862        """
863        Returns the meson flag which signifies the value
864
865        True is true which requires the boolean.
866        False is false which disables the boolean.
867
868        Parameters:
869        val                 The value being converted
870        """
871        if val is True:
872            return "true"
873        elif val is False:
874            return "false"
875        else:
876            raise Exception("Bad meson boolean value")
877
878    def _configure_feature(self, val):
879        """
880        Returns the meson flag which signifies the value
881
882        True is enabled which requires the feature.
883        False is disabled which disables the feature.
884        None is auto which autodetects the feature.
885
886        Parameters:
887        val                 The value being converted
888        """
889        if val is True:
890            return "enabled"
891        elif val is False:
892            return "disabled"
893        elif val is None:
894            return "auto"
895        else:
896            raise Exception("Bad meson feature value")
897
898    def _configure_option(self, opts, key, val):
899        """
900        Returns the meson flag which signifies the value
901        based on the type of the opt
902
903        Parameters:
904        opt                 The meson option which we are setting
905        val                 The value being converted
906        """
907        if isinstance(opts[key], coredata.UserBooleanOption):
908            str_val = self._configure_boolean(val)
909        elif isinstance(opts[key], coredata.UserFeatureOption):
910            str_val = self._configure_feature(val)
911        else:
912            raise Exception("Unknown meson option type")
913        return "-D{}={}".format(key, str_val)
914
915    def get_configure_flags(self, build_for_testing):
916        self.build_for_testing = build_for_testing
917        meson_options = {}
918        if os.path.exists("meson.options"):
919            meson_options = self._parse_options("meson.options")
920        elif os.path.exists("meson_options.txt"):
921            meson_options = self._parse_options("meson_options.txt")
922        meson_flags = [
923            "-Db_colorout=never",
924            "-Dwerror=true",
925            "-Dwarning_level=3",
926            "-Dcpp_args='-DBOOST_USE_VALGRIND'",
927        ]
928        if build_for_testing:
929            # -Ddebug=true -Doptimization=g is helpful for abi-dumper but isn't a combination that
930            # is supported by meson's build types. Configure it manually.
931            meson_flags.append("-Ddebug=true")
932            meson_flags.append("-Doptimization=g")
933        else:
934            meson_flags.append("--buildtype=debugoptimized")
935        if OptionKey("tests") in meson_options:
936            meson_flags.append(
937                self._configure_option(
938                    meson_options, OptionKey("tests"), build_for_testing
939                )
940            )
941        if OptionKey("examples") in meson_options:
942            meson_flags.append(
943                self._configure_option(
944                    meson_options, OptionKey("examples"), build_for_testing
945                )
946            )
947        if OptionKey("itests") in meson_options:
948            meson_flags.append(
949                self._configure_option(
950                    meson_options, OptionKey("itests"), INTEGRATION_TEST
951                )
952            )
953        if MESON_FLAGS.get(self.package) is not None:
954            meson_flags.extend(MESON_FLAGS.get(self.package))
955        return meson_flags
956
957    def configure(self, build_for_testing):
958        meson_flags = self.get_configure_flags(build_for_testing)
959        try:
960            check_call_cmd(
961                "meson", "setup", "--reconfigure", "build", *meson_flags
962            )
963        except Exception:
964            shutil.rmtree("build", ignore_errors=True)
965            check_call_cmd("meson", "setup", "build", *meson_flags)
966
967        self.package = Meson._project_name("build")
968
969    def build(self):
970        check_call_cmd("ninja", "-C", "build")
971
972    def install(self):
973        check_call_cmd("sudo", "-n", "--", "ninja", "-C", "build", "install")
974        check_call_cmd("sudo", "-n", "--", "ldconfig")
975
976    def test(self):
977        # It is useful to check various settings of the meson.build file
978        # for compatibility, such as meson_version checks.  We shouldn't
979        # do this in the configure path though because it affects subprojects
980        # and dependencies as well, but we only want this applied to the
981        # project-under-test (otherwise an upstream dependency could fail
982        # this check without our control).
983        self._extra_meson_checks()
984
985        try:
986            test_args = ("--repeat", str(args.repeat), "-C", "build")
987            check_call_cmd("meson", "test", "--print-errorlogs", *test_args)
988
989        except CalledProcessError:
990            raise Exception("Unit tests failed")
991
992    def _setup_exists(self, setup):
993        """
994        Returns whether the meson build supports the named test setup.
995
996        Parameter descriptions:
997        setup              The setup target to check
998        """
999        try:
1000            with open(os.devnull, "w"):
1001                output = subprocess.check_output(
1002                    [
1003                        "meson",
1004                        "test",
1005                        "-C",
1006                        "build",
1007                        "--setup",
1008                        "{}:{}".format(self.package, setup),
1009                        "__likely_not_a_test__",
1010                    ],
1011                    stderr=subprocess.STDOUT,
1012                )
1013        except CalledProcessError as e:
1014            output = e.output
1015        output = output.decode("utf-8")
1016        return not re.search("Unknown test setup '[^']+'[.]", output)
1017
1018    def _maybe_valgrind(self):
1019        """
1020        Potentially runs the unit tests through valgrind for the package
1021        via `meson test`. The package can specify custom valgrind
1022        configurations by utilizing add_test_setup() in a meson.build
1023        """
1024        if not is_valgrind_safe():
1025            sys.stderr.write("###### Skipping valgrind ######\n")
1026            return
1027        try:
1028            if self._setup_exists("valgrind"):
1029                check_call_cmd(
1030                    "meson",
1031                    "test",
1032                    "-t",
1033                    "10",
1034                    "-C",
1035                    "build",
1036                    "--print-errorlogs",
1037                    "--setup",
1038                    "{}:valgrind".format(self.package),
1039                )
1040            else:
1041                check_call_cmd(
1042                    "meson",
1043                    "test",
1044                    "-t",
1045                    "10",
1046                    "-C",
1047                    "build",
1048                    "--print-errorlogs",
1049                    "--wrapper",
1050                    "valgrind --error-exitcode=1",
1051                )
1052        except CalledProcessError:
1053            raise Exception("Valgrind tests failed")
1054
    def analyze(self):
        """
        Run post-test analysis passes against the package: valgrind,
        clang-tidy (or scan-build), address/UB sanitizers, code coverage,
        and cppcheck.  Stages run sequentially and reuse or reconfigure
        the existing 'build' directory as needed.
        """
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile(".clang-tidy"):
            os.environ["CXX"] = "clang++"
            # Use a throwaway build dir so the tidy pass doesn't disturb
            # the main 'build' tree used by later stages.
            with TemporaryDirectory(prefix="build", dir=".") as build_dir:
                check_call_cmd("meson", "setup", build_dir)
                # A .openbmc-no-clang marker file skips the full compile
                # before the tidy run.
                if not os.path.isfile(".openbmc-no-clang"):
                    check_call_cmd("meson", "compile", "-C", build_dir)
                try:
                    check_call_cmd("ninja", "-C", build_dir, "clang-tidy-fix")
                except subprocess.CalledProcessError:
                    # Show the fixes clang-tidy applied before failing so
                    # the CI log contains the suggested diff.
                    check_call_cmd(
                        "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff"
                    )
                    raise
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd("ninja", "-C", "build", "scan-build")

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            meson_flags = self.get_configure_flags(self.build_for_testing)
            meson_flags.append("-Db_sanitize=address,undefined")
            try:
                check_call_cmd(
                    "meson", "setup", "--reconfigure", "build", *meson_flags
                )
            except Exception:
                # Reconfigure failed; rebuild the tree from scratch.
                shutil.rmtree("build", ignore_errors=True)
                check_call_cmd("meson", "setup", "build", *meson_flags)
            check_call_cmd(
                "meson",
                "test",
                "-C",
                "build",
                "--print-errorlogs",
                "--logbase",
                "testlog-ubasan",
                env=os.environ | {"UBSAN_OPTIONS": "halt_on_error=1"},
            )
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            check_call_cmd("meson", "configure", "build", "-Db_sanitize=none")
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd("meson", "configure", "build", "-Db_coverage=true")
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk("build"):
            if any([f.endswith(".gcda") for f in files]):
                check_call_cmd("ninja", "-C", "build", "coverage-html")
                break
        check_call_cmd("meson", "configure", "build", "-Db_coverage=false")
        run_cppcheck()
1120
1121    def _extra_meson_checks(self):
1122        with open(os.path.join(self.path, "meson.build"), "rt") as f:
1123            build_contents = f.read()
1124
1125        # Find project's specified meson_version.
1126        meson_version = None
1127        pattern = r"meson_version:[^']*'([^']*)'"
1128        for match in re.finditer(pattern, build_contents):
1129            group = match.group(1)
1130            meson_version = group
1131
1132        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1133        # identify this.  Add to our unit-test checks so that we don't
1134        # get a meson.build missing this.
1135        pattern = r"'cpp_std=c\+\+20'"
1136        for match in re.finditer(pattern, build_contents):
1137            if not meson_version or not meson_version_compare(
1138                meson_version, ">=0.57"
1139            ):
1140                raise Exception(
1141                    "C++20 support requires specifying in meson.build: "
1142                    + "meson_version: '>=0.57'"
1143                )
1144
1145        # C++23 requires at least Meson 1.1.1 but Meson itself doesn't
1146        # identify this.  Add to our unit-test checks so that we don't
1147        # get a meson.build missing this.
1148        pattern = r"'cpp_std=c\+\+23'"
1149        for match in re.finditer(pattern, build_contents):
1150            if not meson_version or not meson_version_compare(
1151                meson_version, ">=1.1.1"
1152            ):
1153                raise Exception(
1154                    "C++23 support requires specifying in meson.build: "
1155                    + "meson_version: '>=1.1.1'"
1156                )
1157
1158        if "get_variable(" in build_contents:
1159            if not meson_version or not meson_version_compare(
1160                meson_version, ">=0.58"
1161            ):
1162                raise Exception(
1163                    "dep.get_variable() with positional argument requires "
1164                    + "meson_Version: '>=0.58'"
1165                )
1166
1167
class Package(object):
    """
    Wraps one repository checkout and dispatches configure/build/
    install/test work to whichever build systems probe successfully.
    """

    def __init__(self, name=None, path=None):
        """
        Create a Package.

        Parameter descriptions:
        name               Package (repository) name
        path               Filesystem path of the checkout
        """
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield instantiated build systems whose probe() succeeds."""
        for system_cls in self.supported:
            candidate = system_cls(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """
        Return a detected build system, or None when none probe.

        Parameter descriptions:
        preferred          Optional build-system class to select; a
                           KeyError propagates when it wasn't detected.
        """
        detected = list(self.build_systems())
        if not detected:
            return None
        if preferred:
            by_type = {type(system): system for system in detected}
            return by_type[preferred]
        return detected[0]

    def install(self, system=None):
        """Configure (non-test mode), build, and install the package."""
        if not system:
            system = self.build_system()
        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full test flow against one build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Run the test flow against every detected build system."""
        for system in self.build_systems():
            self._test_one(system)
1209
1210
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Directories belonging to meson wrap-file subprojects are pruned from
    the walk, since their contents are vendored dependencies rather than
    part of the package under test.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    if not isinstance(filename, list):
        filename = [filename]

    filepaths = []
    for root, dirs, files in os.walk(basedir):
        if os.path.basename(root) == "subprojects":
            for f in files:
                # Only wrap files name a subproject; skip the suffix
                # computation for everything else.
                if f.endswith(".wrap"):
                    subproject = f.removesuffix(".wrap")
                    if subproject in dirs:
                        # don't find files in meson subprojects with wraps
                        dirs.remove(subproject)
        for f in filename:
            if f in files:
                filepaths.append(os.path.join(root, f))
    return filepaths
1237
1238
if __name__ == "__main__":
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra ./configure flags applied when building specific autotools
    # repositories as dependencies.
    CONFIGURE_FLAGS = {
        "phosphor-logging": [
            "--enable-metadata-processing",
            "--enable-openpower-pel-extension",
            "YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml",
        ]
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra `meson setup` flags applied when building specific meson
    # repositories (consumed by Meson.get_configure_flags).
    MESON_FLAGS = {
        "phosphor-dbus-interfaces": [
            "-Ddata_com_ibm=true",
            "-Ddata_org_open_power=true",
        ],
        "phosphor-logging": ["-Dopenpower-pel-extension=enabled"],
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf/pkg-config probe names found in configure.ac or
    # meson.build to the OpenBMC repository that provides them.
    DEPENDENCIES = {
        "AC_CHECK_LIB": {"mapper": "phosphor-objmgr"},
        "AC_CHECK_HEADER": {
            "host-ipmid": "phosphor-host-ipmid",
            "blobs-ipmid": "phosphor-ipmi-blobs",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging/log.hpp": "phosphor-logging",
        },
        "AC_PATH_PROG": {"sdbus++": "sdbusplus"},
        "PKG_CHECK_MODULES": {
            "phosphor-dbus-interfaces": "phosphor-dbus-interfaces",
            "libipmid": "phosphor-host-ipmid",
            "libipmid-host": "phosphor-host-ipmid",
            "sdbusplus": "sdbusplus",
            "sdeventplus": "sdeventplus",
            "stdplus": "stdplus",
            "gpioplus": "gpioplus",
            "phosphor-logging": "phosphor-logging",
            "phosphor-snmp": "phosphor-snmp",
            "ipmiblob": "ipmi-blob-tool",
            "hei": "openpower-libhei",
            "phosphor-ipmi-blobs": "phosphor-ipmi-blobs",
            "libcr51sign": "google-misc",
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        "AC_CHECK_LIB": 0,
        "AC_CHECK_HEADER": 0,
        "AC_PATH_PROG": 1,
        "PKG_CHECK_MODULES": 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Repositories whose dependency ordering must be adjusted before
    # installation (see DepTree.ReorderDeps).
    DEPENDENCIES_REGEX = {"phosphor-logging": r"\S+-dbus-interfaces$"}
1298
    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-w",
        "--workspace",
        dest="WORKSPACE",
        required=True,
        help="Workspace directory location(i.e. /home)",
    )
    parser.add_argument(
        "-p",
        "--package",
        dest="PACKAGE",
        required=True,
        help="OpenBMC package to be unit tested",
    )
    parser.add_argument(
        "-t",
        "--test-only",
        dest="TEST_ONLY",
        action="store_true",
        required=False,
        default=False,
        help="Only run test cases, no other validation",
    )
    # --integration-tests / --no-integration-tests are mutually exclusive;
    # the default (True) enables them.
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument(
        "--integration-tests",
        dest="INTEGRATION_TEST",
        action="store_true",
        required=False,
        default=True,
        help="Enable integration tests [default].",
    )
    arg_inttests.add_argument(
        "--no-integration-tests",
        dest="INTEGRATION_TEST",
        action="store_false",
        required=False,
        help="Disable integration tests.",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Print additional package status messages",
    )
    parser.add_argument(
        "-r", "--repeat", help="Repeat tests N times", type=int, default=1
    )
    parser.add_argument(
        "-b",
        "--branch",
        dest="BRANCH",
        required=False,
        help="Branch to target for dependent repositories",
        default="master",
    )
    parser.add_argument(
        "-n",
        "--noformat",
        dest="FORMAT",
        action="store_false",
        required=False,
        help="Whether or not to run format code",
    )
    args = parser.parse_args(sys.argv[1:])
    # Promote parsed arguments to module-level globals referenced by the
    # build-system classes above.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    if args.verbose:

        def printline(*line):
            # Verbose mode: echo each status message, space-separated.
            for arg in line:
                print(arg, end=" ")
            print()

    else:

        def printline(*line):
            # Verbosity disabled: swallow status messages.
            pass
    # Absolute path of the package checkout under test.
    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # Run format-code.sh, which will in turn call any repo-level formatters.
    if FORMAT_CODE:
        check_call_cmd(
            os.path.join(
                WORKSPACE, "openbmc-build-scripts", "scripts", "format-code.sh"
            ),
            CODE_SCAN_DIR,
        )

        # Check to see if any files changed; a non-empty diff means the
        # repo was not formatted and the --exit-code makes git fail.
        check_call_cmd(
            "git", "-C", CODE_SCAN_DIR, "--no-pager", "diff", "--exit-code"
        )

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask while installing dependencies so created files keep
    # their full requested permissions; restored after the test run.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(["run-ci.sh", "run-ci"], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)