1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11# interpreter is not used directly but this resolves dependency ordering
12# that would be broken if we didn't include it.
13from mesonbuild import interpreter
14from mesonbuild import coredata, optinterpreter
15from mesonbuild.mesonlib import OptionKey
16from mesonbuild.mesonlib import version_compare as meson_version_compare
17from urllib.parse import urljoin
18from subprocess import check_call, call, CalledProcessError
19import os
20import sys
21import argparse
22import multiprocessing
23import re
24import subprocess
25import shutil
26import platform
27
28
class DepTree():
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for i, child in enumerate(self.children):
            if child.name == name:
                # Delete by index so we never mutate the list while
                # still iterating over it.
                del self.children[i]
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        # A node matches on exact name or when its name matches the regex
        # (re.match anchors at the start of the node name).
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the 'name' node among the regex matches.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every match to the right of 'name' unless it is
        # already inside 'name's subtree (i.e. 'name' is on its path).
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names, so every node appears after
        all of its children (dependencies before dependents).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
223
def check_call_cmd(*cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Parameter descriptions:
    cmd                 List of parameters constructing the complete command

    Raises CalledProcessError if the command exits non-zero.
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)
235
236
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory of the (possibly pre-existing) clone.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        # Already cloned on a previous run; reuse it.
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc.org/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; any clone failure falls back to master.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return clone.working_dir
261
262
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True if `make -n <target>` succeeds, False otherwise.
    """
    try:
        # `make -n` dry-runs the target; it fails when the target (or the
        # makefile itself) does not exist.  Discard all output.
        check_call(['make', '-n', target],
                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except (CalledProcessError, FileNotFoundError):
        # FileNotFoundError: no `make` binary on PATH -- the target
        # certainly cannot be built, so treat it like any other failure
        # instead of letting the exception escape.
        return False
278
279
# Shared `make` argument vector: parallelize across every CPU, cap the
# load average at the CPU count, and synchronize job output.
_NPROC = str(multiprocessing.cpu_count())
make_parallel = [
    'make',
    '-j', _NPROC,   # run enough jobs to saturate all the cpus
    '-l', _NPROC,   # don't start more jobs if the load avg is too high
    '-O',           # synchronize output so logs aren't intermixed
]
289
290
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    pkg = Package()
    # Test builds exercise the package under test; otherwise the package
    # is simply installed as a dependency.
    action = pkg.test if build_for_testing else pkg.install
    action()
310
311
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # Packages pre-installed in the environment are listed in the cache
    # file; anything found there is skipped below.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:  # fixed: compare to None with `is`, not `==` (E711)
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
370
371
def run_cppcheck():
    """
    Run cppcheck against the compile database in ./build, if one exists.

    Prints a message (but does not raise) when cppcheck reports errors.
    """
    if not os.path.exists(os.path.join("build", "compile_commands.json")):
        # No compile database; nothing for cppcheck to analyze.
        return None

    # exist_ok replaces the previous try/except FileExistsError dance.
    os.makedirs("cppcheck-temp", exist_ok=True)

    # http://cppcheck.sourceforge.net/manual.pdf
    try:
        check_call_cmd(
            'cppcheck',
            '-j', str(multiprocessing.cpu_count()),
            '--enable=style,performance,portability,missingInclude',
            '--suppress=useStlAlgorithm',
            '--suppress=unusedStructMember',
            '--suppress=postfixOperator',
            '--suppress=unreadVariable',
            '--suppress=knownConditionTrueFalse',
            '--library=googletest',
            '--project=build/compile_commands.json',
            '--cppcheck-build-dir=cppcheck-temp',
        )
    except subprocess.CalledProcessError:
        print("cppcheck found errors")
398
399
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform

    Compiles a small probe exercising malloc/strcmp/strerror_r and runs it
    under valgrind; any compile or valgrind failure means valgrind results
    can't be trusted here.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n'
                '#include <stdio.h>\n'
                '#include <stdlib.h>\n'
                '#include <string.h>\n'
                'int main() {\n'
                'char *heap_str = malloc(16);\n'
                'strcpy(heap_str, "RandString");\n'
                'int res = strcmp("RandString", heap_str);\n'
                'free(heap_str);\n'
                'char errstr[64];\n'
                'strerror_r(EINVAL, errstr, sizeof(errstr));\n'
                'printf("%s\\n", errstr);\n'
                'return res;\n'
                '}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Narrowed from a bare except so Ctrl-C still propagates.
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The exe does not exist if compilation failed (or gcc is absent);
        # guard the removals so cleanup can't raise and mask the result.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
434
435
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform

    Compiles and runs a trivial program with ASan/UBSan enabled; failure
    of either step means the sanitizers don't work here.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == 'ppc64le':
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        return True
    except Exception:
        # Narrowed from a bare except so Ctrl-C still propagates.
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The exe does not exist if compilation failed (or gcc is absent);
        # guard the removals so cleanup can't raise and mask the result.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
464
465
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        check_call_cmd(*(make_parallel + ['check-valgrind']))
    except CalledProcessError:
        # Dump every per-suite log for debugging before failing.
        log_pattern = re.compile('test-suite-[a-z]+.log')
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                if log_pattern.search(f):
                    check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
492
493
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
509
510
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.

    All phase methods raise NotImplementedError; subclasses must override
    them.  (The original code raised the NotImplemented singleton, which is
    not an exception and produces a TypeError when raised in Python 3.)
    """

    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
613
614
class Autotools(BuildSystem):
    # Build-system driver for GNU Autotools packages (configure.ac based).

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An Autotools package is identified by its configure.ac file.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Redefines each dependency-bearing m4 macro (the keys of the global
        DEPENDENCIES table) so autoconf expands it into greppable
        <MACRO>_START ... <MACRO>_END markers around the argument selected
        by DEPENDENCIES_OFFSET, then maps the marked text back to package
        names through DEPENDENCIES.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Run autoconf over the augmented configure.ac fed in on stdin
        # ('-' as the input file).
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            # Non-greedy body so adjacent marker pairs don't merge.
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    # startswith: allows version-qualified module strings
                    # to still match the known dependency prefix.
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns an configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        # Bootstrap (if a bootstrap script exists) and run ./configure with
        # test/coverage/valgrind features keyed off build_for_testing and
        # the global INTEGRATION_TEST flag, plus any per-package
        # CONFIGURE_FLAGS entries.
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        conf_flags.extend([
            self._configure_feature('code-coverage', build_for_testing),
            self._configure_feature('valgrind', build_for_testing),
        ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        # Parallel `make` using the shared make_parallel argument vector.
        check_call_cmd(*make_parallel)

    def install(self):
        # Installation needs root; -n keeps sudo from prompting.
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        # Run `make check` args.repeat times (global CLI args), then
        # optional valgrind and coverage passes.  On failure, dump any
        # test-suite.log files found under the tree before raising.
        try:
            cmd = make_parallel + ['check']
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        # Static analysis for Autotools packages is cppcheck only.
        run_cppcheck()
717
718
class CMake(BuildSystem):
    """Build-system driver for CMake (CMakeLists.txt) packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A CMake package is identified by its top-level CMakeLists.txt.
        marker = os.path.join(self.path, 'CMakeLists.txt')
        return os.path.isfile(marker)

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        cmake_args = ['cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON']
        if INTEGRATION_TEST:
            cmake_args.append('-DITESTS=ON')
        cmake_args.append('.')
        check_call_cmd(*cmake_args)

    def build(self):
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        # Nothing to do; CMake packages are not installed as dependencies.
        pass

    def test(self):
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def _maybe_run_clang_tidy(self):
        # clang-tidy needs to run on a clang-specific build.
        if not os.path.isfile('.clang-tidy'):
            return
        try:
            os.mkdir("tidy-build")
        except FileExistsError:
            pass
        check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                       '-DCMAKE_CXX_COMPILER=clang++',
                       '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                       '-H.',
                       '-Btidy-build')
        # we need to cd here because otherwise clang-tidy doesn't find the
        # .clang-tidy file in the roots of repos.  Its arguably a "bug"
        # with run-clang-tidy at a minimum it's "weird" that it requires
        # the .clang-tidy to be up a dir
        os.chdir("tidy-build")
        try:
            check_call_cmd('run-clang-tidy', "-header-filter=.*", '-p',
                           '.')
        finally:
            os.chdir("..")

    def analyze(self):
        self._maybe_run_clang_tidy()
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
774
775
776class Meson(BuildSystem):
777    def __init__(self, package=None, path=None):
778        super(Meson, self).__init__(package, path)
779
780    def probe(self):
781        return os.path.isfile(os.path.join(self.path, 'meson.build'))
782
783    def dependencies(self):
784        meson_build = os.path.join(self.path, 'meson.build')
785        if not os.path.exists(meson_build):
786            return []
787
788        found_deps = []
789        for root, dirs, files in os.walk(self.path):
790            if 'meson.build' not in files:
791                continue
792            with open(os.path.join(root, 'meson.build'), 'rt') as f:
793                build_contents = f.read()
794            pattern = r"dependency\('([^']*)'.*?\),?\n"
795            for match in re.finditer(pattern, build_contents):
796                group = match.group(1)
797                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
798                if maybe_dep is not None:
799                    found_deps.append(maybe_dep)
800
801        return found_deps
802
803    def _parse_options(self, options_file):
804        """
805        Returns a set of options defined in the provides meson_options.txt file
806
807        Parameters:
808        options_file        The file containing options
809        """
810        oi = optinterpreter.OptionInterpreter('')
811        oi.process(options_file)
812        return oi.options
813
814    def _configure_boolean(self, val):
815        """
816        Returns the meson flag which signifies the value
817
818        True is true which requires the boolean.
819        False is false which disables the boolean.
820
821        Parameters:
822        val                 The value being converted
823        """
824        if val is True:
825            return 'true'
826        elif val is False:
827            return 'false'
828        else:
829            raise Exception("Bad meson boolean value")
830
831    def _configure_feature(self, val):
832        """
833        Returns the meson flag which signifies the value
834
835        True is enabled which requires the feature.
836        False is disabled which disables the feature.
837        None is auto which autodetects the feature.
838
839        Parameters:
840        val                 The value being converted
841        """
842        if val is True:
843            return "enabled"
844        elif val is False:
845            return "disabled"
846        elif val is None:
847            return "auto"
848        else:
849            raise Exception("Bad meson feature value")
850
851    def _configure_option(self, opts, key, val):
852        """
853        Returns the meson flag which signifies the value
854        based on the type of the opt
855
856        Parameters:
857        opt                 The meson option which we are setting
858        val                 The value being converted
859        """
860        if isinstance(opts[key], coredata.UserBooleanOption):
861            str_val = self._configure_boolean(val)
862        elif isinstance(opts[key], coredata.UserFeatureOption):
863            str_val = self._configure_feature(val)
864        else:
865            raise Exception('Unknown meson option type')
866        return "-D{}={}".format(key, str_val)
867
868    def configure(self, build_for_testing):
869        self.build_for_testing = build_for_testing
870        meson_options = {}
871        if os.path.exists("meson_options.txt"):
872            meson_options = self._parse_options("meson_options.txt")
873        meson_flags = [
874            '-Db_colorout=never',
875            '-Dwerror=true',
876            '-Dwarning_level=3',
877        ]
878        if build_for_testing:
879            meson_flags.append('--buildtype=debug')
880        else:
881            meson_flags.append('--buildtype=debugoptimized')
882        if OptionKey('tests') in meson_options:
883            meson_flags.append(self._configure_option(
884                meson_options, OptionKey('tests'), build_for_testing))
885        if OptionKey('examples') in meson_options:
886            meson_flags.append(self._configure_option(
887                meson_options, OptionKey('examples'), build_for_testing))
888        if OptionKey('itests') in meson_options:
889            meson_flags.append(self._configure_option(
890                meson_options, OptionKey('itests'), INTEGRATION_TEST))
891        if MESON_FLAGS.get(self.package) is not None:
892            meson_flags.extend(MESON_FLAGS.get(self.package))
893        try:
894            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
895                           *meson_flags)
896        except:
897            shutil.rmtree('build')
898            check_call_cmd('meson', 'setup', 'build', *meson_flags)
899
900    def build(self):
901        check_call_cmd('ninja', '-C', 'build')
902
903    def install(self):
904        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
905
906    def test(self):
907        # It is useful to check various settings of the meson.build file
908        # for compatibility, such as meson_version checks.  We shouldn't
909        # do this in the configure path though because it affects subprojects
910        # and dependencies as well, but we only want this applied to the
911        # project-under-test (otherwise an upstream dependency could fail
912        # this check without our control).
913        self._extra_meson_checks()
914
915        try:
916            test_args = ('--repeat', str(args.repeat), '-C', 'build')
917            check_call_cmd('meson', 'test', '--print-errorlogs', *test_args)
918
919        except CalledProcessError:
920            raise Exception('Unit tests failed')
921
922    def _setup_exists(self, setup):
923        """
924        Returns whether the meson build supports the named test setup.
925
926        Parameter descriptions:
927        setup              The setup target to check
928        """
929        try:
930            with open(os.devnull, 'w') as devnull:
931                output = subprocess.check_output(
932                    ['meson', 'test', '-C', 'build',
933                     '--setup', setup, '-t', '0'],
934                    stderr=subprocess.STDOUT)
935        except CalledProcessError as e:
936            output = e.output
937        output = output.decode('utf-8')
938        return not re.search('Test setup .* not found from project', output)
939
940    def _maybe_valgrind(self):
941        """
942        Potentially runs the unit tests through valgrind for the package
943        via `meson test`. The package can specify custom valgrind
944        configurations by utilizing add_test_setup() in a meson.build
945        """
946        if not is_valgrind_safe():
947            sys.stderr.write("###### Skipping valgrind ######\n")
948            return
949        try:
950            if self._setup_exists('valgrind'):
951                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
952                               '--print-errorlogs', '--setup', 'valgrind')
953            else:
954                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
955                               '--print-errorlogs', '--wrapper', 'valgrind')
956        except CalledProcessError:
957            raise Exception('Valgrind tests failed')
958
    def analyze(self):
        """
        Run the full static/dynamic analysis pass over the package:
        valgrind (when safe), clang-tidy or scan-build, address/UB
        sanitizers (when safe), coverage, and cppcheck.
        """
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            os.environ["CXX"] = "clang++"
            check_call_cmd('meson', 'setup', 'build-clang')
            os.chdir("build-clang")
            try:
                check_call_cmd('run-clang-tidy', '-fix', '-format', '-p', '.')
            except subprocess.CalledProcessError:
                # Show what -fix changed so CI logs capture the diff,
                # then re-raise to fail the run.
                check_call_cmd("git", "-C", CODE_SCAN_DIR,
                               "--no-pager", "diff")
                raise
            finally:
                # Always return to the package root, even on failure.
                os.chdir("..")

        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build', '--print-errorlogs',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Reset sanitizers so later build steps are unaffected.
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any([f.endswith('.gcda') for f in files]):
                check_call_cmd('ninja', '-C', 'build',
                               'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()
1015
1016    def _extra_meson_checks(self):
1017        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
1018            build_contents = f.read()
1019
1020        # Find project's specified meson_version.
1021        meson_version = None
1022        pattern = r"meson_version:[^']*'([^']*)'"
1023        for match in re.finditer(pattern, build_contents):
1024            group = match.group(1)
1025            meson_version = group
1026
1027        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1028        # identify this.  Add to our unit-test checks so that we don't
1029        # get a meson.build missing this.
1030        pattern = r"'cpp_std=c\+\+20'"
1031        for match in re.finditer(pattern, build_contents):
1032            if not meson_version or \
1033                    not meson_version_compare(meson_version, ">=0.57"):
1034                raise Exception(
1035                    "C++20 support requires specifying in meson.build: "
1036                    + "meson_version: '>=0.57'"
1037                )
1038
1039
class Package(object):
    """
    Represents a repository under test, dispatching configure/build/
    install/test to whichever supported build system(s) it provides.
    """

    def __init__(self, name=None, path=None):
        # Candidate build-system handlers, tried in this order.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Lazily yield instantiated build systems whose probe succeeds."""
        for system in self.supported:
            instance = system(self.name, self.path)
            if instance.probe():
                yield instance

    def build_system(self, preferred=None):
        """Return the preferred build system, else the first detected one."""
        systems = list(self.build_systems())
        if not systems:
            return None
        if preferred:
            # Map concrete class -> instance; KeyError if not detected.
            by_type = {type(system): system for system in systems}
            return by_type[preferred]
        return systems[0]

    def install(self, system=None):
        """Configure, build, and install via the given/default system."""
        if not system:
            system = self.build_system()
        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Build, install, and test the package with one build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Run the full test flow for every detected build system."""
        for system in self.build_systems():
            self._test_one(system)
1081
1082
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    targets = filename if isinstance(filename, list) else [filename]

    found = []
    for root, dirs, files in os.walk(basedir):
        if os.path.split(root)[-1] == 'subprojects':
            # don't find files in meson subprojects with wraps: a
            # 'foo.wrap' file alongside a 'foo' directory means the
            # directory is wrap-managed, so prune it from the walk.
            for entry in files:
                wrapped = '.'.join(entry.split('.')[0:-1])
                if entry.endswith('.wrap') and wrapped in dirs:
                    dirs.remove(wrapped)
        found.extend(os.path.join(root, name)
                     for name in targets if name in files)
    return found
1109
1110
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'],
        'phosphor-logging':
        ['-Dopenpower-pel-extension=enabled']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macro arguments found in configure.ac to the OpenBMC
    # repository that provides them.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
            'libcr51sign': 'google-misc',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                              action="store_true", required=False, default=True,
                              help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                              action="store_false", required=False,
                              help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    # Promote frequently-used arguments to module-level globals; the
    # build-system classes defined above read several of these directly
    # (e.g. TEST_ONLY, INTEGRATION_TEST, MESON_FLAGS, CODE_SCAN_DIR).
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline is a no-op unless --verbose was given.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

        # Check to see if any files changed
        check_call_cmd("git", "-C", CODE_SCAN_DIR,
                       "--no-pager", "diff", "--exit-code")

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # NOTE(review): umask 0 presumably so dependency installs create
    # world-readable files regardless of the caller's umask — confirm.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    # dep_added tracks which packages have already been processed so the
    # tree build doesn't recurse into the same repo twice.
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    # Restore the caller's umask now that installs are done.
    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1274