#!/usr/bin/env python3

"""
This script determines the given package's openbmc dependencies from its
build configuration (e.g. configure.ac or meson.build), then downloads,
configures, builds, and installs each of those dependencies. The given
package is then configured, built, and installed prior to executing its
unit tests.
"""
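
# Typical invocation (the workspace path below is only an example; the flags
# are defined by the argument parser in __main__):
#
#   ./unit-test.py -w /tmp/unit-test-workspace -p phosphor-logging -v
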
9
10from git import Repo
11from urllib.parse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import subprocess
19import shutil
20import platform
21
22
23class DepTree():
24    """
25    Represents package dependency tree, where each node is a DepTree with a
26    name and DepTree children.
27    """
28
29    def __init__(self, name):
30        """
31        Create new DepTree.
32
33        Parameter descriptions:
34        name               Name of new tree node.
35        """
36        self.name = name
37        self.children = list()
38
39    def AddChild(self, name):
40        """
41        Add new child node to current node.
42
43        Parameter descriptions:
44        name               Name of new child
45        """
46        new_child = DepTree(name)
47        self.children.append(new_child)
48        return new_child
49
50    def AddChildNode(self, node):
51        """
52        Add existing child node to current node.
53
54        Parameter descriptions:
55        node               Tree node to add
56        """
57        self.children.append(node)
58
59    def RemoveChild(self, name):
60        """
61        Remove child node.
62
63        Parameter descriptions:
64        name               Name of child to remove
65        """
66        for child in self.children:
67            if child.name == name:
68                self.children.remove(child)
69                return
70
71    def GetNode(self, name):
72        """
73        Return node with matching name. Return None if not found.
74
75        Parameter descriptions:
76        name               Name of node to return
77        """
78        if self.name == name:
79            return self
80        for child in self.children:
81            node = child.GetNode(name)
82            if node:
83                return node
84        return None
85
86    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If the tree contains nodes whose names match
        'name' and 'regex_str', move the matching 'regex_str' nodes that
        appear after the 'name' node so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names.
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
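
# Illustrative DepTree usage (not executed by this script): dependencies are
# installed in post-order, so leaf nodes come before the packages that
# require them.
#
#   head = DepTree('app')
#   head.AddChild('libA').AddChild('libB')   # app -> libA -> libB
#   head.GetInstallList()                    # ['libB', 'libA', 'app']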


def check_call_cmd(*cmd):
    """
    Verbosely prints the directory the given command is called from and the
    command itself, then executes the command using check_call.

    Parameter descriptions:
    cmd                 List of parameters constructing the complete command
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)


def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
        repo_inst = clone.working_dir
    except Exception:
        printline("Input branch not found, defaulting to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
        repo_inst = clone.working_dir
    return repo_inst


def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        cmd = ['make', '-n', target]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        return False


make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]


def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for the package.

    Parameter descriptions:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    pkg = Package()
    if build_for_testing:
        pkg.test()
    else:
        pkg.install()


def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy.

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head

    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    for dep in set(pkg.build_system().dependencies()):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added


def run_cppcheck():
    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    cppcheck_files = []
    stdout = subprocess.check_output(['git', 'ls-files'])

    for f in stdout.decode('utf-8').split():
        if match_re.match(f):
            cppcheck_files.append(f)

    if not cppcheck_files:
        # skip cppcheck if there aren't any C or C++ sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--file-list=-']

    cppcheck_process = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    (stdout, stderr) = cppcheck_process.communicate(
        input='\n'.join(cppcheck_files).encode('utf-8'))

    # Print the analysis output before checking the exit status so that
    # failure diagnostics are not lost.
    print(stdout.decode('utf-8'))
    print(stderr.decode('utf-8'))

    if cppcheck_process.wait():
        raise Exception('Cppcheck failed')


def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        os.remove(src)
        os.remove(exe)


def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)
        return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        os.remove(src)
        os.remove(exe)


def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(*cmd)
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')


def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        cmd = make_parallel + ['check-code-coverage']
        check_call_cmd(*cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')


class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.
    """
    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError


class Autotools(BuildSystem):
    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr.decode('utf-8'))
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag
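
    # For illustration: _configure_feature('tests', True) returns
    # '--enable-tests' and _configure_feature('valgrind', False) returns
    # '--disable-valgrind'.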

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
        ]
        if not TEST_ONLY:
            conf_flags.extend([
                self._configure_feature('code-coverage', build_for_testing),
                self._configure_feature('valgrind', build_for_testing),
            ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        try:
            cmd = make_parallel + ['check']
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()


class CMake(BuildSystem):
    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        pass

    def test(self):
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        if os.path.isfile('.clang-tidy'):
            check_call_cmd('run-clang-tidy-8.py', '-p', '.')
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()


class Meson(BuildSystem):
    def __init__(self, package=None, path=None):
        super(Meson, self).__init__(package, path)

    def probe(self):
        return os.path.isfile(os.path.join(self.path, 'meson.build'))

    def dependencies(self):
        meson_build = os.path.join(self.path, 'meson.build')
        if not os.path.exists(meson_build):
            return []

        found_deps = []
        for root, dirs, files in os.walk(self.path):
            if 'meson.build' not in files:
                continue
            with open(os.path.join(root, 'meson.build'), 'rt') as f:
                build_contents = f.read()
            pattern = r"dependency\('([^']*)'.*?\)\n"
            for match in re.finditer(pattern, build_contents):
                group = match.group(1)
                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
                if maybe_dep is not None:
                    found_deps.append(maybe_dep)

        return found_deps
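
    # For illustration: a meson.build line such as
    #     sdbusplus_dep = dependency('sdbusplus')
    # maps to the 'sdbusplus' repository through the PKG_CHECK_MODULES table
    # in DEPENDENCIES (defined under __main__).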

    def _parse_options(self, options_file):
        """
        Returns a set of options defined in the provided meson_options.txt file

        Parameters:
        options_file        The file containing options
        """
        options_contents = ''
        with open(options_file, "rt") as f:
            options_contents += f.read()
        options = set()
        pattern = 'option\\(\\s*\'([^\']*)\''
        for match in re.compile(pattern).finditer(options_contents):
            options.add(match.group(1))
        return options

    def _configure_feature(self, val):
        """
        Returns the meson flag which signifies the value

        True is enabled which requires the feature.
        False is disabled which disables the feature.
        None is auto which autodetects the feature.

        Parameters:
        val                 The value being converted
        """
        if val is True:
            return "enabled"
        elif val is False:
            return "disabled"
        elif val is None:
            return "auto"
        else:
            raise Exception("Bad meson feature value")

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        meson_options = set()
        if os.path.exists("meson_options.txt"):
            meson_options = self._parse_options("meson_options.txt")
        meson_flags = [
            '-Db_colorout=never',
            '-Dwerror=true',
            '-Dwarning_level=3',
        ]
        if build_for_testing:
            meson_flags.append('--buildtype=debug')
        else:
            meson_flags.append('--buildtype=debugoptimized')
        if 'tests' in meson_options:
            flag_args = self._configure_feature(build_for_testing)
            meson_flags.append('-Dtests=' + flag_args)
        if 'examples' in meson_options:
            meson_flags.append('-Dexamples=' + str(build_for_testing).lower())
        if MESON_FLAGS.get(self.package) is not None:
            meson_flags.extend(MESON_FLAGS.get(self.package))
        try:
            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
                           *meson_flags)
        except Exception:
            # If the existing build directory can't be reconfigured, start
            # over from scratch.
            shutil.rmtree('build', ignore_errors=True)
            check_call_cmd('meson', 'setup', 'build', *meson_flags)

    def build(self):
        check_call_cmd('ninja', '-C', 'build')

    def install(self):
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')

    def test(self):
        try:
            check_call_cmd('meson', 'test', '-C', 'build')
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'testlog.txt' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
            raise Exception('Unit tests failed')

    def _setup_exists(self, setup):
        """
        Returns whether the meson build supports the named test setup.

        Parameter descriptions:
        setup              The setup target to check
        """
        try:
            output = subprocess.check_output(
                    ['meson', 'test', '-C', 'build',
                     '--setup', setup, '-t', '0'],
                    stderr=subprocess.STDOUT)
        except CalledProcessError as e:
            output = e.output
        output = output.decode('utf-8')
        return not re.search('Test setup .* not found from project', output)

    def _maybe_valgrind(self):
        """
        Potentially runs the unit tests through valgrind for the package
        via `meson test`. The package can specify custom valgrind
        configurations by utilizing add_test_setup() in a meson.build
        """
        if not is_valgrind_safe():
            sys.stderr.write("###### Skipping valgrind ######\n")
            return
        try:
            if self._setup_exists('valgrind'):
                check_call_cmd('meson', 'test', '-C', 'build',
                               '--setup', 'valgrind')
            else:
                check_call_cmd('meson', 'test', '-C', 'build',
                               '--wrapper', 'valgrind')
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'testlog-valgrind.txt' not in files:
                    continue
                cat_args = os.path.join(root, 'testlog-valgrind.txt')
                check_call_cmd('cat', cat_args)
            raise Exception('Valgrind tests failed')

    def analyze(self):
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            check_call_cmd('run-clang-tidy-8.py', '-p',
                           'build')
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none', '-Db_lundef=true')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any([f.endswith('.gcda') for f in files]):
                check_call_cmd('ninja', '-C', 'build',
                               'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()


class Package(object):
    def __init__(self, name=None, path=None):
        self.supported = [Autotools, Meson, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        instances = (system(self.name, self.path) for system in self.supported)
        return (instance for instance in instances if instance.probe())

    def build_system(self, preferred=None):
        systems = list(self.build_systems())

        if not systems:
            return None

        if preferred:
            return {type(system): system for system in systems}[preferred]

        return next(iter(systems))

    def install(self, system=None):
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        system.configure(True)
        system.build()
        system.install()
        system.test()
        system.analyze()

    def test(self):
        for system in self.build_systems():
            self._test_one(system)


def find_file(filename, basedir):
    """
    Finds all occurrences of a file in the base directory
    and returns the paths to each instance found.

    Parameter descriptions:
    filename              The name of the file to find
    basedir               The base directory to search in
    """

    filepaths = []
    for root, dirs, files in os.walk(basedir):
        if filename in files:
            filepaths.append(os.path.join(root, filename))
    return filepaths


if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }
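
    # For example (illustrative macro invocation): in
    # PKG_CHECK_MODULES([FOO], [sdbusplus], ...) the offset of 1 selects the
    # second macro argument (the module list), which Autotools.dependencies()
    # expands as "$2" and then matches against DEPENDENCIES above.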

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }
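
    # The entry above makes ReorderDeps() re-parent any '*-dbus-interfaces'
    # repositories discovered after phosphor-logging in the tree, so they land
    # earlier than phosphor-logging in the post-order install list.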

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location (i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run the format code check")
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        check_call_cmd("./format-code.sh", CODE_SCAN_DIR)

    # Check if this repo has a supported build system infrastructure
    pkg = Package(UNIT_TEST_PKG, os.path.join(WORKSPACE, UNIT_TEST_PKG))
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    prev_umask = os.umask(0o000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG,
                   os.path.join(WORKSPACE, UNIT_TEST_PKG),
                   dep_added,
                   dep_tree,
                   BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has; there can be multiple and they
    # may live anywhere in the repository.
    ci_scripts = find_file('run-ci.sh', os.path.join(WORKSPACE, UNIT_TEST_PKG))
    if ci_scripts:
        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
        for ci_script in ci_scripts:
            check_call_cmd('sh', ci_script)
1135