xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 6015ca1169185fef8434b31fef93a96e87861a0a)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import sets
19import subprocess
20import shutil
21import platform
22
23
class DepTree():
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and a list of DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        # Ordered list of child DepTree nodes; order matters for install order
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node. Returns the new child so the
        caller can continue building the tree below it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with the given name. No-op when no
        child matches.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                # Mutating while iterating is safe here because we return
                # immediately after the removal.
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.
        Searches depth-first starting at this node.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.
        The head node itself has no parent, so it also yields None.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            # Pass a fresh list (path + [...]) so sibling branches do not
            # share mutable state.
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        # Collect matches from every subtree, not just the first hit
        for child in self.children:
            return_paths = None
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.
        Both nodes must already exist in the tree; lookups that fail will
        raise AttributeError (no explicit error handling here).

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        # Locate the path that ends exactly at 'name'; everything after it
        # in the paths list is "to the right" of that node.
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            # Skip nodes already under the 'name' node
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names (children before parents),
        i.e. an ordering in which dependencies can be installed first.
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        # NOTE: Python 2 print statement; this script targets Python 2.
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)
217
218
def check_call_cmd(*cmd):
    """
    Echo the working directory and the command about to run, then execute
    it via check_call (which raises CalledProcessError on non-zero exit).

    Parameter descriptions:
    cmd                 Argument sequence forming the complete command
    """
    cwd = os.getcwd()
    printline(cwd, ">", " ".join(cmd))
    check_call(cmd)
230
231
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location. Reuses an existing clone when present.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the path to the package's working directory.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
        repo_inst = clone.working_dir
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any clone failure still falls back to master.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
        repo_inst = clone.working_dir
    return repo_inst
256
257
def make_target_exists(target):
    """
    Ask make for a dry run of the given target to learn whether the
    makefile in the current directory can build it.

    Parameter descriptions:
    target              The make target we are checking
    """
    with open(os.devnull, 'w') as devnull:
        try:
            check_call(['make', '-n', target],
                       stdout=devnull, stderr=devnull)
        except CalledProcessError:
            # make exited non-zero: the target is unknown
            return False
    return True
273
274
# Base `make` invocation shared by every parallel build in this script.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
284
285
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    # Test builds exercise the package; otherwise it is just installed
    pkg = Package()
    action = pkg.test if build_for_testing else pkg.install
    action()
305
306
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # NOTE(review): assumes /tmp/depcache already exists (presumably created
    # by an earlier CI stage) and that its first line lists cached deps --
    # confirm against the caller.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    for dep in sets.Set(pkg.build_system().dependencies()):
        # Substring test against the cache line: a dep whose name is a
        # substring of another cached entry would also be skipped.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in "+name)

    # Mark this package complete once all of its deps were processed
    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
360
361
def run_cppcheck():
    """
    Run cppcheck over every tracked C/C++ source in the current git
    repository. Skips generated .mako. files and does nothing when the
    repository has no C/C++ sources. Raises on cppcheck failure.
    """
    # Tracked C/C++ sources, excluding anything with '.mako.' in the name
    source_re = re.compile('((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    tracked = subprocess.check_output(['git', 'ls-files'])
    cppcheck_files = [name for name in tracked.decode('utf-8').split()
                      if source_re.match(name)]

    if not cppcheck_files:
        # skip cppcheck if there arent' any c or cpp sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--file-list=-']

    # Feed the file list over stdin so long lists don't hit argv limits
    proc = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    (out, err) = proc.communicate(input='\n'.join(cppcheck_files))

    if proc.wait():
        raise Exception('Cppcheck failed')
    print(out)
    print(err)
392
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles and runs a small C program under valgrind; any failure
    (missing toolchain, valgrind error, non-zero exit) marks the
    platform as unsafe.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Narrowed from a bare 'except:' so ctrl-C is not swallowed
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # Only remove artifacts that were actually created; previously this
        # raised OSError when gcc was missing and the binary was never built.
        for artifact in (src, exe):
            if os.path.exists(artifact):
                os.remove(artifact)
427
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; any
    failure (missing toolchain or runtime) marks the platform as unsafe.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Narrowed from a bare 'except:' so ctrl-C is not swallowed
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # Only remove artifacts that were actually created; previously this
        # raised OSError when compilation failed or gcc was absent.
        for artifact in (src, exe):
            if os.path.exists(artifact):
                os.remove(artifact)
449
450
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. Skipped when the platform cannot run
    valgrind or when the package has no such make target.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        check_call_cmd(*(make_parallel + ['check-valgrind']))
    except CalledProcessError:
        # Dump every matching test-suite log to aid debugging, then fail
        for root, _, files in os.walk(os.getcwd()):
            for log in (f for f in files
                        if re.search('test-suite-[a-z]+.log', f)):
                check_call_cmd('cat', os.path.join(root, log))
        raise Exception('Valgrind tests failed')
477
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
493
494
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.
    """
    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        self.package = package if package else os.path.basename(os.path.realpath(self.path))
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        # Was 'raise NotImplemented': NotImplemented is not an exception, so
        # raising it produced a TypeError instead of the intended error.
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
595
596
class Autotools(BuildSystem):
    """BuildSystem driver for packages configured with GNU Autotools."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An autotools package is identified by its configure.ac
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies by instrumenting configure.ac.

        Known dependency macros (from the module-level DEPENDENCIES table,
        defined elsewhere in this file) are overridden with START/END
        markers, the result is run through autoconf, and the marked
        argument text is matched back against the dependency tables.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        configure_ac_contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.iterkeys():
            # DEPENDENCIES_OFFSET selects which macro argument carries the
            # dependency list ($N is the N-th macro argument in m4)
            configure_ac_contents += ('m4_define([' + macro + '], [' +
                                      macro + '_START$' +
                                      str(DEPENDENCIES_OFFSET[macro] + 1) +
                                      macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            configure_ac_contents += f.read()

        # Feed the instrumented configure.ac to autoconf over stdin
        autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.iterkeys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].iterkeys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
        ]
        # TEST_ONLY is a module-level global set elsewhere in this file
        if not TEST_ONLY:
            conf_flags.extend([
                self._configure_feature('code-coverage', build_for_testing),
                self._configure_feature('valgrind', build_for_testing),
            ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found, if any
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        try:
            cmd = make_parallel + ['check']
            # 'args' is the module-level parsed command-line namespace;
            # --repeat re-runs the suite to shake out flaky tests
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)
        except CalledProcessError:
            # Dump any test-suite logs found before failing
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
697
698
class CMake(BuildSystem):
    """BuildSystem driver for CMake-based packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A CMake package is identified by its top-level CMakeLists.txt
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        jobs = str(multiprocessing.cpu_count())
        check_call_cmd('cmake', '--build', '.', '--', '-j', jobs)

    def install(self):
        # Installation is a no-op for CMake packages
        pass

    def test(self):
        # Only run ctest when the generated makefile has a 'test' target
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        # clang-tidy runs only when the project carries a configuration
        if os.path.isfile('.clang-tidy'):
            check_call_cmd('run-clang-tidy-8.py', '-p', '.')
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
730
731
class Meson(BuildSystem):
    """BuildSystem driver for packages built with Meson/Ninja."""

    def __init__(self, package=None, path=None):
        super(Meson, self).__init__(package, path)

    def probe(self):
        # A meson package is identified by its top-level meson.build
        return os.path.isfile(os.path.join(self.path, 'meson.build'))

    def dependencies(self):
        """Extract openbmc dependencies from dependency() calls in every
        meson.build under the package tree, mapped through the
        DEPENDENCIES['PKG_CHECK_MODULES'] table (defined elsewhere in
        this file)."""
        meson_build = os.path.join(self.path, 'meson.build')
        if not os.path.exists(meson_build):
            return []

        found_deps = []
        for root, dirs, files in os.walk(self.path):
            if 'meson.build' not in files:
                continue
            with open(os.path.join(root, 'meson.build'), 'rt') as f:
                build_contents = f.read()
            # Match dependency('<name>' ...) invocations
            for match in re.finditer(r"dependency\('([^']*)'.*?\)\n", build_contents):
                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(match.group(1))
                if maybe_dep is not None:
                    found_deps.append(maybe_dep)

        return found_deps

    def _parse_options(self, options_file):
        """
        Returns a set of option names defined in the provided
        meson_options.txt file

        Parameters:
        options_file        The file containing options
        """
        options_contents = ''
        with open(options_file, "rt") as f:
            options_contents += f.read()
        options = sets.Set()
        # Match option('<name>' ...) declarations
        pattern = 'option\\(\\s*\'([^\']*)\''
        for match in re.compile(pattern).finditer(options_contents):
            options.add(match.group(1))
        return options

    def _configure_feature(self, val):
        """
        Returns the meson flag which signifies the value

        True is enabled which requires the feature.
        False is disabled which disables the feature.
        None is auto which autodetects the feature.

        Parameters:
        val                 The value being converted
        """
        if val is True:
            return "enabled"
        elif val is False:
            return "disabled"
        elif val is None:
            return "auto"
        else:
            raise Exception("Bad meson feature value")

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        meson_options = sets.Set()
        if os.path.exists("meson_options.txt"):
            meson_options = self._parse_options("meson_options.txt")
        meson_flags = [
            '-Db_colorout=never',
            '-Dwerror=true',
            '-Dwarning_level=3',
        ]
        if build_for_testing:
            meson_flags.append('--buildtype=debug')
        else:
            meson_flags.append('--buildtype=debugoptimized')
        # Only pass tests/examples options when the package defines them
        if 'tests' in meson_options:
            meson_flags.append('-Dtests=' + self._configure_feature(build_for_testing))
        if 'examples' in meson_options:
            meson_flags.append('-Dexamples=' + str(build_for_testing).lower())
        # MESON_FLAGS is a module-level table of per-package extra flags
        if MESON_FLAGS.get(self.package) is not None:
            meson_flags.extend(MESON_FLAGS.get(self.package))
        try:
            # Prefer reconfiguring an existing build directory...
            check_call_cmd('meson', 'setup', '--reconfigure', 'build', *meson_flags)
        except:
            # ...and fall back to a clean setup when that fails
            shutil.rmtree('build')
            check_call_cmd('meson', 'setup', 'build', *meson_flags)

    def build(self):
        check_call_cmd('ninja', '-C', 'build')

    def install(self):
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')

    def test(self):
        try:
            check_call_cmd('meson', 'test', '-C', 'build')
        except CalledProcessError:
            # Dump the meson test log before failing
            for root, _, files in os.walk(os.getcwd()):
                if 'testlog.txt' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
            raise Exception('Unit tests failed')

    def _setup_exists(self, setup):
        """
        Returns whether the meson build supports the named test setup.

        Parameter descriptions:
        setup              The setup target to check
        """
        try:
            # '-t 0' disables the test timeout; we only care about meson's
            # reaction to the setup name, not the test results
            with open(os.devnull, 'w') as devnull:
                output = subprocess.check_output(
                        ['meson', 'test', '-C', 'build',
                         '--setup', setup, '-t', '0'],
                        stderr=subprocess.STDOUT)
        except CalledProcessError as e:
            output = e.output
        # The setup exists unless meson reported it wasn't found
        return not re.search('Test setup .* not found from project', output)

    def _maybe_valgrind(self):
        """
        Potentially runs the unit tests through valgrind for the package
        via `meson test`. The package can specify custom valgrind configurations
        by utilizing add_test_setup() in a meson.build
        """
        if not is_valgrind_safe():
            sys.stderr.write("###### Skipping valgrind ######\n")
            return
        try:
            # Prefer the package's own valgrind setup; otherwise wrap every
            # test in a plain valgrind invocation
            if self._setup_exists('valgrind'):
                check_call_cmd('meson', 'test', '-C', 'build',
                               '--setup', 'valgrind')
            else:
                check_call_cmd('meson', 'test', '-C', 'build',
                               '--wrapper', 'valgrind')
        except CalledProcessError:
            # Dump the valgrind test log before failing
            for root, _, files in os.walk(os.getcwd()):
                if 'testlog-valgrind.txt' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'testlog-valgrind.txt'))
            raise Exception('Valgrind tests failed')

    def analyze(self):
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            check_call_cmd('run-clang-tidy-8.py', '-p',
                           'build')
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none', '-Db_lundef=true')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")
907
908        # Run coverage checks
909        check_call_cmd('meson', 'configure', 'build',
910                       '-Db_coverage=true')
911        self.test()
912        # Only build coverage HTML if coverage files were produced
913        for root, dirs, files in os.walk('build'):
914            if any([f.endswith('.gcda') for f in files]):
915                check_call_cmd('ninja', '-C', 'build',
916                               'coverage-html')
917                break
918        check_call_cmd('meson', 'configure', 'build',
919                       '-Db_coverage=false')
920        run_cppcheck()
921
922
class Package(object):
    """
    Represents a package checkout and drives it through whichever of the
    supported build systems its source tree actually provides.
    """

    def __init__(self, name=None, path=None):
        # Build systems we know how to drive, in probe-preference order.
        self.supported = [Autotools, Meson, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of each supported build system whose probe()
        succeeds against this package's source tree (lazy generator)."""
        for system_cls in self.supported:
            candidate = system_cls(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the build system to drive. When `preferred` names a
        specific class, select that one; otherwise take the first that
        probed successfully."""
        systems = self.build_systems()
        if preferred:
            by_class = {type(system): system for system in systems}
            return by_class[preferred]
        return next(iter(systems))

    def install(self, system=None):
        """Configure (without test support), build, and install."""
        if not system:
            system = self.build_system()
        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Full configure/build/install/test/analyze cycle for one system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        system.analyze()

    def test(self):
        """Run the complete test cycle under every detected build system."""
        for system in self.build_systems():
            self._test_one(system)
960
961
def find_file(filename, basedir):
    """
    Finds all occurrences of a file in the base directory
    and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file to find
    basedir               The base directory search in
    """

    # Walk the whole tree, collecting a full path for every directory
    # that contains a file with the requested name.
    return [os.path.join(root, filename)
            for root, _, files in os.walk(basedir)
            if filename in files]
977
if __name__ == '__main__':
    # Extra ./configure flags needed when the named git repository is
    # built as a dependency.
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # Extra meson flags for dependency builds (currently none defined).
    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
    }

    # Maps an autoconf macro plus its library/header/program argument to
    # the git repository that provides it; used to discover a package's
    # openbmc dependencies from its configure.ac.
    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which macro argument names the dependency for each macro above.
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # Regex matching dependency names that must be installed before the
    # keyed repository; used to reorder the dependency tree below.
    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    if args.verbose:
        # NOTE: Python 2 print statement — this script is Python-2-only
        # (see also the urlparse/sets imports and iteritems usage below).
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        # Verbosity disabled: silently drop all status messages.
        printline = lambda *l: None

    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        check_call_cmd("./format-code.sh", CODE_SCAN_DIR)

    # Clear the umask (000 is octal zero) while dependencies are built and
    # installed; the caller's mask is restored once installs complete.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    # dep_added tracks which packages have already been pulled into the
    # tree so shared dependencies are not processed twice.
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG,
                   os.path.join(WORKSPACE, UNIT_TEST_PKG),
                   dep_added,
                   dep_tree,
                   BRANCH)

    # Reorder Dependency Tree per DEPENDENCIES_REGEX so regex-matched
    # dependencies install before the packages that need them.
    for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    # All installs finished; restore the caller's umask.
    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file('run-ci.sh', os.path.join(WORKSPACE, UNIT_TEST_PKG))
    if ci_scripts:
        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
        for ci_script in ci_scripts:
            check_call_cmd('sh', ci_script)
1114