xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 4c9928e535740cc7b06e1620e455aa9028841eb9)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import sets
19import subprocess
20import shutil
21import platform
22
23
class DepTree():
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and a list of DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with the given name. No-op if no
        child matches (does not recurse into grandchildren).

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.
        Checks this node first, then searches children depth-first.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.
        The head node itself has no parent, so it also yields None.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node (internal recursion arg)
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            # Pass a fresh list (path + [...]) so sibling branches never
            # share/mutate the same accumulator.
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            return_paths = None
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            # Collect every matching path from the subtree.
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        NOTE(review): assumes both names exist in the tree; if either is
        missing the None lookup raises AttributeError -- confirm callers
        guarantee presence.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        # Locate the path that ends exactly at 'name'; only nodes listed
        # after it are candidates to move.
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            # Skip nodes already beneath 'name'.
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names, i.e. dependencies come
        before the packages that depend on them.
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        # Python 2 print statement; this script targets python2.
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)
217
218
def check_call_cmd(*cmd):
    """
    Verbosely print the current working directory and the command being
    executed, then run it with check_call (raises CalledProcessError on a
    non-zero exit).

    Parameter descriptions:
    cmd                 List of parameters constructing the complete command
    """
    rendered = " ".join(cmd)
    printline(os.getcwd(), ">", rendered)
    check_call(cmd)
230
231
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location. An existing checkout (detected via its .git
    directory) is reused as-is, skipping the network fetch.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory path of the repository.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        # Already cloned on a previous run.
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        repo_inst = Repo.clone_from(pkg_repo, pkg_dir,
                branch=branch).working_dir
    except:
        # NOTE(review): bare except also swallows non-git failures (e.g.
        # KeyboardInterrupt); consider narrowing to git.GitCommandError.
        printline("Input branch not found, default to master")
        repo_inst = Repo.clone_from(pkg_repo, pkg_dir,
                branch="master").working_dir
    return repo_inst
256
257
def get_autoconf_deps(pkgdir):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found. If the package is not autoconf it is just
    ignored.

    Parameter descriptions:
    pkgdir              Directory where package source is located

    Returns a list of repository names looked up via DEPENDENCIES; may
    contain duplicates (callers de-duplicate).
    """
    configure_ac = os.path.join(pkgdir, 'configure.ac')
    if not os.path.exists(configure_ac):
        return []

    configure_ac_contents = ''
    # Prepend some special function overrides so we can parse out dependencies.
    # Each known macro is redefined so it expands to MACRO_START<arg>MACRO_END
    # markers, where <arg> is the macro argument holding the dependency name
    # ($N, selected via DEPENDENCIES_OFFSET). The markers survive m4
    # expansion and can be grepped out of autoconf's output below.
    for macro in DEPENDENCIES.iterkeys():
        configure_ac_contents += ('m4_define([' + macro + '], [' +
                macro + '_START$' + str(DEPENDENCIES_OFFSET[macro] + 1) +
                macro + '_END])\n')
    with open(configure_ac, "rt") as f:
        configure_ac_contents += f.read()

    # Feed the augmented configure.ac to autoconf on stdin ('-').
    autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
    (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
    if not stdout:
        print(stderr)
        raise Exception("Failed to run autoconf for parsing dependencies")

    # Parse out all of the dependency text between the START/END markers.
    matches = []
    for macro in DEPENDENCIES.iterkeys():
        pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
        for match in re.compile(pattern).finditer(stdout):
            matches.append((match.group(1), match.group(2)))

    # Look up dependencies from the text: any whitespace-split token that
    # starts with a known library/header name maps to its git repo.
    found_deps = []
    for macro, deptext in matches:
        for potential_dep in deptext.split(' '):
            for known_dep in DEPENDENCIES[macro].iterkeys():
                if potential_dep.startswith(known_dep):
                    found_deps.append(DEPENDENCIES[macro][known_dep])

    return found_deps
304
def get_meson_deps(pkgdir):
    """
    Parse a package's meson.build files for openbmc dependencies and
    return a list of those found. Packages without a top-level
    meson.build are silently ignored.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    if not os.path.exists(os.path.join(pkgdir, 'meson.build')):
        return []

    # Compile once; matches dependency('<name>' ...) declarations.
    dep_pattern = re.compile(r"dependency\('([^']*)'.*?\)\n")
    found_deps = []
    for root, _, files in os.walk(pkgdir):
        if 'meson.build' not in files:
            continue
        with open(os.path.join(root, 'meson.build'), 'rt') as build_file:
            contents = build_file.read()
        for match in dep_pattern.finditer(contents):
            # Only record dependencies we know how to clone.
            repo = DEPENDENCIES['PKG_CHECK_MODULES'].get(match.group(1))
            if repo is not None:
                found_deps.append(repo)

    return found_deps
330
# Base 'make' invocation shared by every build/check step in this script.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
340
def enFlag(flag, enabled):
    """
    Return an autoconf configure flag string for a feature.

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    state = 'enable' if enabled else 'disable'
    return '--%s-%s' % (state, flag)
350
def mesonFeature(val):
    """
    Return the meson feature flag value corresponding to val.

    True is enabled which requires the feature.
    False is disabled which disables the feature.
    None is auto which autodetects the feature.

    Parameters:
    val                 The value being converted
    """
    # Identity checks ('is') are deliberate: e.g. 1 must not be treated
    # as True.
    for sentinel, flag in ((True, "enabled"),
                           (False, "disabled"),
                           (None, "auto")):
        if val is sentinel:
            return flag
    raise Exception("Bad meson feature value")
370
def parse_meson_options(options_file):
    """
    Returns the set of option names defined in the provided
    meson_options.txt file.

    Parameters:
    options_file        The file containing options
    """
    with open(options_file, "rt") as f:
        options_contents = f.read()
    # Match option('<name>' ...) declarations; only the name is captured.
    pattern = re.compile(r"option\(\s*'([^']*)'")
    # Use the builtin set type: the legacy 'sets' module is deprecated
    # (and removed in Python 3); the builtin is a drop-in replacement for
    # the membership tests and iteration callers perform.
    options = set()
    for match in pattern.finditer(options_contents):
        options.add(match.group(1))
    return options
386
def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package. Meson projects are
    preferred; anything else is assumed to be autoconf.

    Parameter description:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, pkg))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    # Build & install this package
    # Always try using meson first
    if os.path.exists('meson.build'):
        meson_options = sets.Set()
        if os.path.exists("meson_options.txt"):
            meson_options = parse_meson_options("meson_options.txt")
        meson_flags = [
            '-Db_colorout=never',
            '-Dwerror=true',
            '-Dwarning_level=3',
        ]
        if build_for_testing:
            meson_flags.append('--buildtype=debug')
        else:
            meson_flags.append('--buildtype=debugoptimized')
        # Only pass -Dtests/-Dexamples when the project declares the option,
        # otherwise meson setup would reject the unknown flag.
        if 'tests' in meson_options:
            meson_flags.append('-Dtests=' + mesonFeature(build_for_testing))
        if 'examples' in meson_options:
            meson_flags.append('-Dexamples=' + str(build_for_testing).lower())
        if MESON_FLAGS.get(pkg) is not None:
            meson_flags.extend(MESON_FLAGS.get(pkg))
        try:
            check_call_cmd('meson', 'setup', '--reconfigure', 'build', *meson_flags)
        except:
            # A stale/incompatible build dir can make --reconfigure fail;
            # wipe it and configure from scratch.
            shutil.rmtree('build')
            check_call_cmd('meson', 'setup', 'build', *meson_flags)
        check_call_cmd('ninja', '-C', 'build')
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
    # Assume we are autoconf otherwise
    else:
        conf_flags = [
            enFlag('silent-rules', False),
            enFlag('examples', build_for_testing),
            enFlag('tests', build_for_testing),
        ]
        if not TEST_ONLY:
            conf_flags.extend([
                enFlag('code-coverage', build_for_testing),
                enFlag('valgrind', build_for_testing),
            ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(pkg) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(pkg))
        # Run the first bootstrap script found, in order of preference.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)
        check_call_cmd(*make_parallel)
        check_call_cmd('sudo', '-n', '--', *(make_parallel + [ 'install' ]))
451
def build_dep_tree(pkg, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package(pkg), starting with the package to be unit tested,
    parse its 'configure.ac'/'meson.build' files from within the package's
    directory(pkgdir) for each package dependency defined, recursively
    doing the same thing on each package found as a dependency.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict. Raises on cyclic dependencies.
    """
    if not dep_tree:
        dep_tree = head

    # /tmp/depcache's first line names dependencies already available in
    # the environment; those are neither cloned nor built.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)
    pkg_deps += get_meson_deps(pkgdir)

    for dep in sets.Set(pkg_deps):
        # NOTE(review): substring test -- a dep whose name is contained in
        # another cached entry is also skipped; confirm intended.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep,branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure: dep is on the current recursion
                # stack (seen but not yet fully processed).
                raise Exception("Cyclic dependencies found in "+pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
508
def make_target_exists(target):
    """
    Determine whether the makefile in the current directory defines the
    given target, by running a dry-run ('make -n') and checking the exit
    status.

    Parameter descriptions:
    target              The make target we are checking
    """
    with open(os.devnull, 'w') as devnull:
        try:
            check_call(['make', '-n', target],
                       stdout=devnull, stderr=devnull)
        except CalledProcessError:
            return False
    return True
524
def run_unit_tests():
    """
    Run the package unit tests via `make check`, repeated args.repeat
    times. On failure, dump every test-suite.log found under the current
    directory and raise Exception.
    """
    check_cmd = make_parallel + ['check']
    try:
        for _ in range(0, args.repeat):
            check_call_cmd(*check_cmd)
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            if 'test-suite.log' in files:
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')
539
def run_cppcheck():
    """
    Run cppcheck over every C/C++ source file tracked by git in the
    current directory. Returns None (skipping the scan) when no such
    sources exist; raises Exception if cppcheck exits non-zero.
    """
    # Accept .c/.h/.cpp/.hpp (case-insensitive), excluding mako templates
    # (names containing '.mako.').
    match_re = re.compile('((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    cppcheck_files = []
    stdout = subprocess.check_output(['git', 'ls-files'])

    for f in stdout.decode('utf-8').split():
        if match_re.match(f):
            cppcheck_files.append(f)

    if not cppcheck_files:
        # skip cppcheck if there arent' any c or cpp sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--file-list=-']

    cppcheck_process = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    # Feed the file list on stdin. NOTE(review): passes a str to
    # communicate(); fine on Python 2 but pipes are bytes on Python 3.
    (stdout, stderr) = cppcheck_process.communicate(
        input='\n'.join(cppcheck_files))

    if cppcheck_process.wait():
        raise Exception('Cppcheck failed')
    print(stdout)
    print(stderr)
570
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles a small probe program and runs it under valgrind; if
    valgrind reports errors (exit 99) the platform is deemed unsafe.
    The probe source and binary are always removed afterwards.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    probe_prog = (
        '#include <errno.h>\n'
        '#include <stdio.h>\n'
        '#include <stdlib.h>\n'
        '#include <string.h>\n'
        'int main() {\n'
        'char *heap_str = malloc(16);\n'
        'strcpy(heap_str, "RandString");\n'
        'int res = strcmp("RandString", heap_str);\n'
        'free(heap_str);\n'
        'char errstr[64];\n'
        'strerror_r(EINVAL, errstr, sizeof(errstr));\n'
        'printf("%s\\n", errstr);\n'
        'return res;\n'
        '}\n'
    )
    with open(src, 'w') as probe:
        probe.write(probe_prog)
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        os.remove(src)
        os.remove(exe)
605
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles a trivial probe with ASAN/UBSAN enabled and runs it; any
    failure (compile or run) marks the platform unsafe. The probe source
    and binary are always removed afterwards.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as probe:
        probe.write('int main() { return 0; }\n')
    compile_cmd = ['gcc', '-O2', '-fsanitize=address',
                   '-fsanitize=undefined', '-o', exe, src]
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(compile_cmd, stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)
        return True
    except:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        os.remove(src)
        os.remove(exe)
627
def meson_setup_exists(setup):
    """
    Returns whether the meson build supports the named test setup.

    Runs `meson test` with the setup and a zero timeout multiplier and
    inspects the combined output for meson's "not found" diagnostic.

    Parameter descriptions:
    setup              The setup target to check
    """
    # The previous version opened os.devnull in a 'with' block without ever
    # using it; the handle has been removed since stderr is redirected into
    # the captured output instead.
    try:
        output = subprocess.check_output(
                ['meson', 'test', '-C', 'build',
                 '--setup', setup, '-t', '0'],
                stderr=subprocess.STDOUT)
    except CalledProcessError as e:
        # A failing test run still produces the output we need to inspect.
        output = e.output
    return not re.search('Test setup .* not found from project', output)
644
def run_unit_tests_meson():
    """
    Run the unit tests for a meson based package via `meson test`.
    On failure, dump every testlog.txt found under the current directory
    and raise Exception.
    """
    try:
        check_call_cmd('meson', 'test', '-C', 'build')
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            if 'testlog.txt' in files:
                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
        raise Exception('Unit tests failed')
657
def maybe_meson_valgrind():
    """
    Potentially run the unit tests through valgrind via `meson test`.
    A package can provide a custom valgrind configuration with
    add_test_setup() in its meson.build; when present that setup is used,
    otherwise valgrind runs as a plain test wrapper. Skipped entirely on
    platforms where valgrind is unsafe.
    """
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    # Prefer the project's own 'valgrind' test setup when one exists.
    if meson_setup_exists('valgrind'):
        valgrind_args = ('--setup', 'valgrind')
    else:
        valgrind_args = ('--wrapper', 'valgrind')
    try:
        check_call_cmd('meson', 'test', '-C', 'build', *valgrind_args)
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            if 'testlog-valgrind.txt' in files:
                check_call_cmd('cat',
                               os.path.join(root, 'testlog-valgrind.txt'))
        raise Exception('Valgrind tests failed')
680
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this. On failure, dumps every per-suite
    valgrind log found and raises Exception.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + [ 'check-valgrind' ]
        check_call_cmd(*cmd)
    except CalledProcessError:
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                # Escape the '.' so only literal '.log' suffixes match
                # (previously '.' matched any character).
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
707
def maybe_make_coverage():
    """
    Potentially run the unit tests through code coverage via
    `make check-code-coverage`. Packages that do not define that target
    are skipped silently.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
723
def find_file(filename, basedir):
    """
    Find every occurrence of a file under the base directory and return
    the full paths, in os.walk order.

    Parameter descriptions:
    filename              The name of the file to find
    basedir               The base directory search in
    """
    return [os.path.join(root, filename)
            for root, _, files in os.walk(basedir)
            if filename in files]
739
740if __name__ == '__main__':
741    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
742    CONFIGURE_FLAGS = {
743        'sdbusplus': ['--enable-transaction'],
744        'phosphor-logging':
745        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
746         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
747    }
748
749    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
750    MESON_FLAGS = {
751    }
752
753    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
754    DEPENDENCIES = {
755        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
756        'AC_CHECK_HEADER': {
757            'host-ipmid': 'phosphor-host-ipmid',
758            'blobs-ipmid': 'phosphor-ipmi-blobs',
759            'sdbusplus': 'sdbusplus',
760            'sdeventplus': 'sdeventplus',
761            'stdplus': 'stdplus',
762            'gpioplus': 'gpioplus',
763            'phosphor-logging/log.hpp': 'phosphor-logging',
764        },
765        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
766        'PKG_CHECK_MODULES': {
767            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
768            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
769            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
770            'libipmid': 'phosphor-host-ipmid',
771            'libipmid-host': 'phosphor-host-ipmid',
772            'sdbusplus': 'sdbusplus',
773            'sdeventplus': 'sdeventplus',
774            'stdplus': 'stdplus',
775            'gpioplus': 'gpioplus',
776            'phosphor-logging': 'phosphor-logging',
777            'phosphor-snmp': 'phosphor-snmp',
778            'ipmiblob': 'ipmi-blob-tool',
779        },
780    }
781
782    # Offset into array of macro parameters MACRO(0, 1, ...N)
783    DEPENDENCIES_OFFSET = {
784        'AC_CHECK_LIB': 0,
785        'AC_CHECK_HEADER': 0,
786        'AC_PATH_PROG': 1,
787        'PKG_CHECK_MODULES': 1,
788    }
789
790    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
791    DEPENDENCIES_REGEX = {
792        'phosphor-logging': r'\S+-dbus-interfaces$'
793    }
794
795    # Set command line arguments
796    parser = argparse.ArgumentParser()
797    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
798                        help="Workspace directory location(i.e. /home)")
799    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
800                        help="OpenBMC package to be unit tested")
801    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
802                        action="store_true", required=False, default=False,
803                        help="Only run test cases, no other validation")
804    parser.add_argument("-v", "--verbose", action="store_true",
805                        help="Print additional package status messages")
806    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
807                        type=int, default=1)
808    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
809                        help="Branch to target for dependent repositories",
810                        default="master")
811    parser.add_argument("-n", "--noformat", dest="FORMAT",
812                        action="store_false", required=False,
813                        help="Whether or not to run format code")
814    args = parser.parse_args(sys.argv[1:])
815    WORKSPACE = args.WORKSPACE
816    UNIT_TEST_PKG = args.PACKAGE
817    TEST_ONLY = args.TEST_ONLY
818    BRANCH = args.BRANCH
819    FORMAT_CODE = args.FORMAT
820    if args.verbose:
821        def printline(*line):
822            for arg in line:
823                print arg,
824            print
825    else:
826        printline = lambda *l: None
827
828    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
829
830    # First validate code formatting if repo has style formatting files.
831    # The format-code.sh checks for these files.
832    if FORMAT_CODE:
833        check_call_cmd("./format-code.sh", CODE_SCAN_DIR)
834
835    # Automake and meson
836    if (os.path.isfile(CODE_SCAN_DIR + "/configure.ac") or
837        os.path.isfile(CODE_SCAN_DIR + '/meson.build')):
838        prev_umask = os.umask(000)
839        # Determine dependencies and add them
840        dep_added = dict()
841        dep_added[UNIT_TEST_PKG] = False
842        # Create dependency tree
843        dep_tree = DepTree(UNIT_TEST_PKG)
844        build_dep_tree(UNIT_TEST_PKG,
845                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
846                       dep_added,
847                       dep_tree,
848                       BRANCH)
849
850        # Reorder Dependency Tree
851        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
852            dep_tree.ReorderDeps(pkg_name, regex_str)
853        if args.verbose:
854            dep_tree.PrintTree()
855        install_list = dep_tree.GetInstallList()
856        # We don't want to treat our package as a dependency
857        install_list.remove(UNIT_TEST_PKG)
858        # install reordered dependencies
859        for dep in install_list:
860            build_and_install(dep, False)
861        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
862        # Run package unit tests
863        build_and_install(UNIT_TEST_PKG, True)
864        if os.path.isfile(CODE_SCAN_DIR + '/meson.build'):
865            if not TEST_ONLY:
866                maybe_meson_valgrind()
867
868                # Run clang-tidy only if the project has a configuration
869                if os.path.isfile('.clang-tidy'):
870                    check_call_cmd('run-clang-tidy-8.py', '-p',
871                                   'build')
872                # Run the basic clang static analyzer otherwise
873                else:
874                    check_call_cmd('ninja', '-C', 'build',
875                                   'scan-build')
876
877                # Run tests through sanitizers
878                # b_lundef is needed if clang++ is CXX since it resolves the
879                # asan symbols at runtime only. We don't want to set it earlier
880                # in the build process to ensure we don't have undefined
881                # runtime code.
882                if is_sanitize_safe():
883                    check_call_cmd('meson', 'configure', 'build',
884                                   '-Db_sanitize=address,undefined',
885                                   '-Db_lundef=false')
886                    check_call_cmd('meson', 'test', '-C', 'build',
887                                   '--logbase', 'testlog-ubasan')
888                    # TODO: Fix memory sanitizer
889                    #check_call_cmd('meson', 'configure', 'build',
890                    #               '-Db_sanitize=memory')
891                    #check_call_cmd('meson', 'test', '-C', 'build'
892                    #               '--logbase', 'testlog-msan')
893                    check_call_cmd('meson', 'configure', 'build',
894                                   '-Db_sanitize=none', '-Db_lundef=true')
895                else:
896                    sys.stderr.write("###### Skipping sanitizers ######\n")
897
898                # Run coverage checks
899                check_call_cmd('meson', 'configure', 'build',
900                               '-Db_coverage=true')
901                run_unit_tests_meson()
902                # Only build coverage HTML if coverage files were produced
903                for root, dirs, files in os.walk('build'):
904                    if any([f.endswith('.gcda') for f in files]):
905                        check_call_cmd('ninja', '-C', 'build',
906                                       'coverage-html')
907                        break
908                check_call_cmd('meson', 'configure', 'build',
909                               '-Db_coverage=false')
910            else:
911                run_unit_tests_meson()
912
913        else:
914            run_unit_tests()
915            if not TEST_ONLY:
916                maybe_make_valgrind()
917                maybe_make_coverage()
918        if not TEST_ONLY:
919            run_cppcheck()
920
921        os.umask(prev_umask)
922
923    # Cmake
924    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
925        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
926        check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')
927        check_call_cmd('cmake', '--build', '.', '--', '-j',
928                       str(multiprocessing.cpu_count()))
929        if make_target_exists('test'):
930            check_call_cmd('ctest', '.')
931        if not TEST_ONLY:
932            maybe_make_valgrind()
933            maybe_make_coverage()
934            run_cppcheck()
935            if os.path.isfile('.clang-tidy'):
936                check_call_cmd('run-clang-tidy-8.py', '-p', '.')
937
938    else:
939        print "Not a supported repo for CI Tests, exit"
940        quit()
941
942    # Run any custom CI scripts the repo has, of which there can be
943    # multiple of and anywhere in the repository.
944    ci_scripts = find_file('run-ci.sh', os.path.join(WORKSPACE, UNIT_TEST_PKG))
945    if ci_scripts:
946        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
947        for ci_script in ci_scripts:
948            check_call_cmd('sh', ci_script)
949