xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 3f1d12015ae7418f7e4821981ed60d1b7bd1e1b7)
#!/usr/bin/env python

"""
This script determines the given package's openbmc dependencies from its
configure.ac (or meson.build) files, then downloads, configures, builds, and
installs each of those dependencies. Finally, the given package itself is
configured, built, and installed prior to executing its unit tests.
"""

from git import Repo
from urlparse import urljoin
from subprocess import check_call, call, CalledProcessError
import os
import sys
import argparse
import multiprocessing
import re
import sets
import subprocess
import shutil
import platform


class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """
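    # Illustrative usage (hypothetical package names, not executed here):
    #   tree = DepTree('phosphor-logging')
    #   tree.AddChild('sdbusplus')
    #   tree.GetInstallList()  # -> ['sdbusplus', 'phosphor-logging']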

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove child node.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
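        # Illustrative example (hypothetical node names): for a tree
        # 'A' -> ['B', 'C-dbus-interfaces'], GetPathRegex('B',
        # r'\S+-dbus-interfaces$') called on 'A' returns
        # [['A', 'B'], ['A', 'C-dbus-interfaces']].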
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            return_paths = None
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
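        # For example, as used below with name='phosphor-logging' and
        # regex_str=r'\S+-dbus-interfaces$': any '*-dbus-interfaces' node
        # appearing after the 'phosphor-logging' node is re-parented under
        # it, so that it is installed before 'phosphor-logging'.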
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names.

        Parameter descriptions:
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)


def check_call_cmd(dir, *cmd):
    """
    Verbosely prints the directory the given command is called from and the
    command itself, then executes the command using check_call.

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    printline(dir, ">", " ".join(cmd))
    check_call(cmd)


def clone_pkg(pkg):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location

    Parameter descriptions:
    pkg                 Name of the package to clone
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, "./")
    return Repo.clone_from(pkg_repo, pkg_dir).working_dir


def get_autoconf_deps(pkgdir):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found. If the package does not use autoconf,
    it is simply ignored.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    configure_ac = os.path.join(pkgdir, 'configure.ac')
    if not os.path.exists(configure_ac):
        return []

    configure_ac_contents = ''
    # Prepend some special function overrides so we can parse out dependencies
    for macro in DEPENDENCIES.iterkeys():
        configure_ac_contents += ('m4_define([' + macro + '], [' +
                macro + '_START$' + str(DEPENDENCIES_OFFSET[macro] + 1) +
                macro + '_END])\n')
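    # With the overrides above, autoconf expands each macro invocation into a
    # marker of the form "<MACRO>_START<argument><MACRO>_END" (e.g.
    # "PKG_CHECK_MODULES_STARTsdbusplusPKG_CHECK_MODULES_END"), which the
    # regex matching below extracts. The example expansion is illustrative.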
    with open(configure_ac, "rt") as f:
        configure_ac_contents += f.read()

    autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
    (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
    if not stdout:
        print(stderr)
        raise Exception("Failed to run autoconf for parsing dependencies")

    # Parse out all of the dependency text
    matches = []
    for macro in DEPENDENCIES.iterkeys():
        pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
        for match in re.compile(pattern).finditer(stdout):
            matches.append((match.group(1), match.group(2)))

    # Look up dependencies from the text
    found_deps = []
    for macro, deptext in matches:
        for potential_dep in deptext.split(' '):
            for known_dep in DEPENDENCIES[macro].iterkeys():
                if potential_dep.startswith(known_dep):
                    found_deps.append(DEPENDENCIES[macro][known_dep])

    return found_deps

def get_meson_deps(pkgdir):
    """
    Parse the given 'meson.build' file for package dependencies and return
    a list of the dependencies found. If the package is not meson-compatible,
    it is simply ignored.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    meson_build = os.path.join(pkgdir, 'meson.build')
    if not os.path.exists(meson_build):
        return []

    found_deps = []
    for root, dirs, files in os.walk(pkgdir):
        if 'meson.build' not in files:
            continue
        with open(os.path.join(root, 'meson.build'), 'rt') as f:
            build_contents = f.read()
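        # Match dependency() calls such as dependency('sdbusplus') and map any
        # recognized module name onto its openbmc repository via the
        # DEPENDENCIES['PKG_CHECK_MODULES'] table (example call is illustrative).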
        for match in re.finditer(r"dependency\('([^']*)'.*?\)\n", build_contents):
            maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(match.group(1))
            if maybe_dep is not None:
                found_deps.append(maybe_dep)

    return found_deps

make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]

def enFlag(flag, enabled):
    """
    Returns a configure flag as a string

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
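    # e.g. enFlag('tests', True) -> '--enable-tests',
    #      enFlag('tests', False) -> '--disable-tests'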
    return '--' + ('enable' if enabled else 'disable') + '-' + flag

def mesonFeature(val):
    """
    Returns the meson feature string which corresponds to the value

    True maps to 'enabled', which requires the feature.
    False maps to 'disabled', which disables the feature.
    None maps to 'auto', which autodetects the feature.

    Parameters:
    val                 The value being converted
    """
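    # e.g. mesonFeature(True) -> "enabled", mesonFeature(None) -> "auto"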
    if val is True:
        return "enabled"
    elif val is False:
        return "disabled"
    elif val is None:
        return "auto"
    else:
        raise Exception("Bad meson feature value")


def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for the package.

    Parameter descriptions:
    pkg                 The package we are building
    build_for_testing   Whether to enable options related to testing on the package
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    os.chdir(pkgdir)

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd(pkgdir, 'sudo', '-n', '--', 'ldconfig')

    # Build & install this package
    # Always try using meson first
    if os.path.exists('meson.build'):
        meson_flags = [
            '-Db_colorout=never',
            '-Dtests=' + mesonFeature(build_for_testing),
            '-Dexamples=' + str(build_for_testing).lower(),
            '-Db_coverage=' + str(build_for_testing).lower(),
        ]
        if MESON_FLAGS.get(pkg) is not None:
            meson_flags.extend(MESON_FLAGS.get(pkg))
        try:
            check_call_cmd(pkgdir, 'meson', 'setup', '--reconfigure', 'build', *meson_flags)
        except:
            shutil.rmtree('build')
            check_call_cmd(pkgdir, 'meson', 'setup', 'build', *meson_flags)
        check_call_cmd(pkgdir, 'ninja', '-C', 'build')
        check_call_cmd(pkgdir, 'sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
    # Assume we are autoconf otherwise
    else:
        conf_flags = [
            enFlag('silent-rules', False),
            enFlag('examples', build_for_testing),
            enFlag('tests', build_for_testing),
            enFlag('code-coverage', build_for_testing),
            enFlag('valgrind', build_for_testing),
        ]
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(pkg) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(pkg))
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd(pkgdir, './' + bootstrap)
                break
        check_call_cmd(pkgdir, './configure', *conf_flags)
        check_call_cmd(pkgdir, *make_parallel)
        check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + [ 'install' ]))

def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    For each package (pkg), starting with the package to be unit tested,
    parse its 'configure.ac' and 'meson.build' files from within the
    package's directory (pkgdir) for dependencies, then recursively do
    the same for each dependency that is found.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node
    """
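    # dep_added maps a package name to True once that package and its own
    # dependencies have been fully processed, e.g. (illustrative)
    # {'sdbusplus': True, 'phosphor-logging': False}.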
    if not dep_tree:
        dep_tree = head

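    # /tmp/depcache is assumed to contain a single line naming dependencies
    # that are already available (e.g. pre-installed in the CI image); any
    # dependency found in the cache is skipped below.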
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)
    pkg_deps += get_meson_deps(pkgdir)

    for dep in sets.Set(pkg_deps):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in "+pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added

def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        cmd = [ 'make', '-n', target ]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        return False

def run_unit_tests(top_dir):
    """
    Runs the unit tests for the package via `make check`

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        cmd = make_parallel + [ 'check' ]
        for i in range(0, args.repeat):
            check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' not in files:
                continue
            check_call_cmd(root, 'cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')

def run_cppcheck(top_dir):
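    """
    Runs cppcheck static analysis on the package source tree, ignoring any
    *-src and *-build directories as well as the test and scripts directories.

    Parameter descriptions:
    top_dir             The root directory of our project
    """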
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        ignore_list = ['-i%s' % path for path in os.listdir(top_dir) \
                       if path.endswith('-src') or path.endswith('-build')]
        ignore_list.extend(('-itest', '-iscripts'))
        params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
                  '--enable=all']
        params.extend(ignore_list)
        params.append('.')

        check_call_cmd(top_dir, *params)
    except CalledProcessError:
        raise Exception('Cppcheck failed')

def maybe_run_valgrind(top_dir):
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing,
    this step is skipped.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + [ 'check-valgrind' ]
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            for f in files:
                if re.search('test-suite-[a-z]+.log', f) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')

def maybe_run_coverage(top_dir):
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing, this step is skipped.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        cmd = make_parallel + [ 'check-code-coverage' ]
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')

if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }
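    # e.g. for PKG_CHECK_MODULES(PREFIX, modules, ...) an offset of 1 selects
    # the 'modules' argument, while for AC_CHECK_LIB(library, ...) an offset
    # of 0 selects the 'library' argument.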

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location (e.g. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    if args.verbose:
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake and meson
    if (os.path.isfile(CODE_SCAN_DIR + "/configure.ac") or
        os.path.isfile(CODE_SCAN_DIR + '/meson.build')):
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # Install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Run package unit tests
        build_and_install(UNIT_TEST_PKG, True)
        if os.path.isfile(CODE_SCAN_DIR + '/meson.build'):
            check_call_cmd(top_dir, 'meson', 'test', '-C', 'build')
            check_call_cmd(top_dir, 'ninja', '-C', 'build', 'coverage-html')
            check_call_cmd(top_dir, 'meson', 'test', '-C', 'build', '--wrap', 'valgrind')
        else:
            run_unit_tests(top_dir)
            maybe_run_valgrind(top_dir)
            maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()