xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 54d4fafa4aff87c5fb833f1fe49b05751ca3110d)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import sets
19import subprocess
20import platform
21
22
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child

        Returns the newly created child node.
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child node with the given name (no-op if
        no direct child matches; grandchildren are not searched).

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name, searching this node and all
        descendants pre-order. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found
        (also returns None when this node itself matches, since the head
        has no parent).

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.
        Both nodes must already exist in the tree.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        # Only nodes discovered after (to the right of) 'name' are moved;
        # nodes already under 'name' (name in their path) stay put.
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names, i.e. dependencies first so
        they can be installed before their dependents.
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        # Parenthesized single-argument form works on both Python 2 and 3;
        # the bare print statement used previously is Python-2-only.
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
216
217
def check_call_cmd(dir, *cmd):
    """
    Print the given directory and the command about to run (via the
    verbose printline helper), then execute it with check_call, which
    raises CalledProcessError on a non-zero exit status.

    Note: 'dir' is used only for the verbose message; the command itself
    executes in the current working directory.

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    cmdline = " ".join(cmd)
    printline(dir, ">", cmdline)
    check_call(cmd)
229
230
def clone_pkg(pkg):
    """
    Ensure the given openbmc package is checked out under WORKSPACE and
    return its directory.  An existing git checkout is reused as-is;
    otherwise the repository is cloned from the openbmc gerrit server.

    Parameter descriptions:
    pkg                 Name of the package to clone
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Already cloned on a previous run; nothing to do.
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, "./")
    clone = Repo.clone_from(pkg_repo, pkg_dir)
    return clone.working_dir
246
247
def get_autoconf_deps(pkgdir):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found. If the package is not autoconf it is just
    ignored (an empty list is returned).

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    configure_ac = os.path.join(pkgdir, 'configure.ac')
    if not os.path.exists(configure_ac):
        return []

    configure_ac_contents = ''
    # Prepend some special function overrides so we can parse out dependencies.
    # Iterate the dict directly rather than via iterkeys(): equivalent on
    # Python 2 and still valid on Python 3, where iterkeys() was removed.
    for macro in DEPENDENCIES:
        configure_ac_contents += ('m4_define([' + macro + '], [' +
                macro + '_START$' + str(DEPENDENCIES_OFFSET[macro] + 1) +
                macro + '_END])\n')
    with open(configure_ac, "rt") as f:
        configure_ac_contents += f.read()

    # Let autoconf expand the file so our START/END markers surround the
    # macro argument we care about in the generated output.
    autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
    (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
    if not stdout:
        print(stderr)
        raise Exception("Failed to run autoconf for parsing dependencies")

    # Parse out all of the dependency text between the markers
    matches = []
    for macro in DEPENDENCIES:
        pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
        for match in re.compile(pattern).finditer(stdout):
            matches.append((match.group(1), match.group(2)))

    # Look up dependencies from the text
    found_deps = []
    for macro, deptext in matches:
        for potential_dep in deptext.split(' '):
            for known_dep in DEPENDENCIES[macro]:
                if potential_dep.startswith(known_dep):
                    found_deps.append(DEPENDENCIES[macro][known_dep])

    return found_deps
294
# Base `make` invocation shared by every build and test step below; extra
# targets (e.g. 'install', 'check') are appended by the callers.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
304
def enFlag(flag, enabled):
    """
    Return the configure command line option string that enables or
    disables the named feature, e.g. '--enable-tests'.

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    state = 'enable' if enabled else 'disable'
    return '--{0}-{1}'.format(state, flag)
314
def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    os.chdir(pkgdir)

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd(pkgdir, 'sudo', '-n', '--', 'ldconfig')

    # Silent rules are always off; the remaining flags follow test mode.
    conf_flags = [enFlag('silent-rules', False)]
    for feature in ('examples', 'tests', 'code-coverage', 'valgrind'):
        conf_flags.append(enFlag(feature, build_for_testing))

    # Add any necessary configure flags for package
    extra_flags = CONFIGURE_FLAGS.get(pkg)
    if extra_flags is not None:
        conf_flags.extend(extra_flags)

    # Regenerate the autotools build system if the repo ships a bootstrap
    # script; only the first one found is run.
    for bootstrap in ('bootstrap.sh', 'bootstrap', 'autogen.sh'):
        if os.path.exists(bootstrap):
            check_call_cmd(pkgdir, './' + bootstrap)
            break

    check_call_cmd(pkgdir, './configure', *conf_flags)
    check_call_cmd(pkgdir, *make_parallel)
    # Install needs root; sudo -n fails fast if a password would be required.
    check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + ['install']))
348
def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    For each package(pkg), starting with the package to be unit tested,
    parse its 'configure.ac' file from within the package's directory(pkgdir)
    for each package dependency defined recursively doing the same thing
    on each package found as a dependency.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.  Raises if a cyclic dependency
    is detected.
    """
    if not dep_tree:
        dep_tree = head

    # One-line cache of dependencies already available in the environment;
    # the `dep in cache` check below is a substring test against this line.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)

    # Use the builtin set to de-duplicate: the old `sets.Set` class is
    # deprecated since Python 2.6 and the module was removed in Python 3.
    for dep in set(pkg_deps):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Still marked False: we are inside this package's own
                # recursive expansion, so the dependency cycles back to it.
                raise Exception("Cyclic dependencies found in "+pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
402
def make_target_exists(target):
    """
    Probe the makefile in the current directory with `make -n` to decide
    whether the given target can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    probe = ['make', '-n', target]
    with open(os.devnull, 'w') as devnull:
        try:
            # Dry-run only; output is discarded, we just want the status.
            check_call(probe, stdout=devnull, stderr=devnull)
        except CalledProcessError:
            return False
    return True
418
def run_unit_tests(top_dir):
    """
    Run the package unit tests via `make check`, repeated args.repeat
    times.  On failure, every test-suite.log found under top_dir is
    dumped to stdout before raising.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    check_cmd = make_parallel + ['check']
    try:
        for _ in range(0, args.repeat):
            check_call_cmd(top_dir, *check_cmd)
    except CalledProcessError:
        # Surface the automake test logs so CI output shows the failures.
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' in files:
                check_call_cmd(root, 'cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')
436
def run_cppcheck(top_dir):
    """
    Run cppcheck over the project, skipping generated '-src'/'-build'
    subtrees plus the test and scripts directories.  Raises if cppcheck
    exits non-zero.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        skip_dirs = ['-i%s' % entry for entry in os.listdir(top_dir)
                     if entry.endswith(('-src', '-build'))]
        skip_dirs += ['-itest', '-iscripts']
        cppcheck_cmd = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
                        '--enable=all'] + skip_dirs + ['.']
        check_call_cmd(top_dir, *cppcheck_cmd)
    except CalledProcessError:
        raise Exception('Cppcheck failed')
451
def maybe_run_valgrind(top_dir):
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + [ 'check-valgrind' ]
        check_call_cmd(top_dir,  *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            for f in files:
                # Escape the '.' so only real '.log' suffixes match; the
                # previous pattern's bare '.' matched any character.
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
480
def maybe_run_coverage(top_dir):
    """
    Run `make check-code-coverage` when the package defines that target;
    packages without code coverage support are silently skipped.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    coverage_cmd = make_parallel + ['check-code-coverage']
    try:
        check_call_cmd(top_dir, *coverage_cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')
499
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra ./configure arguments required by specific packages.
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macros to the library/header names they check and the
    # openbmc git repository each one comes from; consumed by
    # get_autoconf_deps() when parsing configure.ac.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which macro argument names the dependency being checked.
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Used by DepTree.ReorderDeps to pull matching packages under the
    # named package in the install order.
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    if args.verbose:
        def printline(*line):
            # Python 2 print-statement syntax (trailing comma keeps the
            # items on one line); this script requires Python 2 to run.
            for arg in line:
                print arg,
            print
    else:
        # Verbose output disabled: swallow all printline() calls.
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake
    if os.path.isfile(CODE_SCAN_DIR + "/configure.ac"):
        # Clear the umask while building/installing dependencies;
        # presumably so created files stay accessible across the sudo
        # install steps — TODO confirm. Restored at the end of the branch.
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        # Post-order traversal: dependencies come before dependents.
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Run package unit tests (this build enables tests/coverage/valgrind)
        build_and_install(UNIT_TEST_PKG, True)
        run_unit_tests(top_dir)
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        # ctest is only invoked when the project registered a 'test' target
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()
624