xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 9a32d5ee7f2a6ae6e0f027f87fc3b41d690815d5)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import sets
19import platform
20
21
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child node with a matching name.
        No-op if no direct child matches.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.
        Searching for the head node's own name also returns None (the head
        has no parent).

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node (internal recursion state)
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        # A node qualifies when its name matches exactly or matches the
        # (start-anchored, per re.match) regex.
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            # Accumulate every qualifying path found in the subtree.
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends in 'name' itself; bail if absent.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every matching node found after 'name', unless 'name'
        # is already one of its ancestors.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names, so dependencies always
        precede the packages that need them.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        # print() with a single argument behaves identically under
        # Python 2 and Python 3, unlike the old print statement.
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
215
216
def check_call_cmd(dir, *cmd):
    """
    Verbosely print the directory a command is run from along with the
    command itself, then execute it via check_call (which raises
    CalledProcessError on a non-zero exit status).

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    rendered = " ".join(cmd)
    printline(dir, ">", rendered)
    check_call(cmd)
228
229
def clone_pkg(pkg):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location, reusing an existing checkout when present.

    Parameter descriptions:
    pkg                 Name of the package to clone

    Returns the working directory of the clone.
    """
    local_dir = os.path.join(WORKSPACE, pkg)
    marker = os.path.join(local_dir, '.git')
    if os.path.exists(marker):
        # Already cloned on a previous run; reuse it as-is.
        return local_dir
    remote = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(local_dir)
    printline(local_dir, "> git clone", remote, "./")
    return Repo.clone_from(remote, local_dir).working_dir
245
246
def get_deps(configure_ac):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found.

    Rewritten without the Python-2-only idioms (dict.iterkeys plus
    list-returning filter/map); behavior under Python 2 is unchanged and
    the function now also runs under Python 3.

    Parameter descriptions:
    configure_ac        Opened 'configure.ac' file object
    """
    line = ""
    dep_pkgs = set()
    for cfg_line in configure_ac:
        # Remove whitespace & newline
        cfg_line = cfg_line.rstrip()
        # A trailing backslash continues the logical line.
        if cfg_line.endswith('\\'):
            line += str(cfg_line[:-1])
            continue
        line = line + cfg_line

        # Which known dependency macro(s) does this logical line mention?
        macros = set(m for m in DEPENDENCIES if m in line)
        # Only act when exactly one macro matched; ambiguous lines are
        # skipped, matching the original behavior.
        if len(macros) == 1:
            macro = ''.join(macros)
            # Map every library/header named on the line to its git repo.
            dep_pkgs.update(repo for lib, repo in DEPENDENCIES[macro].items()
                            if lib in line)

        line = ""

    return list(dep_pkgs)
278
def get_autoconf_deps(pkgdir):
    """
    Parse the given package's 'configure.ac' for dependencies and return
    them as a list. Packages without a 'configure.ac' (i.e. not autoconf)
    simply yield an empty list.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    configure_ac = os.path.join(pkgdir, 'configure.ac')
    if os.path.exists(configure_ac):
        with open(configure_ac, "rt") as f:
            return get_deps(f)
    return []
294
# Base argument vector used for every `make` invocation in this script;
# targets are appended where needed (e.g. make_parallel + ['check']).
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
304
def enFlag(flag, enabled):
    """
    Return a configure-script flag string, e.g. '--enable-tests' or
    '--disable-tests'.

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    state = 'enable' if enabled else 'disable'
    return '--{0}-{1}'.format(state, flag)
314
def build_and_install(pkg, build_for_testing=False):
    """
    Build and install the package into the environment, optionally
    enabling its examples and test machinery.

    Parameter description:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    # Flags whose state tracks whether we are building for test.
    test_flags = ['examples', 'tests', 'code-coverage', 'valgrind']
    conf_flags = [enFlag('silent-rules', False)]
    conf_flags += [enFlag(f, build_for_testing) for f in test_flags]
    os.chdir(pkgdir)
    # Append any package-specific configure flags.
    extra_flags = CONFIGURE_FLAGS.get(pkg)
    if extra_flags is not None:
        conf_flags.extend(extra_flags)
    # Run the first bootstrap script the package provides, if any.
    for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
        if os.path.exists(bootstrap):
            check_call_cmd(pkgdir, './' + bootstrap)
            break
    check_call_cmd(pkgdir, './configure', *conf_flags)
    check_call_cmd(pkgdir, *make_parallel)
    check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + ['install']))
344
def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    For each package(pkg), starting with the package to be unit tested,
    parse its 'configure.ac' file from within the package's directory(pkgdir)
    for each package dependency defined recursively doing the same thing
    on each package found as a dependency.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict; raises Exception when a cyclic
    dependency is detected.
    """
    if not dep_tree:
        dep_tree = head

    # NOTE(review): /tmp/depcache is assumed to exist already (presumably
    # written by the surrounding CI tooling); a substring hit in its first
    # line means the dependency is already available — confirm with callers.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)

    # Use the builtin set() instead of the long-deprecated sets.Set
    # (removed in Python 3); the semantics are identical.
    for dep in set(pkg_deps):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in "+pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
398
def make_target_exists(target):
    """
    Probe the makefile in the current directory (via a `make -n` dry run)
    to see whether the given target can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    devnull = open(os.devnull, 'w')
    try:
        check_call(['make', '-n', target], stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        # Non-zero exit: no such target (or it cannot be built).
        return False
    finally:
        devnull.close()
414
def run_unit_tests(top_dir):
    """
    Run the package's unit tests via `make check`, repeated args.repeat
    times. On failure, dump every test-suite.log under top_dir and raise.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    check_cmd = make_parallel + ['check']
    try:
        for _ in range(args.repeat):
            check_call_cmd(top_dir, *check_cmd)
    except CalledProcessError:
        # Surface the automake test logs to aid debugging the failure.
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' in files:
                check_call_cmd(root, 'cat',
                               os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')
432
def run_cppcheck(top_dir):
    """
    Run cppcheck static analysis over the project, skipping generated
    dependency trees and test/script directories; raise on any failure.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        skip_args = []
        for entry in os.listdir(top_dir):
            # Skip vendored/generated subtrees produced by the build.
            if entry.endswith('-src') or entry.endswith('-build'):
                skip_args.append('-i%s' % entry)
        skip_args += ['-itest', '-iscripts']
        cmd = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
               '--enable=all']
        cmd += skip_args
        cmd.append('.')

        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        raise Exception('Cppcheck failed')
447
def maybe_run_valgrind(top_dir):
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            for f in files:
                # Escape the '.' so only literal 'test-suite-<name>.log'
                # files match (the unescaped '.' matched any character).
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
476
def maybe_run_coverage(top_dir):
    """
    Potentially run the unit tests under code coverage via
    `make check-code-coverage`, skipping packages that do not provide
    that target.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        # No coverage target defined by this package; nothing to do.
        return

    coverage_cmd = make_parallel + ['check-code-coverage']
    try:
        check_call_cmd(top_dir, *coverage_cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')
495
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra ./configure arguments needed by specific packages.
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps each autoconf macro to the libraries/headers it may check for,
    # and those in turn to the openbmc git repo that provides them.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Packages whose matching dependencies must be re-parented under them
    # in the install order (see DepTree.ReorderDeps).
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    # printline is a verbose-mode logger used by the helpers above; it is
    # a no-op unless -v was given. (Python 2 print statement syntax.)
    if args.verbose:
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake
    if os.path.isfile(CODE_SCAN_DIR + "/configure.ac"):
        # Build with a permissive umask so installed artifacts are readable.
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        # Post-order traversal: dependencies come before their dependents.
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Refresh dynamic linker run time bindings for dependencies
        check_call_cmd(top_dir, 'sudo', '-n', '--', 'ldconfig')
        # Run package unit tests
        build_and_install(UNIT_TEST_PKG, True)
        run_unit_tests(top_dir)
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        # Neither autotools nor cmake markers found; nothing we can test.
        print "Not a supported repo for CI Tests, exit"
        quit()
614