xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision e67f5fc22866e519227108023657fcde1590d60e)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import sets
19import subprocess
20import platform
21
22
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with a matching name. No-op when no
        child matches.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for found_path in return_paths:
                new_paths.append(found_path)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        # Only nodes appearing after the 'name' node are moved; anything
        # already on a path through 'name' is left in place.
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names (dependencies first, so each
        package is listed after everything it depends on).
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        # Function-call form prints identically under Python 2 and also
        # compiles under Python 3 (the original used a print statement).
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
216
217
def check_call_cmd(dir, *cmd):
    """
    Verbosely print the directory the command is associated with plus the
    command itself, then execute it via check_call.

    Note: 'dir' is used only for the log line; the command itself runs in
    the current working directory (callers os.chdir beforehand).

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    command_text = " ".join(cmd)
    printline(dir, ">", command_text)
    check_call(cmd)
229
230
def clone_pkg(pkg):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location, and return its working directory path. An
    existing checkout (detected by a .git directory) is reused as-is.

    Parameter descriptions:
    pkg                 Name of the package to clone
    """
    pkg_path = os.path.join(WORKSPACE, pkg)
    # Skip the clone entirely if a checkout is already present.
    if os.path.exists(os.path.join(pkg_path, '.git')):
        return pkg_path
    repo_url = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_path)
    printline(pkg_path, "> git clone", repo_url, "./")
    return Repo.clone_from(repo_url, pkg_path).working_dir
246
247
def get_autoconf_deps(pkgdir):
    """
    Parse the given package's 'configure.ac' file for openbmc dependencies
    and return a list of the dependencies found. If the package is not
    autoconf (no configure.ac present) it is just ignored and an empty
    list is returned.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    configure_ac = os.path.join(pkgdir, 'configure.ac')
    if not os.path.exists(configure_ac):
        return []

    configure_ac_contents = ''
    # Prepend some special function overrides so we can parse out
    # dependencies: each known macro is redefined to wrap its parameter of
    # interest in MACRO_START ... MACRO_END marker text.
    # Iterating the dict directly replaces the Python-2-only iterkeys();
    # behavior is identical.
    for macro in DEPENDENCIES:
        configure_ac_contents += ('m4_define([' + macro + '], [' +
                macro + '_START$' + str(DEPENDENCIES_OFFSET[macro] + 1) +
                macro + '_END])\n')
    with open(configure_ac, "rt") as f:
        configure_ac_contents += f.read()

    # Let autoconf expand the macros so the markers (and the dependency
    # names between them) appear in its output.
    autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
    (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
    if not stdout:
        print(stderr)
        raise Exception("Failed to run autoconf for parsing dependencies")

    # Parse out all of the dependency text between the markers
    matches = []
    for macro in DEPENDENCIES:
        pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
        for match in re.compile(pattern).finditer(stdout):
            matches.append((match.group(1), match.group(2)))

    # Look up dependencies from the text
    found_deps = []
    for macro, deptext in matches:
        for potential_dep in deptext.split(' '):
            for known_dep in DEPENDENCIES[macro]:
                if potential_dep.startswith(known_dep):
                    found_deps.append(DEPENDENCIES[macro][known_dep])

    return found_deps
294
# Base 'make' invocation shared by every build/test step in this script;
# extended with a target (e.g. + ['check']) where needed.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
304
def enFlag(flag, enabled):
    """
    Return a configure option string for the given feature flag, e.g.
    '--enable-tests' or '--disable-tests'.

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    if enabled:
        state = 'enable'
    else:
        state = 'disable'
    return '--%s-%s' % (state, flag)
314
def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    os.chdir(pkgdir)

    # Silent rules are always off; test-related features track the
    # build_for_testing switch.
    configure_options = [
        enFlag('silent-rules', False),
        enFlag('examples', build_for_testing),
        enFlag('tests', build_for_testing),
        enFlag('code-coverage', build_for_testing),
        enFlag('valgrind', build_for_testing),
    ]
    # Add any necessary package-specific configure flags
    extra_flags = CONFIGURE_FLAGS.get(pkg)
    if extra_flags is not None:
        configure_options.extend(extra_flags)

    # Run the first bootstrap script the package provides, if any.
    for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
        if os.path.exists(bootstrap):
            check_call_cmd(pkgdir, './' + bootstrap)
            break

    check_call_cmd(pkgdir, './configure', *configure_options)
    check_call_cmd(pkgdir, *make_parallel)
    # Install needs root; 'sudo -n' fails fast instead of prompting.
    check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + ['install']))
344
def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    For each package(pkg), starting with the package to be unit tested,
    parse its 'configure.ac' file from within the package's directory(pkgdir)
    for each package dependency defined recursively doing the same thing
    on each package found as a dependency.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict. Raises on cyclic dependencies.
    """
    if not dep_tree:
        dep_tree = head

    # NOTE(review): /tmp/depcache appears to be pre-populated externally
    # with names of dependencies already present in the environment;
    # anything found in it is skipped below — confirm with the CI setup.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)

    # De-duplicate with the builtin set; the 'sets' module is deprecated
    # since Python 2.6 and removed in Python 3.
    for dep in set(pkg_deps):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in "+pkg)

    # Mark this package complete once all of its dependencies are handled.
    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
398
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when 'make -n <target>' succeeds, False otherwise.
    """
    try:
        cmd = ['make', '-n', target]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        # make ran but the target (or makefile) doesn't exist
        return False
    except OSError:
        # 'make' itself is unavailable; previously this raised an uncaught
        # exception — treat it the same as a missing target.
        return False
414
def run_unit_tests(top_dir):
    """
    Runs the unit tests for the package via `make check`, repeated
    args.repeat times. On failure, dumps every test-suite.log found under
    top_dir and raises.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    check_cmd = make_parallel + ['check']
    try:
        for _ in range(0, args.repeat):
            check_call_cmd(top_dir, *check_cmd)
    except CalledProcessError:
        # Surface the automake test logs in the CI output for debugging.
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' in files:
                check_call_cmd(root, 'cat',
                               os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')
432
def run_cppcheck(top_dir):
    """
    Run cppcheck static analysis over the project tree, skipping any
    generated '*-src' / '*-build' directories plus the test and scripts
    trees. Raises when cppcheck reports a failure.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # http://cppcheck.sourceforge.net/manual.pdf
    ignore_list = []
    for entry in os.listdir(top_dir):
        if entry.endswith('-src') or entry.endswith('-build'):
            ignore_list.append('-i%s' % entry)
    ignore_list.extend(('-itest', '-iscripts'))

    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all']
    params.extend(ignore_list)
    params.append('.')

    try:
        check_call_cmd(top_dir, *params)
    except CalledProcessError:
        raise Exception('Cppcheck failed')
447
def maybe_run_valgrind(top_dir):
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + [ 'check-valgrind' ]
        check_call_cmd(top_dir,  *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            for f in files:
                # Raw string with escaped '.': the previous pattern's bare
                # dot matched any character before 'log'.
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
476
def maybe_run_coverage(top_dir):
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    coverage_cmd = make_parallel + ['check-code-coverage']
    try:
        check_call_cmd(top_dir, *coverage_cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')
495
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra ./configure arguments needed by specific packages.
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macros searched by get_autoconf_deps() to the
    # library/header names they check for and the gerrit repo providing each.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which macro argument carries the dependency name of interest.
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Packages whose matching siblings must be reordered beneath them in the
    # dependency tree (see DepTree.ReorderDeps).
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    # printline() is the script-wide verbose logger: a real printer when -v
    # is given, otherwise a no-op.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake
    if os.path.isfile(CODE_SCAN_DIR + "/configure.ac"):
        # Open the umask so installed dependency files are world-accessible.
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        # Post-order traversal installs dependencies before dependents.
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Refresh dynamic linker run time bindings for dependencies
        check_call_cmd(top_dir, 'sudo', '-n', '--', 'ldconfig')
        # Run package unit tests
        build_and_install(UNIT_TEST_PKG, True)
        run_unit_tests(top_dir)
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        # Only run ctest when the build actually produced a 'test' target.
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()
622