xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision d61316dc88a0818cd36178ab5cb18e993807c476)
#!/usr/bin/env python

"""
This script determines the given package's openbmc dependencies from its
configure.ac file, then downloads, configures, builds, and installs each of
those dependencies. Finally, the given package itself is configured, built,
and installed prior to executing its unit tests.
"""

from git import Repo
from urlparse import urljoin
from subprocess import check_call, call, CalledProcessError
import os
import sys
import argparse
import multiprocessing
import re
import platform


class DepTree():
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and a list of DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove child node.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If the tree contains nodes with names that
        match 'name' and 'regex_str', move the 'regex_str' nodes that are
        to the right of the 'name' node so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names.
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)

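# A minimal usage sketch of DepTree, assuming hypothetical package names; the
# real tree is built from configure.ac parsing in build_dep_tree() below:
#   head = DepTree('pkg-under-test')
#   child = head.AddChild('sdbusplus')
#   child.AddChild('phosphor-dbus-interfaces')
#   head.GetInstallList()
#   # -> ['phosphor-dbus-interfaces', 'sdbusplus', 'pkg-under-test']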

def check_call_cmd(dir, *cmd):
    """
    Verbosely prints the directory the given command is called from and the
    command itself, then executes the command using check_call.

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    printline(dir, ">", " ".join(cmd))
    check_call(cmd)


def clone_pkg(pkg):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, "./")
    return Repo.clone_from(pkg_repo, pkg_dir).working_dir
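# Illustrative behavior, assuming a package name of 'sdbusplus': the repo at
# https://gerrit.openbmc-project.xyz/openbmc/sdbusplus is cloned into
# <WORKSPACE>/sdbusplus, or reused if a .git directory is already present.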


def get_deps(configure_ac):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found.

    Parameter descriptions:
    configure_ac        Opened 'configure.ac' file object
    """
    line = ""
    dep_pkgs = set()
    for cfg_line in configure_ac:
        # Remove trailing whitespace & newline
        cfg_line = cfg_line.rstrip()
        # Check for line continuations
        if cfg_line.endswith('\\'):
            line += str(cfg_line[:-1])
            continue
        line = line + cfg_line

        # Find any defined dependency
        line_has = lambda x: x if x in line else None
        macros = set(filter(line_has, DEPENDENCIES.iterkeys()))
        if len(macros) == 1:
            macro = ''.join(macros)
            deps = filter(line_has, DEPENDENCIES[macro].iterkeys())
            dep_pkgs.update(map(lambda x: DEPENDENCIES[macro][x], deps))

        line = ""
    deps = list(dep_pkgs)

    return deps
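# Illustrative configure.ac fragments this parser would pick up (names taken
# from the DEPENDENCIES table defined under __main__ below):
#   PKG_CHECK_MODULES([SDBUSPLUS], [sdbusplus])       -> dependency on 'sdbusplus'
#   AC_CHECK_HEADER([phosphor-logging/log.hpp], ...)  -> dependency on 'phosphor-logging'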


make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
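# On a 4-CPU machine (count is illustrative) this expands to:
#   make -j 4 -l 4 -O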

def enFlag(flag, enabled):
    """
    Returns a configure flag as a string

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    return '--' + ('enable' if enabled else 'disable') + '-' + flag
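# For example, enFlag('tests', True) yields '--enable-tests' and
# enFlag('tests', False) yields '--disable-tests'.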

def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for the package.

    Parameter descriptions:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    # Build & install this package
    conf_flags = [
        enFlag('silent-rules', False),
        enFlag('examples', build_for_testing),
        enFlag('tests', build_for_testing),
        enFlag('code-coverage', build_for_testing),
        enFlag('valgrind', build_for_testing),
    ]
    os.chdir(pkgdir)
    # Add any necessary configure flags for package
    if CONFIGURE_FLAGS.get(pkg) is not None:
        conf_flags.extend(CONFIGURE_FLAGS.get(pkg))
    for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
        if os.path.exists(bootstrap):
            check_call_cmd(pkgdir, './' + bootstrap)
            break
    check_call_cmd(pkgdir, './configure', *conf_flags)
    check_call_cmd(pkgdir, *make_parallel)
    check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + ['install']))
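# When build_for_testing is True the configure invocation looks roughly like
# (any package-specific CONFIGURE_FLAGS omitted):
#   ./configure --disable-silent-rules --enable-examples --enable-tests \
#               --enable-code-coverage --enable-valgrind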

def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    For the given package (pkg), starting with the package to be unit tested,
    parse its 'configure.ac' file from within the package's directory (pkgdir)
    for package dependencies, then recursively do the same for each dependency
    found.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current list of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head
    os.chdir(pkgdir)
    # Read the dependency cache; any dependency listed there is skipped below
    with open("/tmp/depcache", "r") as depcache:
        cached = depcache.readline()
    # Open package's configure.ac
    with open("configure.ac", "rt") as configure_ac:
        # Retrieve dependency list from package's configure.ac
        configure_ac_deps = get_deps(configure_ac)
        for dep_pkg in configure_ac_deps:
            if dep_pkg in cached:
                continue
            # Dependency package not already known
            if dep_added.get(dep_pkg) is None:
                # Dependency package not added
                new_child = dep_tree.AddChild(dep_pkg)
                dep_added[dep_pkg] = False
                dep_pkgdir = clone_pkg(dep_pkg)
                # Determine this dependency package's
                # dependencies and add them before
                # returning to add this package
                dep_added = build_dep_tree(dep_pkg,
                                           dep_pkgdir,
                                           dep_added,
                                           head,
                                           new_child)
            else:
                # Dependency package known and added
                if dep_added[dep_pkg]:
                    continue
                else:
                    # Cyclic dependency failure
                    raise Exception("Cyclic dependencies found in " + pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
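# A hypothetical resulting tree, as PrintTree() would show it (package names
# and nesting are illustrative only):
#   my-package
#       sdbusplus
#       phosphor-logging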

def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        cmd = ['make', '-n', target]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        return False

def run_unit_tests(top_dir):
    """
    Runs the unit tests for the package via `make check`.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        cmd = make_parallel + ['check']
        for i in range(0, args.repeat):
            check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' not in files:
                continue
            check_call_cmd(root, 'cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')

def run_cppcheck(top_dir):
    """
    Runs cppcheck static analysis over the package source, ignoring any
    '*-src' or '*-build' directories plus the test and scripts trees.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        ignore_list = ['-i%s' % path for path in os.listdir(top_dir)
                       if path.endswith('-src') or path.endswith('-build')]
        ignore_list.extend(('-itest', '-iscripts'))
        params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
                  '--enable=all']
        params.extend(ignore_list)
        params.append('.')

        check_call_cmd(top_dir, *params)
    except CalledProcessError:
        raise Exception('Cppcheck failed')

def maybe_run_valgrind(top_dir):
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not provide valgrind
    testing, this step is skipped.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            for f in files:
                if re.search('test-suite-[a-z]+.log', f) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')

def maybe_run_coverage(top_dir):
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not provide code
    coverage testing, this step is skipped.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        cmd = make_parallel + ['check-code-coverage']
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')

if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }
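    # The entry above ensures any '*-dbus-interfaces' repo found after
    # phosphor-logging in the tree is reparented under it, so it gets
    # installed before phosphor-logging (see DepTree.ReorderDeps).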

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location (e.g. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    if args.verbose:
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh script checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake
    if os.path.isfile(CODE_SCAN_DIR + "/configure.ac"):
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # Install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Refresh dynamic linker run time bindings for dependencies
        check_call_cmd(top_dir, 'sudo', '-n', '--', 'ldconfig')
        # Build the package with testing enabled, then run its unit tests
        build_and_install(UNIT_TEST_PKG, True)
        run_unit_tests(top_dir)
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()