xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision a045491548753b02ebe477c1ab1297d0a3fb8346)
#!/usr/bin/env python

"""
This script determines the given package's openbmc dependencies from its
configure.ac file, then downloads, configures, builds, and installs each of
these dependencies. The given package itself is then configured, built, and
installed prior to executing its unit tests.
"""

from git import Repo
from urlparse import urljoin
from subprocess import check_call, call, CalledProcessError
import os
import sys
import argparse
import multiprocessing
import re
import platform


class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove child node.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names.
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)


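# Illustrative sketch only (not executed): building a small tree and turning
# it into an install order. The package names 'my-package', 'libfoo', and
# 'libbar' are hypothetical and only demonstrate the post-order traversal.
#
#   head = DepTree('my-package')
#   foo = head.AddChild('libfoo')       # my-package depends on libfoo
#   foo.AddChild('libbar')              # libfoo depends on libbar
#   head.GetPath('libbar')              # ['my-package', 'libfoo', 'libbar']
#   head.GetInstallList()               # ['libbar', 'libfoo', 'my-package']
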
def check_call_cmd(dir, *cmd):
    """
    Prints (when verbose) the directory location the given command is called
    from and the command itself, then executes the command using check_call.

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    printline(dir, ">", " ".join(cmd))
    check_call(cmd)


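# For example, check_call_cmd('/tmp', 'make', 'check') prints "/tmp > make check"
# (only with --verbose) and then runs `make check`. Note that the command runs
# in the current working directory; the first argument is used only for the
# printed message.
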
def clone_pkg(pkg):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, "./")
    return Repo.clone_from(pkg_repo, pkg_dir).working_dir


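# For example, clone_pkg('sdbusplus') clones
# https://gerrit.openbmc-project.xyz/openbmc/sdbusplus into
# $WORKSPACE/sdbusplus (or just returns that path if a git checkout is
# already present) and returns the resulting working directory.
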
def get_deps(configure_ac):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found.

    Parameter descriptions:
    configure_ac        Opened 'configure.ac' file object
    """
    line = ""
    dep_pkgs = set()
    for cfg_line in configure_ac:
        # Remove whitespace & newline
        cfg_line = cfg_line.rstrip()
        # Check for line breaks
        if cfg_line.endswith('\\'):
            line += str(cfg_line[:-1])
            continue
        line = line + cfg_line

        # Find any defined dependency
        line_has = lambda x: x if x in line else None
        macros = set(filter(line_has, DEPENDENCIES.iterkeys()))
        if len(macros) == 1:
            macro = ''.join(macros)
            deps = filter(line_has, DEPENDENCIES[macro].iterkeys())
            dep_pkgs.update(map(lambda x: DEPENDENCIES[macro][x], deps))

        line = ""
    deps = list(dep_pkgs)

    return deps


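# Illustrative sketch only (not executed): given a configure.ac line such as
#
#   PKG_CHECK_MODULES([SDBUSPLUS], [sdbusplus])
#
# get_deps() matches the 'PKG_CHECK_MODULES' macro and the 'sdbusplus' key
# from DEPENDENCIES, so the returned list includes 'sdbusplus' as a package
# to clone and build.
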
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]

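# For example, on a 4-cpu machine make_parallel evaluates to
# ['make', '-j', '4', '-l', '4', '-O'], and callers append targets to it,
# e.g. make_parallel + ['check'].
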
def enFlag(flag, enabled):
    """
    Returns a configure flag as a string

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    return '--' + ('enable' if enabled else 'disable') + '-' + flag

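# For example, enFlag('tests', True) returns '--enable-tests' and
# enFlag('valgrind', False) returns '--disable-valgrind'.
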
def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for the package.

    Parameter descriptions:
    pkg                 The package we are building
    build_for_testing   Whether to enable testing options for the package
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    # Build & install this package
    conf_flags = [
        enFlag('silent-rules', False),
        enFlag('tests', build_for_testing),
        enFlag('code-coverage', build_for_testing),
        enFlag('valgrind', build_for_testing),
    ]
    os.chdir(pkgdir)
    # Add any necessary configure flags for package
    if CONFIGURE_FLAGS.get(pkg) is not None:
        conf_flags.extend(CONFIGURE_FLAGS.get(pkg))
    for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
        if os.path.exists(bootstrap):
            check_call_cmd(pkgdir, './' + bootstrap)
            break
    check_call_cmd(pkgdir, './configure', *conf_flags)
    check_call_cmd(pkgdir, *make_parallel)
    check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + [ 'install' ]))

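# Illustrative sketch only (not executed): for a dependency built with
# build_for_testing=True and no extra CONFIGURE_FLAGS entry, the effective
# command sequence is roughly:
#
#   ./bootstrap.sh        (the first bootstrap script found, if any)
#   ./configure --disable-silent-rules --enable-tests \
#               --enable-code-coverage --enable-valgrind
#   make -j <ncpu> -l <ncpu> -O
#   sudo -n -- make -j <ncpu> -l <ncpu> -O install
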
def install_deps(dep_list):
    """
    Install each package in the ordered dep_list.

    Parameter descriptions:
    dep_list            Ordered list of dependencies
    """
    for pkg in dep_list:
        build_and_install(pkg, True)

def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    Starting with the package to be unit tested (pkg), parse that package's
    'configure.ac' file from within its directory (pkgdir) for dependencies,
    then recursively do the same for each dependency package found.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current list of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head
    os.chdir(pkgdir)
    # Read the dependency cache; any dependency listed there is skipped
    # below (neither cloned nor added to the tree)
    with open("/tmp/depcache", "r") as depcache:
        cached = depcache.readline()
    # Open package's configure.ac
    with open("configure.ac", "rt") as configure_ac:
        # Retrieve dependency list from package's configure.ac
        configure_ac_deps = get_deps(configure_ac)
        for dep_pkg in configure_ac_deps:
            if dep_pkg in cached:
                continue
            # Dependency package not already known
            if dep_added.get(dep_pkg) is None:
                # Dependency package not added
                new_child = dep_tree.AddChild(dep_pkg)
                dep_added[dep_pkg] = False
                dep_pkgdir = clone_pkg(dep_pkg)
                # Determine this dependency package's
                # dependencies and add them before
                # returning to add this package
                dep_added = build_dep_tree(dep_pkg,
                                           dep_pkgdir,
                                           dep_added,
                                           head,
                                           new_child)
            else:
                # Dependency package known and added
                if dep_added[dep_pkg]:
                    continue
                else:
                    # Cyclic dependency failure
                    raise Exception("Cyclic dependencies found in " + pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added

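# Illustrative sketch only (not executed), with hypothetical names: assuming
# 'my-package' lists sdbusplus in its configure.ac, sdbusplus brings in no
# further dependencies, and neither is listed in /tmp/depcache, then
#
#   dep_added = {'my-package': False}
#   head = DepTree('my-package')
#   build_dep_tree('my-package', '/workspace/my-package', dep_added, head)
#
# clones sdbusplus into the workspace, leaves dep_added as
# {'my-package': True, 'sdbusplus': True}, and gives head a single
# 'sdbusplus' child, so head.GetInstallList() is ['sdbusplus', 'my-package'].
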
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        cmd = [ 'make', '-n', target ]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        return False

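# For example, make_target_exists('check-valgrind') returns True only when
# `make -n check-valgrind` succeeds in the current directory, i.e. the
# generated Makefile actually provides that target.
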
def run_unit_tests(top_dir):
    """
    Runs the unit tests for the package via `make check`.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        cmd = make_parallel + [ 'check' ]
        for i in range(0, args.repeat):
            check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' not in files:
                continue
            check_call_cmd(root, 'cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')

def run_cppcheck(top_dir):
    """
    Runs cppcheck static analysis over the package source, skipping any
    generated *-src/*-build directories as well as the test and scripts
    directories.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        ignore_list = ['-i%s' % path for path in os.listdir(top_dir) \
                       if path.endswith('-src') or path.endswith('-build')]
        ignore_list.extend(('-itest', '-iscripts'))
        params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
                  '--enable=all']
        params.extend(ignore_list)
        params.append('.')

        check_call_cmd(top_dir, *params)
    except CalledProcessError:
        raise Exception('Cppcheck failed')

def maybe_run_valgrind(top_dir):
    """
    Runs the unit tests under valgrind for the package via
    `make check-valgrind`, if the package provides that target.
    Otherwise this is a no-op.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + [ 'check-valgrind' ]
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            for f in files:
                if re.search('test-suite-[a-z]+.log', f) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')

def maybe_run_coverage(top_dir):
    """
    Runs the unit tests with code coverage enabled for the package via
    `make check-code-coverage`, if the package provides that target.
    Otherwise this is a no-op.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        cmd = make_parallel + [ 'check-code-coverage' ]
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')

if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

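    # Illustrative sketch only (not executed): with the mapping above, a
    # dependency tree such as
    #
    #   my-package -> [phosphor-logging, phosphor-dbus-interfaces]
    #
    # (where 'my-package' is a hypothetical name) is rearranged by
    # ReorderDeps('phosphor-logging', r'\S+-dbus-interfaces$') so that
    # phosphor-dbus-interfaces becomes a child of phosphor-logging, and
    # GetInstallList() then installs it before phosphor-logging.
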
    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location (e.g. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    if args.verbose:
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake
    if os.path.isfile(CODE_SCAN_DIR + "/configure.ac"):
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        install_list = dep_tree.GetInstallList()
        # install reordered dependencies
        install_deps(install_list)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Refresh dynamic linker run time bindings for dependencies
        check_call_cmd(top_dir, 'sudo', '-n', '--', 'ldconfig')
        # Run package unit tests
        run_unit_tests(top_dir)
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()