xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 4e3818dd2eb4200c7c8ad35fd419d02eb4577918)
#!/usr/bin/env python

"""
This script determines the given package's openbmc dependencies from its
configure.ac file, then downloads, configures, builds, and installs each of
those dependencies. Finally, the given package itself is configured, built,
and installed prior to executing its unit tests.
"""

from git import Repo
from urlparse import urljoin
from subprocess import check_call, call, CalledProcessError
import os
import sys
import argparse
import multiprocessing
import re
import platform


class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove child node.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            return_paths = None
            full_path = path + [self.name]
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder the dependency tree. If the tree contains nodes whose names
        match 'name' and 'regex_str', move the 'regex_str' nodes that are to
        the right of the 'name' node so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names.

        Parameter descriptions:
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)

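# A minimal usage sketch of the tree above (hypothetical package names), shown
# here only as commented illustration of the class API:
#
#   head = DepTree('my-package')
#   head.AddChild('sdbusplus').AddChild('phosphor-logging')
#   head.GetInstallList()   # -> ['phosphor-logging', 'sdbusplus', 'my-package']
#   head.PrintTree()        # pre-order view, indented 4 spaces per level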

def check_call_cmd(dir, *cmd):
    """
    Verbosely prints the directory location the given command is called from
    and the command itself, then executes the command using check_call.

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    printline(dir, ">", " ".join(cmd))
    check_call(cmd)

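# For example, check_call_cmd('/tmp', 'make', 'check') prints "/tmp > make check"
# when --verbose is given and then runs `make check` via check_call.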

def clone_pkg(pkg):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, "./")
    return Repo.clone_from(pkg_repo, pkg_dir).working_dir

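# For example, clone_pkg('sdbusplus') fetches
# https://gerrit.openbmc-project.xyz/openbmc/sdbusplus into <WORKSPACE>/sdbusplus,
# or simply returns the existing checkout if a .git directory is already there.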

def get_deps(configure_ac):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found.

    Parameter descriptions:
    configure_ac        Opened 'configure.ac' file object
    """
    line = ""
    dep_pkgs = set()
    for cfg_line in configure_ac:
        # Remove whitespace & newline
        cfg_line = cfg_line.rstrip()
        # Check for line breaks
        if cfg_line.endswith('\\'):
            line += str(cfg_line[:-1])
            continue
        line = line+cfg_line

        # Find any defined dependency
        line_has = lambda x: x if x in line else None
        macros = set(filter(line_has, DEPENDENCIES.iterkeys()))
        if len(macros) == 1:
            macro = ''.join(macros)
            deps = filter(line_has, DEPENDENCIES[macro].iterkeys())
            dep_pkgs.update(map(lambda x: DEPENDENCIES[macro][x], deps))

        line = ""
    deps = list(dep_pkgs)

    return deps

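# A small sketch (hypothetical configure.ac contents) of what the parser above
# recognizes via the DEPENDENCIES table defined in __main__; continuation lines
# ending in '\' are joined before matching:
#
#   PKG_CHECK_MODULES([SDBUSPLUS], [sdbusplus])      -> ['sdbusplus']
#   AC_CHECK_HEADER([phosphor-logging/log.hpp])      -> ['phosphor-logging']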

make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
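# On a hypothetical 4-CPU builder this expands to: make -j 4 -l 4 -O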

def enFlag(flag, enabled):
    """
    Returns a configure flag as a string

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    return '--' + ('enable' if enabled else 'disable') + '-' + flag
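# For example, enFlag('tests', True) returns '--enable-tests' and
# enFlag('tests', False) returns '--disable-tests'.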

def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for the package.

    Parameter descriptions:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    # Build & install this package
    conf_flags = [
        enFlag('silent-rules', False),
        enFlag('examples', build_for_testing),
        enFlag('tests', build_for_testing),
        enFlag('code-coverage', build_for_testing),
        enFlag('valgrind', build_for_testing),
    ]
    os.chdir(pkgdir)
    # Add any necessary configure flags for package
    if CONFIGURE_FLAGS.get(pkg) is not None:
        conf_flags.extend(CONFIGURE_FLAGS.get(pkg))
    for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
        if os.path.exists(bootstrap):
            check_call_cmd(pkgdir, './' + bootstrap)
            break
    check_call_cmd(pkgdir, './configure', *conf_flags)
    check_call_cmd(pkgdir, *make_parallel)
    check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + [ 'install' ]))
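# A sketch of the resulting configure invocation when build_for_testing=True
# and no package-specific CONFIGURE_FLAGS apply (hypothetical package):
#   ./configure --disable-silent-rules --enable-examples --enable-tests \
#       --enable-code-coverage --enable-valgrind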

def install_deps(dep_list):
    """
    Install each package in the ordered dep_list.

    Parameter descriptions:
    dep_list            Ordered list of dependencies
    """
    for pkg in dep_list:
        build_and_install(pkg, True)

def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    Starting with the package to be unit tested, parse the package's
    'configure.ac' file from within its directory (pkgdir) for dependencies,
    then recursively do the same for each dependency package found.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current list of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head
    os.chdir(pkgdir)
    # Open package's configure.ac
    with open("/tmp/depcache", "r") as depcache:
        cached = depcache.readline()
    with open("configure.ac", "rt") as configure_ac:
        # Retrieve dependency list from package's configure.ac
        configure_ac_deps = get_deps(configure_ac)
        for dep_pkg in configure_ac_deps:
            if dep_pkg in cached:
                continue
            # Dependency package not already known
            if dep_added.get(dep_pkg) is None:
                # Dependency package not added
                new_child = dep_tree.AddChild(dep_pkg)
                dep_added[dep_pkg] = False
                dep_pkgdir = clone_pkg(dep_pkg)
                # Determine this dependency package's
                # dependencies and add them before
                # returning to add this package
                dep_added = build_dep_tree(dep_pkg,
                                           dep_pkgdir,
                                           dep_added,
                                           head,
                                           new_child)
            else:
                # Dependency package known and added
                if dep_added[dep_pkg]:
                    continue
                else:
                    # Cyclic dependency failure
                    raise Exception("Cyclic dependencies found in "+pkg)

    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
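# A rough sketch of the recursion above (hypothetical repos): if my-package's
# configure.ac references sdbusplus, and sdbusplus introduces nothing new that
# is not already listed in /tmp/depcache, the resulting tree is
#   my-package
#       sdbusplus
# and dep_added ends up as {'my-package': True, 'sdbusplus': True}.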

def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        cmd = [ 'make', '-n', target ]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        return False
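# For example, make_target_exists('check-valgrind') returns True only when a
# dry-run `make -n check-valgrind` succeeds in the current directory.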

def run_unit_tests(top_dir):
    """
    Runs the unit tests for the package via `make check`

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        cmd = make_parallel + [ 'check' ]
        for i in range(0, args.repeat):
            check_call_cmd(top_dir,  *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' not in files:
                continue
            check_call_cmd(root, 'cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')
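# For example, with --repeat 2 the parallel `make ... check` runs twice; on a
# failure, every test-suite.log found under top_dir is dumped before raising.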

def run_cppcheck(top_dir):
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        ignore_list = ['-i%s' % path for path in os.listdir(top_dir) \
                       if path.endswith('-src') or path.endswith('-build')]
        ignore_list.extend(('-itest', '-iscripts'))
        params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
                  '--enable=all']
        params.extend(ignore_list)
        params.append('.')

        check_call_cmd(top_dir, *params)
    except CalledProcessError:
        raise Exception('Cppcheck failed')
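# A sketch of the resulting command on a hypothetical 4-CPU machine with no
# '*-src'/'*-build' subdirectories present:
#   cppcheck -j 4 --enable=all -itest -iscripts .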

def maybe_run_valgrind(top_dir):
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + [ 'check-valgrind' ]
        check_call_cmd(top_dir,  *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            for f in files:
                if re.search('test-suite-[a-z]+.log', f) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')

def maybe_run_coverage(top_dir):
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        cmd = make_parallel + [ 'check-code-coverage' ]
        check_call_cmd(top_dir,  *cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')

if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }
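    # For example (hypothetical tree shape): with the mapping above, any
    # '*-dbus-interfaces' repo found to the right of phosphor-logging in the
    # dependency tree is reparented under it, so it gets installed first
    # (see DepTree.ReorderDeps and the post-order GetInstallList).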

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location (e.g. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    if args.verbose:
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake
    if os.path.isfile(CODE_SCAN_DIR + "/configure.ac"):
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        install_list = dep_tree.GetInstallList()
        # install reordered dependencies
        install_deps(install_list)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Refresh dynamic linker run time bindings for dependencies
        check_call_cmd(top_dir, 'sudo', '-n', '--', 'ldconfig')
        # Run package unit tests
        run_unit_tests(top_dir)
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()
603