xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision c048cc0ff7db3164c89a453752f63f92cfe272a7)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import platform
19
20
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return the new child.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first child node with a matching name; no-op if absent.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        # A node qualifies either by exact name or by a regex match
        # against its name.
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            # Collect qualifying paths from the whole subtree.
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.
        Both nodes must already exist somewhere in the tree.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        # Locate the 'name' node among the matched paths (pre-order).
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        # Re-parent every later match that isn't already under 'name'.
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names, i.e. each package's
        dependencies appear before the package itself.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        # Parenthesized form prints identically for a single argument on
        # both Python 2 and Python 3.
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
214
215
def check_call_cmd(dir, *cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 List of parameters constructing the complete command
    """
    rendered = " ".join(cmd)
    printline(dir, ">", rendered)
    check_call(cmd)
227
228
def clone_pkg(pkg):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    """
    target = os.path.join(WORKSPACE, pkg)
    # A previous run may have already cloned this package; reuse it.
    if os.path.exists(os.path.join(target, '.git')):
        return target
    remote = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(target)
    printline(target, "> git clone", remote, "./")
    return Repo.clone_from(remote, target).working_dir
244
245
def get_deps(configure_ac):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found.

    A dependency is recognized when a line (after joining backslash
    continuations) contains exactly one known autoconf macro from
    DEPENDENCIES, together with one or more of that macro's known
    library/header names.

    Parameter descriptions:
    configure_ac        Opened 'configure.ac' file object
    """
    line = ""
    dep_pkgs = set()
    for cfg_line in configure_ac:
        # Remove whitespace & newline
        cfg_line = cfg_line.rstrip()
        # Join line-continuation ('\') lines into one logical line
        if cfg_line.endswith('\\'):
            line += str(cfg_line[:-1])
            continue
        line = line + cfg_line

        # Find any defined dependency. Iterating the dict directly (rather
        # than the Python-2-only iterkeys()) keeps this Python 2/3 portable.
        macros = set(m for m in DEPENDENCIES if m in line)
        if len(macros) == 1:
            macro = ''.join(macros)
            deps = [d for d in DEPENDENCIES[macro] if d in line]
            dep_pkgs.update(DEPENDENCIES[macro][d] for d in deps)

        line = ""

    return list(dep_pkgs)
277
def get_autoconf_deps(pkgdir):
    """
    Parse the given package's 'configure.ac' file for dependencies and
    return a list of the dependencies found. A package with no
    'configure.ac' (i.e. not autoconf-based) yields an empty list.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    cfg_path = os.path.join(pkgdir, 'configure.ac')
    if not os.path.exists(cfg_path):
        # Not an autoconf package; nothing to parse.
        return []

    with open(cfg_path, "rt") as cfg_file:
        return get_deps(cfg_file)
293
# Shared `make` invocation: one job per CPU (-j), a matching load-average
# cap so we don't oversubscribe the host (-l), and synchronized output (-O)
# so parallel job logs aren't intermixed in stdout / stderr.
make_parallel = [
    'make',
    '-j', str(multiprocessing.cpu_count()),
    '-l', str(multiprocessing.cpu_count()),
    '-O',
]
303
def enFlag(flag, enabled):
    """
    Return a configure flag string that enables or disables a feature.

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    if enabled:
        return '--enable-' + flag
    return '--disable-' + flag
313
def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    # Assemble configure flags: silent rules always off; the remaining
    # feature toggles follow the testing switch.
    conf_flags = [enFlag('silent-rules', False)]
    for feature in ('examples', 'tests', 'code-coverage', 'valgrind'):
        conf_flags.append(enFlag(feature, build_for_testing))
    os.chdir(pkgdir)
    # Add any necessary per-package configure flags
    extra_flags = CONFIGURE_FLAGS.get(pkg)
    if extra_flags is not None:
        conf_flags.extend(extra_flags)
    # Run whichever bootstrap script the package ships, if any.
    for script in ('bootstrap.sh', 'bootstrap', 'autogen.sh'):
        if os.path.exists(script):
            check_call_cmd(pkgdir, './' + script)
            break
    check_call_cmd(pkgdir, './configure', *conf_flags)
    check_call_cmd(pkgdir, *make_parallel)
    # Install requires root; sudo -n fails fast if a password would be needed.
    check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + ['install']))
343
def build_dep_tree(pkg, pkgdir, dep_added, head, dep_tree=None):
    """
    For each package(pkg), starting with the package to be unit tested,
    parse its 'configure.ac' file from within the package's directory(pkgdir)
    for each package dependency defined recursively doing the same thing
    on each package found as a dependency.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict; an entry becomes True once that
    package's own dependencies have all been processed.
    """
    # The initial caller starts at the head; recursive calls pass the child
    # node they just created.
    if not dep_tree:
        dep_tree = head

    # NOTE(review): /tmp/depcache must already exist and hold a single line
    # naming pre-installed packages -- presumably written by the surrounding
    # CI setup; confirm before running this script standalone.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)

    for dep in pkg_deps:
        # Substring match against the cache line: a cached dependency is
        # considered already satisfied and skipped.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure: we re-encountered a package
                # whose dependencies are still being processed (False).
                raise Exception("Cyclic dependencies found in "+pkg)

    # Mark this package complete now that all its dependencies are handled.
    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
397
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        # Dry-run (-n): exit status tells us whether the target is known,
        # without actually building anything.
        cmd = ['make', '-n', target]
        with open(os.devnull, 'w') as devnull:
            check_call(cmd, stdout=devnull, stderr=devnull)
        return True
    except CalledProcessError:
        # make ran but the target (or a makefile) doesn't exist.
        return False
    except OSError:
        # make itself isn't installed; no target can exist. Previously this
        # exception escaped and crashed the caller.
        return False
413
def run_unit_tests(top_dir):
    """
    Runs the unit tests for the package via `make check`, repeating them
    args.repeat times.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        check_cmd = make_parallel + ['check']
        for _ in range(0, args.repeat):
            check_call_cmd(top_dir, *check_cmd)
    except CalledProcessError:
        # Dump every automake test-suite log we can find to aid debugging.
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' not in files:
                continue
            check_call_cmd(root, 'cat', os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')
431
def run_cppcheck(top_dir):
    """
    Run cppcheck static analysis over the project sources, skipping
    generated dependency source/build trees and test/script directories.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        ignore_list = []
        for entry in os.listdir(top_dir):
            if entry.endswith('-src') or entry.endswith('-build'):
                ignore_list.append('-i%s' % entry)
        ignore_list.extend(('-itest', '-iscripts'))

        cmd = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
               '--enable=all']
        cmd.extend(ignore_list)
        cmd.append('.')

        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        raise Exception('Cppcheck failed')
446
def maybe_run_valgrind(top_dir):
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        check_call_cmd(top_dir, *(make_parallel + ['check-valgrind']))
    except CalledProcessError:
        # Dump every per-suite valgrind log to aid debugging.
        for root, _, files in os.walk(top_dir):
            for name in files:
                if re.search('test-suite-[a-z]+.log', name) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, name))
        raise Exception('Valgrind tests failed')
475
def maybe_run_coverage(top_dir):
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(top_dir, *(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
494
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    if args.verbose:
        def printline(*line):
            # Python 2 print statement; the trailing comma joins the args
            # with spaces on a single output line.
            for arg in line:
                print arg,
            print
    else:
        # Quiet mode: discard all status messages.
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake
    if os.path.isfile(CODE_SCAN_DIR + "/configure.ac"):
        # NOTE(review): fully-open umask -- presumably so files written via
        # `sudo make install` remain accessible to later non-root steps;
        # confirm. (000 is a Python 2 octal literal.)
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        # dep_tree is passed as the 'head' argument; the function's dep_tree
        # parameter defaults to head on this first call.
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Refresh dynamic linker run time bindings for dependencies
        check_call_cmd(top_dir, 'sudo', '-n', '--', 'ldconfig')
        # Run package unit tests
        build_and_install(UNIT_TEST_PKG, True)
        run_unit_tests(top_dir)
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        # Restore the umask saved before dependency processing.
        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        # Neither an autotools nor a cmake marker file was found; there is
        # nothing this script knows how to build and test.
        print "Not a supported repo for CI Tests, exit"
        quit()
613