xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision a61acb506868fdca84a2bd50f16ed3023d48521d)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import sets
19import subprocess
20import shutil
21import platform
22
23
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.

    Used to order openbmc package installs so that every dependency is
    built before the packages that need it (post-order traversal).
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        # Child DepTree nodes (direct dependencies of this package).
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child

        Returns the newly created child DepTree node.
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove child node.

        Only the first direct child with a matching name is removed;
        the search does not recurse into grandchildren.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                # Returning immediately makes the remove-during-iteration safe.
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Performs a pre-order depth-first search from this node.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return none if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node (None for the tree head)
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            # Pass a fresh list so sibling branches don't share state.
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            return_paths = None
            full_path = path + [self.name]
            # Recursive call always returns a list (possibly empty).
            return_paths = child.GetPathRegex(name, regex_str, full_path)
            for i in return_paths:
                new_paths.append(i)
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        NOTE(review): assumes both nodes exist in the tree; if from_name
        has no parent (not found), GetParentNode returns None and the
        RemoveChild call raises AttributeError — confirm callers guarantee
        existence.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        # Locate the path that ends in 'name'; only regex matches listed
        # after it get reparented.
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            # Skip nodes already under the 'name' node.
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names.

        Post-order guarantees every dependency appears before any
        package that depends on it.

        Parameter descriptions:
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print ' ' * (level * INDENT_PER_LEVEL) + self.name
        for child in self.children:
            child.PrintTree(level + 1)
217
218
def check_call_cmd(dir, *cmd):
    """
    Log the directory a command is nominally run from plus the command
    itself, then execute it via check_call.

    A CalledProcessError propagates to the caller when the command exits
    non-zero.

    Parameter descriptions:
    dir                 Directory location command is to be called from
    cmd                 Command and its arguments as separate strings
    """
    rendered = " ".join(cmd)
    printline(dir, ">", rendered)
    check_call(cmd)
230
231
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    If a checkout already exists (a .git directory is present) it is
    reused as-is.  If the requested branch cannot be cloned, fall back
    to cloning master.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory path of the cloned repository.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Reuse an existing checkout rather than re-cloning.
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        repo_inst = Repo.clone_from(pkg_repo, pkg_dir,
                branch=branch).working_dir
    except Exception:
        # Was a bare 'except:', which also swallowed KeyboardInterrupt
        # and SystemExit; narrow to Exception so Ctrl-C still aborts.
        printline("Input branch not found, default to master")
        repo_inst = Repo.clone_from(pkg_repo, pkg_dir,
                branch="master").working_dir
    return repo_inst
256
257
def get_autoconf_deps(pkgdir):
    """
    Parse the given 'configure.ac' file for package dependencies and return
    a list of the dependencies found. If the package is not autoconf it is just
    ignored.

    Relies on the module-level DEPENDENCIES and DEPENDENCIES_OFFSET tables
    (defined under __main__) to know which macros and arguments to inspect.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    configure_ac = os.path.join(pkgdir, 'configure.ac')
    if not os.path.exists(configure_ac):
        # Not an autoconf package; nothing to parse.
        return []

    configure_ac_contents = ''
    # Prepend some special function overrides so we can parse out dependencies
    # Each known macro is redefined so the argument of interest (selected by
    # DEPENDENCIES_OFFSET; $N is 1-based in m4) gets expanded between
    # MACRO_START / MACRO_END markers in autoconf's output.
    for macro in DEPENDENCIES.iterkeys():
        configure_ac_contents += ('m4_define([' + macro + '], [' +
                macro + '_START$' + str(DEPENDENCIES_OFFSET[macro] + 1) +
                macro + '_END])\n')
    with open(configure_ac, "rt") as f:
        configure_ac_contents += f.read()

    # Feed the augmented configure.ac to autoconf on stdin ('-') and capture
    # the expanded output for marker scanning.
    autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
    (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
    if not stdout:
        print(stderr)
        raise Exception("Failed to run autoconf for parsing dependencies")

    # Parse out all of the dependency text
    matches = []
    for macro in DEPENDENCIES.iterkeys():
        # Non-greedy so adjacent marker pairs don't get merged.
        pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
        for match in re.compile(pattern).finditer(stdout):
            matches.append((match.group(1), match.group(2)))

    # Look up dependencies from the text
    found_deps = []
    for macro, deptext in matches:
        for potential_dep in deptext.split(' '):
            # Prefix match so e.g. 'sdbusplus >= 1.0' still resolves.
            for known_dep in DEPENDENCIES[macro].iterkeys():
                if potential_dep.startswith(known_dep):
                    found_deps.append(DEPENDENCIES[macro][known_dep])

    return found_deps
304
def get_meson_deps(pkgdir):
    """
    Parse the meson.build files under the given package for openbmc
    dependencies and return the list found. Packages without a top-level
    meson.build are silently skipped.

    Parameter descriptions:
    pkgdir              Directory where package source is located
    """
    if not os.path.exists(os.path.join(pkgdir, 'meson.build')):
        return []

    # Resolve every dependency('name' ...) call in any meson.build file
    # to its openbmc repository via the PKG_CHECK_MODULES lookup table.
    dep_pattern = re.compile(r"dependency\('([^']*)'.*?\)\n")
    lookup = DEPENDENCIES['PKG_CHECK_MODULES']
    deps = []
    for root, _, files in os.walk(pkgdir):
        if 'meson.build' not in files:
            continue
        with open(os.path.join(root, 'meson.build'), 'rt') as meson_file:
            contents = meson_file.read()
        for match in dep_pattern.finditer(contents):
            repo = lookup.get(match.group(1))
            if repo is not None:
                deps.append(repo)

    return deps
330
# Base `make` invocation shared by all build/test helpers below.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
340
def enFlag(flag, enabled):
    """
    Return an autoconf configure flag string for the given feature.

    Parameters:
    flag                The name of the flag
    enabled             Whether the flag is enabled or disabled
    """
    if enabled:
        state = 'enable'
    else:
        state = 'disable'
    return '--%s-%s' % (state, flag)
350
def mesonFeature(val):
    """
    Return the meson feature-state string for the given value.

    True is enabled which requires the feature.
    False is disabled which disables the feature.
    None is auto which autodetects the feature.

    Any other value raises an Exception.  Identity ('is') comparisons are
    deliberate so e.g. the ints 1/0 are rejected rather than coerced.

    Parameters:
    val                 The value being converted
    """
    if val is None:
        return "auto"
    if val is True:
        return "enabled"
    if val is False:
        return "disabled"
    raise Exception("Bad meson feature value")
370
371
def build_and_install(pkg, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Uses meson/ninja when a meson.build exists, otherwise assumes an
    autoconf package (bootstrap, configure, make, make install).

    Parameter description:
    pkg                 The package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkgdir = os.path.join(WORKSPACE, pkg)
    os.chdir(pkgdir)

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd(pkgdir, 'sudo', '-n', '--', 'ldconfig')

    # Build & install this package
    # Always try using meson first
    if os.path.exists('meson.build'):
        meson_flags = [
            '-Db_colorout=never',
            '-Dtests=' + mesonFeature(build_for_testing),
            '-Dexamples=' + str(build_for_testing).lower(),
            '-Db_coverage=' + str(build_for_testing).lower(),
        ]
        if MESON_FLAGS.get(pkg) is not None:
            meson_flags.extend(MESON_FLAGS.get(pkg))
        try:
            check_call_cmd(pkgdir, 'meson', 'setup', '--reconfigure',
                           'build', *meson_flags)
        except Exception:
            # Fall back to a clean setup when reconfigure fails.  Was a
            # bare 'except:' (also swallowed KeyboardInterrupt/SystemExit),
            # and rmtree raised if the build directory never existed —
            # ignore_errors makes the cleanup best-effort.
            shutil.rmtree('build', ignore_errors=True)
            check_call_cmd(pkgdir, 'meson', 'setup', 'build', *meson_flags)
        check_call_cmd(pkgdir, 'ninja', '-C', 'build')
        check_call_cmd(pkgdir, 'sudo', '-n', '--', 'ninja', '-C', 'build',
                       'install')
    # Assume we are autoconf otherwise
    else:
        conf_flags = [
            enFlag('silent-rules', False),
            enFlag('examples', build_for_testing),
            enFlag('tests', build_for_testing),
            enFlag('code-coverage', build_for_testing),
            enFlag('valgrind', build_for_testing),
        ]
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(pkg) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(pkg))
        # Run the first bootstrap script the package provides, if any.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd(pkgdir, './' + bootstrap)
                break
        check_call_cmd(pkgdir, './configure', *conf_flags)
        check_call_cmd(pkgdir, *make_parallel)
        check_call_cmd(pkgdir, 'sudo', '-n', '--', *(make_parallel + [ 'install' ]))
424
def build_dep_tree(pkg, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package(pkg), starting with the package to be unit tested,
    parse its 'configure.ac' file from within the package's directory(pkgdir)
    for each package dependency defined recursively doing the same thing
    on each package found as a dependency.

    Parameter descriptions:
    pkg                 Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # NOTE(review): /tmp/depcache is expected to exist and hold one line of
    # pre-installed dependency names — presumably written by the CI wrapper;
    # an IOError is raised here otherwise.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg_deps = []
    pkg_deps += get_autoconf_deps(pkgdir)
    pkg_deps += get_meson_deps(pkgdir)

    # sets.Set de-duplicates the combined dependency list (Python 2 module).
    for dep in sets.Set(pkg_deps):
        # Substring test: any dep whose name appears anywhere in the cache
        # line is treated as already satisfied.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep,branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure: dep is on the current recursion
                # path (known but not yet fully processed).
                raise Exception("Cyclic dependencies found in "+pkg)

    # Mark this package fully processed.
    if not dep_added[pkg]:
        dep_added[pkg] = True

    return dep_added
481
def make_target_exists(target):
    """
    Determine whether the makefile in the current directory defines the
    given target, by attempting a dry run ('make -n') of it.

    Parameter descriptions:
    target              The make target we are checking
    """
    dry_run = ['make', '-n', target]
    with open(os.devnull, 'w') as devnull:
        try:
            check_call(dry_run, stdout=devnull, stderr=devnull)
        except CalledProcessError:
            return False
    return True
497
def run_unit_tests(top_dir):
    """
    Run the package unit tests via `make check`, repeated per the
    --repeat command line option (global `args`).

    On failure, dump every test-suite.log found under top_dir and raise
    an Exception.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    cmd = make_parallel + ['check']
    try:
        for _ in range(args.repeat):
            check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        for root, _, files in os.walk(top_dir):
            if 'test-suite.log' in files:
                check_call_cmd(root, 'cat',
                               os.path.join(root, 'test-suite.log'))
        raise Exception('Unit tests failed')
515
def run_cppcheck(top_dir):
    """
    Run cppcheck static analysis over the project tree, skipping
    generated *-src/*-build directories plus the test and scripts dirs.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    try:
        # http://cppcheck.sourceforge.net/manual.pdf
        skip = ['-i%s' % entry for entry in os.listdir(top_dir)
                if entry.endswith('-src') or entry.endswith('-build')]
        skip += ['-itest', '-iscripts']
        cmd = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
               '--enable=all'] + skip + ['.']
        check_call_cmd(top_dir, *cmd)
    except CalledProcessError:
        raise Exception('Cppcheck failed')
530
def maybe_run_valgrind(top_dir):
    """
    Run the unit tests through valgrind via `make check-valgrind` when
    the package provides that target; otherwise do nothing.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if re.match('ppc64', platform.machine()) is not None:
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        check_call_cmd(top_dir, *(make_parallel + ['check-valgrind']))
    except CalledProcessError:
        # Dump every per-suite valgrind log before failing.
        for root, _, files in os.walk(top_dir):
            for log in files:
                if re.search('test-suite-[a-z]+.log', log) is None:
                    continue
                check_call_cmd(root, 'cat', os.path.join(root, log))
        raise Exception('Valgrind tests failed')
559
def maybe_run_coverage(top_dir):
    """
    Run the unit tests with code coverage via `make check-code-coverage`
    when the package provides that target; otherwise do nothing.

    Parameter descriptions:
    top_dir             The root directory of our project
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(top_dir, *(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
578
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra autoconf flags needed by specific packages.
    CONFIGURE_FLAGS = {
        'phosphor-objmgr': ['--enable-unpatched-systemd'],
        'sdbusplus': ['--enable-transaction'],
        'phosphor-logging':
        ['--enable-metadata-processing',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Consumed by get_autoconf_deps/get_meson_deps to map detected
    # libraries/headers/modules to their openbmc repositories.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Used by DepTree.ReorderDeps to force *-dbus-interfaces repos to
    # install after phosphor-logging.
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    BRANCH = args.BRANCH
    # printline is the script-wide logger: real output only with --verbose
    # (Python 2 print statement), otherwise a no-op.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print arg,
            print
    else:
        printline = lambda *l: None

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
    check_call_cmd(WORKSPACE, "./format-code.sh", CODE_SCAN_DIR)

    # Automake and meson
    if (os.path.isfile(CODE_SCAN_DIR + "/configure.ac") or
        os.path.isfile(CODE_SCAN_DIR + '/meson.build')):
        # Open the umask so dependency installs are world-readable;
        # restored at the end of this branch.
        prev_umask = os.umask(000)
        # Determine dependencies and add them
        dep_added = dict()
        dep_added[UNIT_TEST_PKG] = False
        # Create dependency tree
        dep_tree = DepTree(UNIT_TEST_PKG)
        build_dep_tree(UNIT_TEST_PKG,
                       os.path.join(WORKSPACE, UNIT_TEST_PKG),
                       dep_added,
                       dep_tree,
                       BRANCH)

        # Reorder Dependency Tree
        for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
            dep_tree.ReorderDeps(pkg_name, regex_str)
        if args.verbose:
            dep_tree.PrintTree()
        install_list = dep_tree.GetInstallList()
        # We don't want to treat our package as a dependency
        install_list.remove(UNIT_TEST_PKG)
        # install reordered dependencies
        for dep in install_list:
            build_and_install(dep, False)
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        # Run package unit tests
        build_and_install(UNIT_TEST_PKG, True)
        if os.path.isfile(CODE_SCAN_DIR + '/meson.build'):
            # meson packages: run tests, coverage and valgrind via meson/ninja
            check_call_cmd(top_dir, 'meson', 'test', '-C', 'build')
            check_call_cmd(top_dir, 'ninja', '-C', 'build', 'coverage-html')
            check_call_cmd(top_dir, 'meson', 'test', '-C', 'build', '--wrap', 'valgrind')
        else:
            # autoconf packages: use the make-based helpers
            run_unit_tests(top_dir)
            maybe_run_valgrind(top_dir)
            maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

        os.umask(prev_umask)

    # Cmake
    elif os.path.isfile(CODE_SCAN_DIR + "/CMakeLists.txt"):
        top_dir = os.path.join(WORKSPACE, UNIT_TEST_PKG)
        os.chdir(top_dir)
        check_call_cmd(top_dir, 'cmake', '.')
        check_call_cmd(top_dir, 'cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))
        if make_target_exists('test'):
            check_call_cmd(top_dir, 'ctest', '.')
        maybe_run_valgrind(top_dir)
        maybe_run_coverage(top_dir)
        run_cppcheck(top_dir)

    else:
        print "Not a supported repo for CI Tests, exit"
        quit()
718