xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision e5fffa07e898c6ab3a207674a3fa7f4ac320d49f)
1#!/usr/bin/env python
2
3"""
4This script determines the given package's openbmc dependencies from its
5build configuration (e.g. its configure.ac or meson.build), then downloads,
6configures, builds, and installs each of those dependencies. The given package
7is then configured, built, and installed before its unit tests are executed.
8"""
9
10from git import Repo
11from urlparse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import sets
19import subprocess
20import shutil
21import platform
22
23
24class DepTree():
25    """
26    Represents a package dependency tree, where each node is a DepTree with a
27    name and DepTree children.
28    """
29
30    def __init__(self, name):
31        """
32        Create new DepTree.
33
34        Parameter descriptions:
35        name               Name of new tree node.
36        """
37        self.name = name
38        self.children = list()
39
40    def AddChild(self, name):
41        """
42        Add new child node to current node.
43
44        Parameter descriptions:
45        name               Name of new child
46        """
47        new_child = DepTree(name)
48        self.children.append(new_child)
49        return new_child
50
51    def AddChildNode(self, node):
52        """
53        Add existing child node to current node.
54
55        Parameter descriptions:
56        node               Tree node to add
57        """
58        self.children.append(node)
59
60    def RemoveChild(self, name):
61        """
62        Remove child node.
63
64        Parameter descriptions:
65        name               Name of child to remove
66        """
67        for child in self.children:
68            if child.name == name:
69                self.children.remove(child)
70                return
71
72    def GetNode(self, name):
73        """
74        Return node with matching name. Return None if not found.
75
76        Parameter descriptions:
77        name               Name of node to return
78        """
79        if self.name == name:
80            return self
81        for child in self.children:
82            node = child.GetNode(name)
83            if node:
84                return node
85        return None
86
87    def GetParentNode(self, name, parent_node=None):
88        """
89        Return parent of node with matching name. Return None if not found.
90
91        Parameter descriptions:
92        name               Name of node to get parent of
93        parent_node        Parent of current node
94        """
95        if self.name == name:
96            return parent_node
97        for child in self.children:
98            found_node = child.GetParentNode(name, self)
99            if found_node:
100                return found_node
101        return None
102
103    def GetPath(self, name, path=None):
104        """
105        Return list of node names from head to matching name.
106        Return None if not found.
107
108        Parameter descriptions:
109        name               Name of node
110        path               List of node names from head to current node
111        """
112        if not path:
113            path = []
114        if self.name == name:
115            path.append(self.name)
116            return path
117        for child in self.children:
118            match = child.GetPath(name, path + [self.name])
119            if match:
120                return match
121        return None
122
123    def GetPathRegex(self, name, regex_str, path=None):
124        """
125        Return list of node paths that end in name, or match regex_str.
126        Return empty list if not found.
127
128        Parameter descriptions:
129        name               Name of node to search for
130        regex_str          Regex string to match node names
131        path               Path of node names from head to current node
132        """
133        new_paths = []
134        if not path:
135            path = []
136        match = re.match(regex_str, self.name)
137        if (self.name == name) or (match):
138            new_paths.append(path + [self.name])
139        for child in self.children:
140            return_paths = None
141            full_path = path + [self.name]
142            return_paths = child.GetPathRegex(name, regex_str, full_path)
143            for i in return_paths:
144                new_paths.append(i)
145        return new_paths
146
147    def MoveNode(self, from_name, to_name):
148        """
149        Move existing from_name node to become a child of the to_name node.
150
151        Parameter descriptions:
152        from_name          Name of node to make a child of to_name
153        to_name            Name of node to make parent of from_name
154        """
155        parent_from_node = self.GetParentNode(from_name)
156        from_node = self.GetNode(from_name)
157        parent_from_node.RemoveChild(from_name)
158        to_node = self.GetNode(to_name)
159        to_node.AddChildNode(from_node)
160
161    def ReorderDeps(self, name, regex_str):
162        """
163        Reorder dependency tree.  If the tree contains nodes whose names
164        match 'name' and 'regex_str', move the nodes matching 'regex_str'
165        that appear to the right of the 'name' node so that they become
166        children of the 'name' node.
167
168        Parameter descriptions:
169        name               Name of node to look for
170        regex_str          Regex string to match names to
171        """
172        name_path = self.GetPath(name)
173        if not name_path:
174            return
175        paths = self.GetPathRegex(name, regex_str)
176        is_name_in_paths = False
177        name_index = 0
178        for i in range(len(paths)):
179            path = paths[i]
180            if path[-1] == name:
181                is_name_in_paths = True
182                name_index = i
183                break
184        if not is_name_in_paths:
185            return
186        for i in range(name_index + 1, len(paths)):
187            path = paths[i]
188            if name in path:
189                continue
190            from_name = path[-1]
191            self.MoveNode(from_name, name)
192
193    def GetInstallList(self):
194        """
195        Return post-order list of node names.
196
197        Parameter descriptions:
198        """
199        install_list = []
200        for child in self.children:
201            child_install_list = child.GetInstallList()
202            install_list.extend(child_install_list)
203        install_list.append(self.name)
204        return install_list
205
206    def PrintTree(self, level=0):
207        """
208        Print pre-order node names with indentation denoting node depth level.
209
210        Parameter descriptions:
211        level              Current depth level
212        """
213        INDENT_PER_LEVEL = 4
214        print ' ' * (level * INDENT_PER_LEVEL) + self.name
215        for child in self.children:
216            child.PrintTree(level + 1)
217
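# Illustrative DepTree usage, mirroring how build_dep_tree() and the __main__
# block below drive the class (not executed here):
#   tree = DepTree('phosphor-logging')
#   tree.AddChild('sdbusplus')
#   tree.ReorderDeps('phosphor-logging', r'\S+-dbus-interfaces$')
#   install_order = tree.GetInstallList()  # post-order: children before parents
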
218
219def check_call_cmd(*cmd):
220    """
221    Verbosely prints the current working directory and the command to be
222    executed, then runs the command using check_call.
223
224    Parameter descriptions:
225    cmd                 List of parameters constructing the complete command,
226                        executed from the current working directory
227    """
228    printline(os.getcwd(), ">", " ".join(cmd))
229    check_call(cmd)
230
231
232def clone_pkg(pkg, branch):
233    """
234    Clone the given openbmc package's git repository from gerrit into
235    the WORKSPACE location
236
237    Parameter descriptions:
238    pkg                 Name of the package to clone
239    branch              Branch to clone from pkg
240    """
241    pkg_dir = os.path.join(WORKSPACE, pkg)
242    if os.path.exists(os.path.join(pkg_dir, '.git')):
243        return pkg_dir
244    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
245    os.mkdir(pkg_dir)
246    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
247    try:
248        # first try the branch
249        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
250        repo_inst = clone.working_dir
251    except:
252        printline("Input branch not found, defaulting to master")
253        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
254        repo_inst = clone.working_dir
255    return repo_inst
256
257
258def make_target_exists(target):
259    """
260    Runs a check against the makefile in the current directory to determine
261    if the target exists so that it can be built.
262
263    Parameter descriptions:
264    target              The make target we are checking
265    """
266    try:
267        cmd = ['make', '-n', target]
268        with open(os.devnull, 'w') as devnull:
269            check_call(cmd, stdout=devnull, stderr=devnull)
270        return True
271    except CalledProcessError:
272        return False
273
274
275make_parallel = [
276    'make',
277    # Run enough jobs to saturate all the cpus
278    '-j', str(multiprocessing.cpu_count()),
279    # Don't start more jobs if the load avg is too high
280    '-l', str(multiprocessing.cpu_count()),
281    # Synchronize the output so logs aren't intermixed in stdout / stderr
282    '-O',
283]
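
# With 4 CPUs, for example, this expands to ['make', '-j', '4', '-l', '4', '-O'];
# the list is reused below as `make_parallel + ['check']`, `+ ['install']`, etc.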
284
285
286def build_and_install(name, build_for_testing=False):
287    """
288    Builds and installs the package in the environment. Optionally
289    builds the examples and test cases for the package.
290
291    Parameter description:
292    name                The name of the package we are building
293    build_for_testing   Whether to enable testing-related options for the package
294    """
295    os.chdir(os.path.join(WORKSPACE, name))
296
297    # Refresh dynamic linker run time bindings for dependencies
298    check_call_cmd('sudo', '-n', '--', 'ldconfig')
299
300    pkg = Package()
301    if build_for_testing:
302        pkg.test()
303    else:
304        pkg.install()
305
306
307def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
308    """
309    For each package (name), starting with the package to be unit tested,
310    extract its dependencies. For each package dependency defined, recursively
311    apply the same strategy.
312
313    Parameter descriptions:
314    name                Name of the package
315    pkgdir              Directory where package source is located
316    dep_added           Current dict of dependencies and added status
317    head                Head node of the dependency tree
318    branch              Branch to clone from pkg
319    dep_tree            Current dependency tree node
320    """
321    if not dep_tree:
322        dep_tree = head
323
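    # /tmp/depcache is expected to be pre-populated (outside of this script)
    # with the names of dependencies already present in the environment; any
    # dependency listed there is skipped in the loop below.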
324    with open("/tmp/depcache", "r") as depcache:
325        cache = depcache.readline()
326
327    # Read out pkg dependencies
328    pkg = Package(name, pkgdir)
329
330    for dep in sets.Set(pkg.build_system().dependencies()):
331        if dep in cache:
332            continue
333        # Dependency package not already known
334        if dep_added.get(dep) is None:
335            # Dependency package not added
336            new_child = dep_tree.AddChild(dep)
337            dep_added[dep] = False
338            dep_pkgdir = clone_pkg(dep, branch)
339            # Determine this dependency package's
340            # dependencies and add them before
341            # returning to add this package
342            dep_added = build_dep_tree(dep,
343                                       dep_pkgdir,
344                                       dep_added,
345                                       head,
346                                       branch,
347                                       new_child)
348        else:
349            # Dependency package known and added
350            if dep_added[dep]:
351                continue
352            else:
353                # Cyclic dependency failure
354                raise Exception("Cyclic dependencies found in "+name)
355
356    if not dep_added[name]:
357        dep_added[name] = True
358
359    return dep_added
360
361
362def run_cppcheck():
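    # Select C/C++ sources and headers (.c, .h, .cpp, .hpp; case-insensitive)
    # while skipping generated '.mako.' files anywhere in the path.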
363    match_re = re.compile('((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
364    cppcheck_files = []
365    stdout = subprocess.check_output(['git', 'ls-files'])
366
367    for f in stdout.decode('utf-8').split():
368        if match_re.match(f):
369            cppcheck_files.append(f)
370
371    if not cppcheck_files:
372        # skip cppcheck if there aren't any C or C++ sources.
373        print("no files")
374        return None
375
376    # http://cppcheck.sourceforge.net/manual.pdf
377    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
378              '--enable=all', '--file-list=-']
379
380    cppcheck_process = subprocess.Popen(
381        params,
382        stdout=subprocess.PIPE,
383        stderr=subprocess.PIPE,
384        stdin=subprocess.PIPE)
385    (stdout, stderr) = cppcheck_process.communicate(
386        input='\n'.join(cppcheck_files))
387
388    if cppcheck_process.wait():
389        raise Exception('Cppcheck failed')
390    print(stdout)
391    print(stderr)
392
393
394def is_valgrind_safe():
395    """
396    Returns whether it is safe to run valgrind on our platform
397    """
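    # Probe the platform by compiling a small program that copies and compares
    # a heap string, then running it under valgrind with --error-exitcode=99;
    # any valgrind finding (or build failure) makes this return False.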
398    src = 'unit-test-vg.c'
399    exe = './unit-test-vg'
400    with open(src, 'w') as h:
401        h.write('#include <errno.h>\n')
402        h.write('#include <stdio.h>\n')
403        h.write('#include <stdlib.h>\n')
404        h.write('#include <string.h>\n')
405        h.write('int main() {\n')
406        h.write('char *heap_str = malloc(16);\n')
407        h.write('strcpy(heap_str, "RandString");\n')
408        h.write('int res = strcmp("RandString", heap_str);\n')
409        h.write('free(heap_str);\n')
410        h.write('char errstr[64];\n')
411        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
412        h.write('printf("%s\\n", errstr);\n')
413        h.write('return res;\n')
414        h.write('}\n')
415    try:
416        with open(os.devnull, 'w') as devnull:
417            check_call(['gcc', '-O2', '-o', exe, src],
418                       stdout=devnull, stderr=devnull)
419            check_call(['valgrind', '--error-exitcode=99', exe],
420                       stdout=devnull, stderr=devnull)
421        return True
422    except:
423        sys.stderr.write("###### Platform is not valgrind safe ######\n")
424        return False
425    finally:
426        os.remove(src)
427        os.remove(exe)
428
429
430def is_sanitize_safe():
431    """
432    Returns whether it is safe to run sanitizers on our platform
433    """
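    # Probe by building and running a trivial program with ASan and UBSan
    # enabled; if either step fails, sanitized test runs are skipped.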
434    src = 'unit-test-sanitize.c'
435    exe = './unit-test-sanitize'
436    with open(src, 'w') as h:
437        h.write('int main() { return 0; }\n')
438    try:
439        with open(os.devnull, 'w') as devnull:
440            check_call(['gcc', '-O2', '-fsanitize=address',
441                        '-fsanitize=undefined', '-o', exe, src],
442                       stdout=devnull, stderr=devnull)
443            check_call([exe], stdout=devnull, stderr=devnull)
444        return True
445    except:
446        sys.stderr.write("###### Platform is not sanitize safe ######\n")
447        return False
448    finally:
449        os.remove(src)
450        os.remove(exe)
451
452
453def maybe_make_valgrind():
454    """
455    Potentially runs the unit tests through valgrind for the package
456    via `make check-valgrind`. If the package does not have valgrind testing
457    then it just skips over this.
458    """
459    # Valgrind testing is currently broken by an aggressive strcmp optimization
460    # that is inlined into optimized code for POWER by gcc 7+. Until we find
461    # a workaround, just don't run valgrind tests on POWER.
462    # https://github.com/openbmc/openbmc/issues/3315
463    if not is_valgrind_safe():
464        sys.stderr.write("###### Skipping valgrind ######\n")
465        return
466    if not make_target_exists('check-valgrind'):
467        return
468
469    try:
470        cmd = make_parallel + ['check-valgrind']
471        check_call_cmd(*cmd)
472    except CalledProcessError:
473        for root, _, files in os.walk(os.getcwd()):
474            for f in files:
475                if re.search('test-suite-[a-z]+.log', f) is None:
476                    continue
477                check_call_cmd('cat', os.path.join(root, f))
478        raise Exception('Valgrind tests failed')
479
480
481def maybe_make_coverage():
482    """
483    Potentially runs the unit tests through code coverage for the package
484    via `make check-code-coverage`. If the package does not have code coverage
485    testing then it just skips over this.
486    """
487    if not make_target_exists('check-code-coverage'):
488        return
489
490    # Actually run code coverage
491    try:
492        cmd = make_parallel + ['check-code-coverage']
493        check_call_cmd(*cmd)
494    except CalledProcessError:
495        raise Exception('Code coverage failed')
496
497
498class BuildSystem(object):
499    """
500    Build systems generally provide the means to configure, build, install and
501    test software. The BuildSystem class defines a set of interfaces on top of
502    which Autotools, Meson, CMake and possibly other build system drivers can
503    be implemented, separating out the phases to control whether a package
504    should merely be installed or also tested and analyzed.
505    """
506    def __init__(self, package, path):
507        """Initialise the driver with properties independent of the build system
508
509        Keyword arguments:
510        package: The name of the package. Derived from the path if None
511        path: The path to the package. Set to the working directory if None
512        """
513        self.path = "." if not path else path
514        self.package = package if package else os.path.basename(os.path.realpath(self.path))
515        self.build_for_testing = False
516
517    def probe(self):
518        """Test if the build system driver can be applied to the package
519
520        Return True if the driver can drive the package's build system,
521        otherwise False.
522
523        Generally probe() is implemented by testing for the presence of the
524        build system's configuration file(s).
525        """
526        raise NotImplementedError()
527
528    def dependencies(self):
529        """Provide the package's dependencies
530
531        Returns a list of dependencies. If no dependencies are required then an
532        empty list must be returned.
533
534        Generally dependencies() is implemented by analysing and extracting the
535        data from the build system configuration.
536        """
537        raise NotImplementedError()
538
539    def configure(self, build_for_testing):
540        """Configure the source ready for building
541
542        Should raise an exception if configuration failed.
543
544        Keyword arguments:
545        build_for_testing: Mark the package as being built for testing rather
546                           than for installation as a dependency for the
547                           package under test. Setting to True generally
548                           implies that the package will be configured to build
549                           with debug information, at a low level of
550                           optimisation and possibly with sanitizers enabled.
551
552        Generally configure() is implemented by invoking the build system
553        tooling to generate Makefiles or equivalent.
554        """
555        raise NotImplementedError()
556
557    def build(self):
558        """Build the software ready for installation and/or testing
559
560        Should raise an exception if the build fails
561
562        Generally build() is implemented by invoking `make` or `ninja`.
563        """
564        raise NotImplementedError()
565
566    def install(self):
567        """Install the software ready for use
568
569        Should raise an exception if installation fails
570
571        Like build(), install() is generally implemented by invoking `make` or
572        `ninja`.
573        """
574        raise NotImplementedError()
575
576    def test(self):
577        """Build and run the test suite associated with the package
578
579        Should raise an exception if the build or testing fails.
580
581        Like install(), test() is generally implemented by invoking `make` or
582        `ninja`.
583        """
584        raise NotImplementedError()
585
586    def analyze(self):
587        """Run any supported analysis tools over the codebase
588
589        Should raise an exception if analysis fails.
590
591        Some analysis tools such as scan-build need injection into the build
592        system. analyze() provides the necessary hook to implement such
593        behaviour. Analyzers independent of the build system can also be
594        specified here but at the cost of possible duplication of code between
595        the build system driver implementations.
596        """
597        raise NotImplementedError()
598
599
600class Autotools(BuildSystem):
601    def __init__(self, package=None, path=None):
602        super(Autotools, self).__init__(package, path)
603
604    def probe(self):
605        return os.path.isfile(os.path.join(self.path, 'configure.ac'))
606
607    def dependencies(self):
608        configure_ac = os.path.join(self.path, 'configure.ac')
609
610        configure_ac_contents = ''
611        # Prepend some special function overrides so we can parse out
612        # dependencies
613        for macro in DEPENDENCIES.iterkeys():
614            configure_ac_contents += ('m4_define([' + macro + '], [' +
615                                      macro + '_START$' +
616                                      str(DEPENDENCIES_OFFSET[macro] + 1) +
617                                      macro + '_END])\n')
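        # After autoconf expansion, e.g. AC_CHECK_LIB([mapper], ...) becomes
        # "AC_CHECK_LIB_STARTmapperAC_CHECK_LIB_END"; the regex matching below
        # recovers the macro name and its dependency argument from that text.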
618        with open(configure_ac, "rt") as f:
619            configure_ac_contents += f.read()
620
621        autoconf_process = subprocess.Popen(['autoconf', '-Wno-undefined', '-'],
622                                            stdin=subprocess.PIPE,
623                                            stdout=subprocess.PIPE,
624                                            stderr=subprocess.PIPE)
625        (stdout, stderr) = autoconf_process.communicate(input=configure_ac_contents)
626        if not stdout:
627            print(stderr)
628            raise Exception("Failed to run autoconf for parsing dependencies")
629
630        # Parse out all of the dependency text
631        matches = []
632        for macro in DEPENDENCIES.iterkeys():
633            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
634            for match in re.compile(pattern).finditer(stdout):
635                matches.append((match.group(1), match.group(2)))
636
637        # Look up dependencies from the text
638        found_deps = []
639        for macro, deptext in matches:
640            for potential_dep in deptext.split(' '):
641                for known_dep in DEPENDENCIES[macro].iterkeys():
642                    if potential_dep.startswith(known_dep):
643                        found_deps.append(DEPENDENCIES[macro][known_dep])
644
645        return found_deps
646
647    def _configure_feature(self, flag, enabled):
648        """
649        Returns a configure flag as a string
650
651        Parameters:
652        flag                The name of the flag
653        enabled             Whether the flag is enabled or disabled
654        """
655        return '--' + ('enable' if enabled else 'disable') + '-' + flag
656
657    def configure(self, build_for_testing):
658        self.build_for_testing = build_for_testing
659        conf_flags = [
660            self._configure_feature('silent-rules', False),
661            self._configure_feature('examples', build_for_testing),
662            self._configure_feature('tests', build_for_testing),
663        ]
664        if not TEST_ONLY:
665            conf_flags.extend([
666                self._configure_feature('code-coverage', build_for_testing),
667                self._configure_feature('valgrind', build_for_testing),
668            ])
669        # Add any necessary configure flags for package
670        if CONFIGURE_FLAGS.get(self.package) is not None:
671            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
672        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
673            if os.path.exists(bootstrap):
674                check_call_cmd('./' + bootstrap)
675                break
676        check_call_cmd('./configure', *conf_flags)
677
678    def build(self):
679        check_call_cmd(*make_parallel)
680
681    def install(self):
682        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))
683
684    def test(self):
685        try:
686            cmd = make_parallel + ['check']
687            for i in range(0, args.repeat):
688                check_call_cmd(*cmd)
689        except CalledProcessError:
690            for root, _, files in os.walk(os.getcwd()):
691                if 'test-suite.log' not in files:
692                    continue
693                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
694            raise Exception('Unit tests failed')
695
696    def analyze(self):
697        maybe_make_valgrind()
698        maybe_make_coverage()
699        run_cppcheck()
700
701
702class CMake(BuildSystem):
703    def __init__(self, package=None, path=None):
704        super(CMake, self).__init__(package, path)
705
706    def probe(self):
707        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))
708
709    def dependencies(self):
710        return []
711
712    def configure(self, build_for_testing):
713        self.build_for_testing = build_for_testing
714        check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')
715
716    def build(self):
717        check_call_cmd('cmake', '--build', '.', '--', '-j',
718                       str(multiprocessing.cpu_count()))
719
720    def install(self):
721        pass
722
723    def test(self):
724        if make_target_exists('test'):
725            check_call_cmd('ctest', '.')
726
727    def analyze(self):
728        if os.path.isfile('.clang-tidy'):
729            check_call_cmd('run-clang-tidy-8.py', '-p', '.')
730        maybe_make_valgrind()
731        maybe_make_coverage()
732        run_cppcheck()
733
734
735class Meson(BuildSystem):
736    def __init__(self, package=None, path=None):
737        super(Meson, self).__init__(package, path)
738
739    def probe(self):
740        return os.path.isfile(os.path.join(self.path, 'meson.build'))
741
742    def dependencies(self):
743        meson_build = os.path.join(self.path, 'meson.build')
744        if not os.path.exists(meson_build):
745            return []
746
747        found_deps = []
748        for root, dirs, files in os.walk(self.path):
749            if 'meson.build' not in files:
750                continue
751            with open(os.path.join(root, 'meson.build'), 'rt') as f:
752                build_contents = f.read()
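            # Pull the first argument out of each dependency('<name>', ...)
            # call and map it to a repository via DEPENDENCIES['PKG_CHECK_MODULES'].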
753            for match in re.finditer(r"dependency\('([^']*)'.*?\)\n", build_contents):
754                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(match.group(1))
755                if maybe_dep is not None:
756                    found_deps.append(maybe_dep)
757
758        return found_deps
759
760    def _parse_options(self, options_file):
761        """
762        Returns the set of options defined in the provided meson_options.txt file
763
764        Parameters:
765        options_file        The file containing options
766        """
767        options_contents = ''
768        with open(options_file, "rt") as f:
769            options_contents += f.read()
770        options = sets.Set()
771        pattern = 'option\\(\\s*\'([^\']*)\''
772        for match in re.compile(pattern).finditer(options_contents):
773            options.add(match.group(1))
774        return options
775
776    def _configure_feature(self, val):
777        """
778        Returns the meson flag which signifies the value
779
780        True maps to 'enabled', which requires the feature.
781        False maps to 'disabled', which disables the feature.
782        None maps to 'auto', which autodetects the feature.
783
784        Parameters:
785        val                 The value being converted
786        """
787        if val is True:
788            return "enabled"
789        elif val is False:
790            return "disabled"
791        elif val is None:
792            return "auto"
793        else:
794            raise Exception("Bad meson feature value")
795
796    def configure(self, build_for_testing):
797        self.build_for_testing = build_for_testing
798        meson_options = sets.Set()
799        if os.path.exists("meson_options.txt"):
800            meson_options = self._parse_options("meson_options.txt")
801        meson_flags = [
802            '-Db_colorout=never',
803            '-Dwerror=true',
804            '-Dwarning_level=3',
805        ]
806        if build_for_testing:
807            meson_flags.append('--buildtype=debug')
808        else:
809            meson_flags.append('--buildtype=debugoptimized')
810        if 'tests' in meson_options:
811            meson_flags.append('-Dtests=' + self._configure_feature(build_for_testing))
812        if 'examples' in meson_options:
813            meson_flags.append('-Dexamples=' + str(build_for_testing).lower())
814        if MESON_FLAGS.get(self.package) is not None:
815            meson_flags.extend(MESON_FLAGS.get(self.package))
816        try:
817            check_call_cmd('meson', 'setup', '--reconfigure', 'build', *meson_flags)
818        except:
819            shutil.rmtree('build')
820            check_call_cmd('meson', 'setup', 'build', *meson_flags)
821
822    def build(self):
823        check_call_cmd('ninja', '-C', 'build')
824
825    def install(self):
826        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
827
828    def test(self):
829        try:
830            check_call_cmd('meson', 'test', '-C', 'build')
831        except CalledProcessError:
832            for root, _, files in os.walk(os.getcwd()):
833                if 'testlog.txt' not in files:
834                    continue
835                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
836            raise Exception('Unit tests failed')
837
838    def _setup_exists(self, setup):
839        """
840        Returns whether the meson build supports the named test setup.
841
842        Parameter descriptions:
843        setup              The setup target to check
844        """
845        try:
846            with open(os.devnull, 'w') as devnull:
847                output = subprocess.check_output(
848                        ['meson', 'test', '-C', 'build',
849                         '--setup', setup, '-t', '0'],
850                        stderr=subprocess.STDOUT)
851        except CalledProcessError as e:
852            output = e.output
853        return not re.search('Test setup .* not found from project', output)
854
855    def _maybe_valgrind(self):
856        """
857        Potentially runs the unit tests through valgrind for the package
858        via `meson test`. The package can specify custom valgrind configurations
859        by utilizing add_test_setup() in a meson.build
860        """
861        if not is_valgrind_safe():
862            sys.stderr.write("###### Skipping valgrind ######\n")
863            return
864        try:
865            if self._setup_exists('valgrind'):
866                check_call_cmd('meson', 'test', '-C', 'build',
867                               '--setup', 'valgrind')
868            else:
869                check_call_cmd('meson', 'test', '-C', 'build',
870                               '--wrapper', 'valgrind')
871        except CalledProcessError:
872            for root, _, files in os.walk(os.getcwd()):
873                if 'testlog-valgrind.txt' not in files:
874                    continue
875                check_call_cmd('cat', os.path.join(root, 'testlog-valgrind.txt'))
876            raise Exception('Valgrind tests failed')
877
878    def analyze(self):
879        self._maybe_valgrind()
880
881        # Run clang-tidy only if the project has a configuration
882        if os.path.isfile('.clang-tidy'):
883            check_call_cmd('run-clang-tidy-8.py', '-p',
884                           'build')
885        # Run the basic clang static analyzer otherwise
886        else:
887            check_call_cmd('ninja', '-C', 'build',
888                           'scan-build')
889
890        # Run tests through sanitizers
891        # b_lundef is needed if clang++ is CXX since it resolves the
892        # asan symbols at runtime only. We don't want to set it earlier
893        # in the build process to ensure we don't have undefined
894        # runtime code.
895        if is_sanitize_safe():
896            check_call_cmd('meson', 'configure', 'build',
897                           '-Db_sanitize=address,undefined',
898                           '-Db_lundef=false')
899            check_call_cmd('meson', 'test', '-C', 'build',
900                           '--logbase', 'testlog-ubasan')
901            # TODO: Fix memory sanitizer
902            # check_call_cmd('meson', 'configure', 'build',
903            #                '-Db_sanitize=memory')
904            # check_call_cmd('meson', 'test', '-C', 'build'
905            #                '--logbase', 'testlog-msan')
906            check_call_cmd('meson', 'configure', 'build',
907                           '-Db_sanitize=none', '-Db_lundef=true')
908        else:
909            sys.stderr.write("###### Skipping sanitizers ######\n")
910
911        # Run coverage checks
912        check_call_cmd('meson', 'configure', 'build',
913                       '-Db_coverage=true')
914        self.test()
915        # Only build coverage HTML if coverage files were produced
916        for root, dirs, files in os.walk('build'):
917            if any([f.endswith('.gcda') for f in files]):
918                check_call_cmd('ninja', '-C', 'build',
919                               'coverage-html')
920                break
921        check_call_cmd('meson', 'configure', 'build',
922                       '-Db_coverage=false')
923        run_cppcheck()
924
925
926class Package(object):
927    def __init__(self, name=None, path=None):
928        self.supported = [Autotools, Meson, CMake]
929        self.name = name
930        self.path = path
931        self.test_only = False
932
933    def build_systems(self):
934        instances = (system(self.name, self.path) for system in self.supported)
935        return (instance for instance in instances if instance.probe())
936
937    def build_system(self, preferred=None):
938        systems = self.build_systems()
939
940        if preferred:
941            return {type(system): system for system in systems}[preferred]
942
943        return next(iter(systems))
944
945    def install(self, system=None):
946        if not system:
947            system = self.build_system()
948
949        system.configure(False)
950        system.build()
951        system.install()
952
953    def _test_one(self, system):
954        system.configure(True)
955        system.build()
956        system.install()
957        system.test()
958        system.analyze()
959
960    def test(self):
961        for system in self.build_systems():
962            self._test_one(system)
963
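# Illustrative Package usage (paths are hypothetical), mirroring how
# build_and_install() and build_dep_tree() above use the class:
#   pkg = Package('sdbusplus', '/home/user/openbmc-ws/sdbusplus')
#   deps = pkg.build_system().dependencies()  # probe deps without building
#   pkg.install()                             # configure/build/install as a dep
#   pkg.test()                                # configure/build/test/analyze
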
964
965def find_file(filename, basedir):
966    """
967    Finds all occurrences of a file in the base directory
968    and returns the paths at which they were found.
969
970    Parameter descriptions:
971    filename              The name of the file to find
972    basedir               The base directory to search in
973    """
974
975    filepaths = []
976    for root, dirs, files in os.walk(basedir):
977        if filename in files:
978            filepaths.append(os.path.join(root, filename))
979    return filepaths
980
981if __name__ == '__main__':
982    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
983    CONFIGURE_FLAGS = {
984        'sdbusplus': ['--enable-transaction'],
985        'phosphor-logging':
986        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
987         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
988    }
989
990    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
991    MESON_FLAGS = {
992    }
993
994    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
995    DEPENDENCIES = {
996        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
997        'AC_CHECK_HEADER': {
998            'host-ipmid': 'phosphor-host-ipmid',
999            'blobs-ipmid': 'phosphor-ipmi-blobs',
1000            'sdbusplus': 'sdbusplus',
1001            'sdeventplus': 'sdeventplus',
1002            'stdplus': 'stdplus',
1003            'gpioplus': 'gpioplus',
1004            'phosphor-logging/log.hpp': 'phosphor-logging',
1005        },
1006        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
1007        'PKG_CHECK_MODULES': {
1008            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
1009            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
1010            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
1011            'libipmid': 'phosphor-host-ipmid',
1012            'libipmid-host': 'phosphor-host-ipmid',
1013            'sdbusplus': 'sdbusplus',
1014            'sdeventplus': 'sdeventplus',
1015            'stdplus': 'stdplus',
1016            'gpioplus': 'gpioplus',
1017            'phosphor-logging': 'phosphor-logging',
1018            'phosphor-snmp': 'phosphor-snmp',
1019            'ipmiblob': 'ipmi-blob-tool',
1020        },
1021    }
1022
1023    # Offset into array of macro parameters MACRO(0, 1, ...N)
1024    DEPENDENCIES_OFFSET = {
1025        'AC_CHECK_LIB': 0,
1026        'AC_CHECK_HEADER': 0,
1027        'AC_PATH_PROG': 1,
1028        'PKG_CHECK_MODULES': 1,
1029    }
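    # e.g. PKG_CHECK_MODULES([VARIABLE], [modules...]) carries its dependency
    # list in parameter index 1, while AC_CHECK_LIB inspects parameter index 0.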
1030
1031    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
1032    DEPENDENCIES_REGEX = {
1033        'phosphor-logging': r'\S+-dbus-interfaces$'
1034    }
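    # i.e. any '*-dbus-interfaces' dependency discovered after phosphor-logging
    # in the tree is reparented under it so that it is installed first
    # (see DepTree.ReorderDeps()).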
1035
1036    # Set command line arguments
1037    parser = argparse.ArgumentParser()
1038    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
1039                        help="Workspace directory location (e.g. /home)")
1040    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
1041                        help="OpenBMC package to be unit tested")
1042    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
1043                        action="store_true", required=False, default=False,
1044                        help="Only run test cases, no other validation")
1045    parser.add_argument("-v", "--verbose", action="store_true",
1046                        help="Print additional package status messages")
1047    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
1048                        type=int, default=1)
1049    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
1050                        help="Branch to target for dependent repositories",
1051                        default="master")
1052    parser.add_argument("-n", "--noformat", dest="FORMAT",
1053                        action="store_false", required=False,
1054                        help="Whether or not to run the code format check")
1055    args = parser.parse_args(sys.argv[1:])
1056    WORKSPACE = args.WORKSPACE
1057    UNIT_TEST_PKG = args.PACKAGE
1058    TEST_ONLY = args.TEST_ONLY
1059    BRANCH = args.BRANCH
1060    FORMAT_CODE = args.FORMAT
1061    if args.verbose:
1062        def printline(*line):
1063            for arg in line:
1064                print arg,
1065            print
1066    else:
1067        printline = lambda *l: None
1068
1069    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
1070
1071    # First validate code formatting if repo has style formatting files.
1072    # The format-code.sh checks for these files.
1073    if FORMAT_CODE:
1074        check_call_cmd("./format-code.sh", CODE_SCAN_DIR)
1075
1076    prev_umask = os.umask(000)
1077
1078    # Determine dependencies and add them
1079    dep_added = dict()
1080    dep_added[UNIT_TEST_PKG] = False
1081
1082    # Create dependency tree
1083    dep_tree = DepTree(UNIT_TEST_PKG)
1084    build_dep_tree(UNIT_TEST_PKG,
1085                   os.path.join(WORKSPACE, UNIT_TEST_PKG),
1086                   dep_added,
1087                   dep_tree,
1088                   BRANCH)
1089
1090    # Reorder Dependency Tree
1091    for pkg_name, regex_str in DEPENDENCIES_REGEX.iteritems():
1092        dep_tree.ReorderDeps(pkg_name, regex_str)
1093    if args.verbose:
1094        dep_tree.PrintTree()
1095
1096    install_list = dep_tree.GetInstallList()
1097
1098    # We don't want to treat our package as a dependency
1099    install_list.remove(UNIT_TEST_PKG)
1100
1101    # Install reordered dependencies
1102    for dep in install_list:
1103        build_and_install(dep, False)
1104
1105    # Run package unit tests
1106    build_and_install(UNIT_TEST_PKG, True)
1107
1108    os.umask(prev_umask)
1109
1110    # Run any custom CI scripts the repo has, of which there can be
1111    # several, located anywhere in the repository.
1112    ci_scripts = find_file('run-ci.sh', os.path.join(WORKSPACE, UNIT_TEST_PKG))
1113    if ci_scripts:
1114        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
1115        for ci_script in ci_scripts:
1116            check_call_cmd('sh', ci_script)
1117