xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 38356643280ba6c38c03050e7d4a0b9673c5ea7b)
1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac (or meson.build) file, then downloads, configures, builds, and
6installs each of those dependencies. Finally, the given package itself is
7configured, built, and installed prior to executing its unit tests.
8"""
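# A typical invocation (illustrative workspace path and package name) is:
#
#   ./unit-test.py -w /home/dev/workspace -p phosphor-logging -v
#
# where -w names the workspace directory, -p the OpenBMC package under test,
# and -v enables verbose status messages (see the argument parser below).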
9
10from git import Repo
11from urllib.parse import urljoin
12from subprocess import check_call, call, CalledProcessError
13import os
14import sys
15import argparse
16import multiprocessing
17import re
18import subprocess
19import shutil
20import platform
21
22
23class DepTree():
24    """
25    Represents package dependency tree, where each node is a DepTree with a
26    name and DepTree children.
27    """
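    # Illustrative example (hypothetical package names):
    #   tree = DepTree('app')
    #   tree.AddChild('libfoo')
    #   tree.GetInstallList()   # -> ['libfoo', 'app']
    # i.e. dependencies appear in the install list before the packages that
    # require them.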
28
29    def __init__(self, name):
30        """
31        Create new DepTree.
32
33        Parameter descriptions:
34        name               Name of new tree node.
35        """
36        self.name = name
37        self.children = list()
38
39    def AddChild(self, name):
40        """
41        Add new child node to current node.
42
43        Parameter descriptions:
44        name               Name of new child
45        """
46        new_child = DepTree(name)
47        self.children.append(new_child)
48        return new_child
49
50    def AddChildNode(self, node):
51        """
52        Add existing child node to current node.
53
54        Parameter descriptions:
55        node               Tree node to add
56        """
57        self.children.append(node)
58
59    def RemoveChild(self, name):
60        """
61        Remove child node.
62
63        Parameter descriptions:
64        name               Name of child to remove
65        """
66        for child in self.children:
67            if child.name == name:
68                self.children.remove(child)
69                return
70
71    def GetNode(self, name):
72        """
73        Return node with matching name. Return None if not found.
74
75        Parameter descriptions:
76        name               Name of node to return
77        """
78        if self.name == name:
79            return self
80        for child in self.children:
81            node = child.GetNode(name)
82            if node:
83                return node
84        return None
85
86    def GetParentNode(self, name, parent_node=None):
87        """
88        Return parent of node with matching name. Return None if not found.
89
90        Parameter descriptions:
91        name               Name of node to get parent of
92        parent_node        Parent of current node
93        """
94        if self.name == name:
95            return parent_node
96        for child in self.children:
97            found_node = child.GetParentNode(name, self)
98            if found_node:
99                return found_node
100        return None
101
102    def GetPath(self, name, path=None):
103        """
104        Return list of node names from head to matching name.
105        Return None if not found.
106
107        Parameter descriptions:
108        name               Name of node
109        path               List of node names from head to current node
110        """
111        if not path:
112            path = []
113        if self.name == name:
114            path.append(self.name)
115            return path
116        for child in self.children:
117            match = child.GetPath(name, path + [self.name])
118            if match:
119                return match
120        return None
121
122    def GetPathRegex(self, name, regex_str, path=None):
123        """
124        Return list of node paths that end in name, or match regex_str.
125        Return empty list if not found.
126
127        Parameter descriptions:
128        name               Name of node to search for
129        regex_str          Regex string to match node names
130        path               Path of node names from head to current node
131        """
132        new_paths = []
133        if not path:
134            path = []
135        match = re.match(regex_str, self.name)
136        if (self.name == name) or (match):
137            new_paths.append(path + [self.name])
138        for child in self.children:
139            return_paths = None
140            full_path = path + [self.name]
141            return_paths = child.GetPathRegex(name, regex_str, full_path)
142            for i in return_paths:
143                new_paths.append(i)
144        return new_paths
145
146    def MoveNode(self, from_name, to_name):
147        """
148        Move existing from_name node to become a child of the to_name node.
149
150        Parameter descriptions:
151        from_name          Name of node to make a child of to_name
152        to_name            Name of node to make parent of from_name
153        """
154        parent_from_node = self.GetParentNode(from_name)
155        from_node = self.GetNode(from_name)
156        parent_from_node.RemoveChild(from_name)
157        to_node = self.GetNode(to_name)
158        to_node.AddChildNode(from_node)
159
160    def ReorderDeps(self, name, regex_str):
161        """
162        Reorder dependency tree.  If tree contains nodes with names that
163        match 'name' and 'regex_str', move 'regex_str' nodes that are
164        to the right of 'name' node, so that they become children of the
165        'name' node.
166
167        Parameter descriptions:
168        name               Name of node to look for
169        regex_str          Regex string to match names to
170        """
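        # Illustrative example: with the DEPENDENCIES_REGEX entry
        # 'phosphor-logging': r'\S+-dbus-interfaces$', any (hypothetical)
        # *-dbus-interfaces node found after the phosphor-logging node is
        # re-parented under phosphor-logging, so GetInstallList() emits it
        # before phosphor-logging itself.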
171        name_path = self.GetPath(name)
172        if not name_path:
173            return
174        paths = self.GetPathRegex(name, regex_str)
175        is_name_in_paths = False
176        name_index = 0
177        for i in range(len(paths)):
178            path = paths[i]
179            if path[-1] == name:
180                is_name_in_paths = True
181                name_index = i
182                break
183        if not is_name_in_paths:
184            return
185        for i in range(name_index + 1, len(paths)):
186            path = paths[i]
187            if name in path:
188                continue
189            from_name = path[-1]
190            self.MoveNode(from_name, name)
191
192    def GetInstallList(self):
193        """
194        Return post-order list of node names.
195
196        Parameter descriptions:
197        """
198        install_list = []
199        for child in self.children:
200            child_install_list = child.GetInstallList()
201            install_list.extend(child_install_list)
202        install_list.append(self.name)
203        return install_list
204
205    def PrintTree(self, level=0):
206        """
207        Print pre-order node names with indentation denoting node depth level.
208
209        Parameter descriptions:
210        level              Current depth level
211        """
212        INDENT_PER_LEVEL = 4
213        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
214        for child in self.children:
215            child.PrintTree(level + 1)
216
217
218def check_call_cmd(*cmd):
219    """
220    Prints the directory the given command is called from and the command
221    itself (when verbose), then executes the command using check_call.
222
223    Parameter descriptions:
224    cmd                 List of parameters constructing the complete
225                        command to be executed
226    """
227    printline(os.getcwd(), ">", " ".join(cmd))
228    check_call(cmd)
229
230
231def clone_pkg(pkg, branch):
232    """
233    Clone the given openbmc package's git repository from gerrit into
234    the WORKSPACE location
235
236    Parameter descriptions:
237    pkg                 Name of the package to clone
238    branch              Branch to clone from pkg
239    """
240    pkg_dir = os.path.join(WORKSPACE, pkg)
241    if os.path.exists(os.path.join(pkg_dir, '.git')):
242        return pkg_dir
243    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
244    os.mkdir(pkg_dir)
245    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
246    try:
247        # first try the branch
248        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
249        repo_inst = clone.working_dir
250    except Exception:
251        printline("Input branch not found, defaulting to master")
252        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
253        repo_inst = clone.working_dir
254    return repo_inst
255
256
257def make_target_exists(target):
258    """
259    Runs a check against the makefile in the current directory to determine
260    if the target exists so that it can be built.
261
262    Parameter descriptions:
263    target              The make target we are checking
264    """
265    try:
266        cmd = ['make', '-n', target]
267        with open(os.devnull, 'w') as devnull:
268            check_call(cmd, stdout=devnull, stderr=devnull)
269        return True
270    except CalledProcessError:
271        return False
272
273
274make_parallel = [
275    'make',
276    # Run enough jobs to saturate all the cpus
277    '-j', str(multiprocessing.cpu_count()),
278    # Don't start more jobs if the load avg is too high
279    '-l', str(multiprocessing.cpu_count()),
280    # Synchronize the output so logs aren't intermixed in stdout / stderr
281    '-O',
282]
283
284
285def build_and_install(name, build_for_testing=False):
286    """
287    Builds and installs the package in the environment. Optionally
288    builds the examples and test cases for package.
289
290    Parameter description:
291    name                The name of the package we are building
292    build_for_testing   Enable options related to testing on the package?
293    """
294    os.chdir(os.path.join(WORKSPACE, name))
295
296    # Refresh dynamic linker run time bindings for dependencies
297    check_call_cmd('sudo', '-n', '--', 'ldconfig')
298
299    pkg = Package()
300    if build_for_testing:
301        pkg.test()
302    else:
303        pkg.install()
304
305
306def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
307    """
308    For each package (name), starting with the package to be unit tested,
309    extract its dependencies. For each package dependency defined, recursively
310    apply the same strategy
311
312    Parameter descriptions:
313    name                Name of the package
314    pkgdir              Directory where package source is located
315    dep_added           Current dict of dependencies and added status
316    head                Head node of the dependency tree
317    branch              Branch to clone from pkg
318    dep_tree            Current dependency tree node
319    """
320    if not dep_tree:
321        dep_tree = head
322
323    with open("/tmp/depcache", "r") as depcache:
324        cache = depcache.readline()
325
326    # Read out pkg dependencies
327    pkg = Package(name, pkgdir)
328
329    for dep in set(pkg.build_system().dependencies()):
330        if dep in cache:
331            continue
332        # Dependency package not already known
333        if dep_added.get(dep) is None:
334            # Dependency package not added
335            new_child = dep_tree.AddChild(dep)
336            dep_added[dep] = False
337            dep_pkgdir = clone_pkg(dep, branch)
338            # Determine this dependency package's
339            # dependencies and add them before
340            # returning to add this package
341            dep_added = build_dep_tree(dep,
342                                       dep_pkgdir,
343                                       dep_added,
344                                       head,
345                                       branch,
346                                       new_child)
347        else:
348            # Dependency package known and added
349            if dep_added[dep]:
350                continue
351            else:
352                # Cyclic dependency failure
353                raise Exception("Cyclic dependencies found in "+name)
354
355    if not dep_added[name]:
356        dep_added[name] = True
357
358    return dep_added
359
360
361def run_cppcheck():
362    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
363    cppcheck_files = []
364    stdout = subprocess.check_output(['git', 'ls-files'])
365
366    for f in stdout.decode('utf-8').split():
367        if match_re.match(f):
368            cppcheck_files.append(f)
369
370    if not cppcheck_files:
371        # Skip cppcheck if there aren't any C or C++ sources.
372        print("cppcheck: no C/C++ source files to check")
373        return None
374
375    # http://cppcheck.sourceforge.net/manual.pdf
376    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
377              '--enable=all', '--file-list=-']
378
379    cppcheck_process = subprocess.Popen(
380        params,
381        stdout=subprocess.PIPE,
382        stderr=subprocess.PIPE,
383        stdin=subprocess.PIPE)
384    (stdout, stderr) = cppcheck_process.communicate(
385        input='\n'.join(cppcheck_files).encode('utf-8'))
386
387    if cppcheck_process.wait():
388        raise Exception('Cppcheck failed')
389    print(stdout.decode('utf-8'))
390    print(stderr.decode('utf-8'))
391
392
393def is_valgrind_safe():
394    """
395    Returns whether it is safe to run valgrind on our platform
396    """
397    src = 'unit-test-vg.c'
398    exe = './unit-test-vg'
399    with open(src, 'w') as h:
400        h.write('#include <errno.h>\n')
401        h.write('#include <stdio.h>\n')
402        h.write('#include <stdlib.h>\n')
403        h.write('#include <string.h>\n')
404        h.write('int main() {\n')
405        h.write('char *heap_str = malloc(16);\n')
406        h.write('strcpy(heap_str, "RandString");\n')
407        h.write('int res = strcmp("RandString", heap_str);\n')
408        h.write('free(heap_str);\n')
409        h.write('char errstr[64];\n')
410        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
411        h.write('printf("%s\\n", errstr);\n')
412        h.write('return res;\n')
413        h.write('}\n')
414    try:
415        with open(os.devnull, 'w') as devnull:
416            check_call(['gcc', '-O2', '-o', exe, src],
417                       stdout=devnull, stderr=devnull)
418            check_call(['valgrind', '--error-exitcode=99', exe],
419                       stdout=devnull, stderr=devnull)
420        return True
421    except Exception:
422        sys.stderr.write("###### Platform is not valgrind safe ######\n")
423        return False
424    finally:
425        os.remove(src)
426        os.remove(exe)
427
428
429def is_sanitize_safe():
430    """
431    Returns whether it is safe to run sanitizers on our platform
432    """
433    src = 'unit-test-sanitize.c'
434    exe = './unit-test-sanitize'
435    with open(src, 'w') as h:
436        h.write('int main() { return 0; }\n')
437    try:
438        with open(os.devnull, 'w') as devnull:
439            check_call(['gcc', '-O2', '-fsanitize=address',
440                        '-fsanitize=undefined', '-o', exe, src],
441                       stdout=devnull, stderr=devnull)
442            check_call([exe], stdout=devnull, stderr=devnull)
443        return True
444    except Exception:
445        sys.stderr.write("###### Platform is not sanitize safe ######\n")
446        return False
447    finally:
448        os.remove(src)
449        os.remove(exe)
450
451
452def maybe_make_valgrind():
453    """
454    Potentially runs the unit tests through valgrind for the package
455    via `make check-valgrind`. If the package does not have valgrind testing
456    then it just skips over this.
457    """
458    # Valgrind testing is currently broken by an aggressive strcmp optimization
459    # that is inlined into optimized code for POWER by gcc 7+. Until we find
460    # a workaround, just don't run valgrind tests on POWER.
461    # https://github.com/openbmc/openbmc/issues/3315
462    if not is_valgrind_safe():
463        sys.stderr.write("###### Skipping valgrind ######\n")
464        return
465    if not make_target_exists('check-valgrind'):
466        return
467
468    try:
469        cmd = make_parallel + ['check-valgrind']
470        check_call_cmd(*cmd)
471    except CalledProcessError:
472        for root, _, files in os.walk(os.getcwd()):
473            for f in files:
474                if re.search(r'test-suite-[a-z]+\.log', f) is None:
475                    continue
476                check_call_cmd('cat', os.path.join(root, f))
477        raise Exception('Valgrind tests failed')
478
479
480def maybe_make_coverage():
481    """
482    Potentially runs the unit tests through code coverage for the package
483    via `make check-code-coverage`. If the package does not have code coverage
484    testing then it just skips over this.
485    """
486    if not make_target_exists('check-code-coverage'):
487        return
488
489    # Actually run code coverage
490    try:
491        cmd = make_parallel + ['check-code-coverage']
492        check_call_cmd(*cmd)
493    except CalledProcessError:
494        raise Exception('Code coverage failed')
495
496
497class BuildSystem(object):
498    """
499    Build systems generally provide the means to configure, build, install and
500    test software. The BuildSystem class defines a set of interfaces on top of
501    which Autotools, Meson, CMake and possibly other build system drivers can
502    be implemented, separating out the phases to control whether a package
503    should merely be installed or also tested and analyzed.
504    """
505    def __init__(self, package, path):
506        """Initialise the driver with properties independent of the build system
507
508        Keyword arguments:
509        package: The name of the package. Derived from the path if None
510        path: The path to the package. Set to the working directory if None
511        """
512        self.path = "." if not path else path
513        realpath = os.path.realpath(self.path)
514        self.package = package if package else os.path.basename(realpath)
515        self.build_for_testing = False
516
517    def probe(self):
518        """Test if the build system driver can be applied to the package
519
520        Return True if the driver can drive the package's build system,
521        otherwise False.
522
523        Generally probe() is implemented by testing for the presence of the
524        build system's configuration file(s).
525        """
526        raise NotImplementedError
527
528    def dependencies(self):
529        """Provide the package's dependencies
530
531        Returns a list of dependencies. If no dependencies are required then an
532        empty list must be returned.
533
534        Generally dependencies() is implemented by analysing and extracting the
535        data from the build system configuration.
536        """
537        raise NotImplementedError
538
539    def configure(self, build_for_testing):
540        """Configure the source ready for building
541
542        Should raise an exception if configuration failed.
543
544        Keyword arguments:
545        build_for_testing: Mark the package as being built for testing rather
546                           than for installation as a dependency for the
547                           package under test. Setting to True generally
548                           implies that the package will be configured to build
549                           with debug information, at a low level of
550                           optimisation and possibly with sanitizers enabled.
551
552        Generally configure() is implemented by invoking the build system
553        tooling to generate Makefiles or equivalent.
554        """
555        raise NotImplementedError
556
557    def build(self):
558        """Build the software ready for installation and/or testing
559
560        Should raise an exception if the build fails
561
562        Generally build() is implemented by invoking `make` or `ninja`.
563        """
564        raise NotImplementedError
565
566    def install(self):
567        """Install the software ready for use
568
569        Should raise an exception if installation fails
570
571        Like build(), install() is generally implemented by invoking `make` or
572        `ninja`.
573        """
574        raise NotImplementedError
575
576    def test(self):
577        """Build and run the test suite associated with the package
578
579        Should raise an exception if the build or testing fails.
580
581        Like install(), test() is generally implemented by invoking `make` or
582        `ninja`.
583        """
584        raise NotImplementedError
585
586    def analyze(self):
587        """Run any supported analysis tools over the codebase
588
589        Should raise an exception if analysis fails.
590
591        Some analysis tools such as scan-build need injection into the build
592        system. analyze() provides the necessary hook to implement such
593        behaviour. Analyzers independent of the build system can also be
594        specified here but at the cost of possible duplication of code between
595        the build system driver implementations.
596        """
597        raise NotImplementedError
598
599
600class Autotools(BuildSystem):
601    def __init__(self, package=None, path=None):
602        super(Autotools, self).__init__(package, path)
603
604    def probe(self):
605        return os.path.isfile(os.path.join(self.path, 'configure.ac'))
606
607    def dependencies(self):
608        configure_ac = os.path.join(self.path, 'configure.ac')
609
610        contents = ''
611        # Prepend some special function overrides so we can parse out
612        # dependencies
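        #
        # For example, PKG_CHECK_MODULES (offset 1 in DEPENDENCIES_OFFSET)
        # gets redefined as:
        #   m4_define([PKG_CHECK_MODULES],
        #             [PKG_CHECK_MODULES_START$2PKG_CHECK_MODULES_END])
        # so each macro invocation expands into a marker pair around its
        # module argument, which the regex below extracts.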
613        for macro in DEPENDENCIES.keys():
614            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
615                         str(DEPENDENCIES_OFFSET[macro] + 1) +
616                         macro + '_END])\n')
617        with open(configure_ac, "rt") as f:
618            contents += f.read()
619
620        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
621        autoconf_process = subprocess.Popen(autoconf_cmdline,
622                                            stdin=subprocess.PIPE,
623                                            stdout=subprocess.PIPE,
624                                            stderr=subprocess.PIPE)
625        document = contents.encode('utf-8')
626        (stdout, stderr) = autoconf_process.communicate(input=document)
627        if not stdout:
628            print(stderr)
629            raise Exception("Failed to run autoconf for parsing dependencies")
630
631        # Parse out all of the dependency text
632        matches = []
633        for macro in DEPENDENCIES.keys():
634            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
635            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
636                matches.append((match.group(1), match.group(2)))
637
638        # Look up dependencies from the text
639        found_deps = []
640        for macro, deptext in matches:
641            for potential_dep in deptext.split(' '):
642                for known_dep in DEPENDENCIES[macro].keys():
643                    if potential_dep.startswith(known_dep):
644                        found_deps.append(DEPENDENCIES[macro][known_dep])
645
646        return found_deps
647
648    def _configure_feature(self, flag, enabled):
649        """
650        Returns a configure flag as a string
651
652        Parameters:
653        flag                The name of the flag
654        enabled             Whether the flag is enabled or disabled
655        """
656        return '--' + ('enable' if enabled else 'disable') + '-' + flag
657
658    def configure(self, build_for_testing):
659        self.build_for_testing = build_for_testing
660        conf_flags = [
661            self._configure_feature('silent-rules', False),
662            self._configure_feature('examples', build_for_testing),
663            self._configure_feature('tests', build_for_testing),
664        ]
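        # For a build_for_testing run these resolve to flags such as
        #   --disable-silent-rules --enable-examples --enable-tests
        # (coverage/valgrind flags are appended below unless TEST_ONLY).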
665        if not TEST_ONLY:
666            conf_flags.extend([
667                self._configure_feature('code-coverage', build_for_testing),
668                self._configure_feature('valgrind', build_for_testing),
669            ])
670        # Add any necessary configure flags for package
671        if CONFIGURE_FLAGS.get(self.package) is not None:
672            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
673        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
674            if os.path.exists(bootstrap):
675                check_call_cmd('./' + bootstrap)
676                break
677        check_call_cmd('./configure', *conf_flags)
678
679    def build(self):
680        check_call_cmd(*make_parallel)
681
682    def install(self):
683        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))
684
685    def test(self):
686        try:
687            cmd = make_parallel + ['check']
688            for i in range(0, args.repeat):
689                check_call_cmd(*cmd)
690        except CalledProcessError:
691            for root, _, files in os.walk(os.getcwd()):
692                if 'test-suite.log' not in files:
693                    continue
694                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
695            raise Exception('Unit tests failed')
696
697    def analyze(self):
698        maybe_make_valgrind()
699        maybe_make_coverage()
700        run_cppcheck()
701
702
703class CMake(BuildSystem):
704    def __init__(self, package=None, path=None):
705        super(CMake, self).__init__(package, path)
706
707    def probe(self):
708        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))
709
710    def dependencies(self):
711        return []
712
713    def configure(self, build_for_testing):
714        self.build_for_testing = build_for_testing
715        check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')
716
717    def build(self):
718        check_call_cmd('cmake', '--build', '.', '--', '-j',
719                       str(multiprocessing.cpu_count()))
720
721    def install(self):
722        pass
723
724    def test(self):
725        if make_target_exists('test'):
726            check_call_cmd('ctest', '.')
727
728    def analyze(self):
729        if TEST_ONLY:
730            return
731
732        if os.path.isfile('.clang-tidy'):
733            check_call_cmd('run-clang-tidy-8.py', '-p', '.')
734        maybe_make_valgrind()
735        maybe_make_coverage()
736        run_cppcheck()
737
738
739class Meson(BuildSystem):
740    def __init__(self, package=None, path=None):
741        super(Meson, self).__init__(package, path)
742
743    def probe(self):
744        return os.path.isfile(os.path.join(self.path, 'meson.build'))
745
746    def dependencies(self):
747        meson_build = os.path.join(self.path, 'meson.build')
748        if not os.path.exists(meson_build):
749            return []
750
751        found_deps = []
752        for root, dirs, files in os.walk(self.path):
753            if 'meson.build' not in files:
754                continue
755            with open(os.path.join(root, 'meson.build'), 'rt') as f:
756                build_contents = f.read()
757            pattern = r"dependency\('([^']*)'.*?\)\n"
758            for match in re.finditer(pattern, build_contents):
759                group = match.group(1)
760                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
761                if maybe_dep is not None:
762                    found_deps.append(maybe_dep)
763
764        return found_deps
765
766    def _parse_options(self, options_file):
767        """
768        Returns a set of options defined in the provided meson_options.txt file
769
770        Parameters:
771        options_file        The file containing options
772        """
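        # For example, a meson_options.txt line such as
        #   option('tests', type : 'feature', value : 'enabled')
        # results in 'tests' being added to the returned set.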
773        options_contents = ''
774        with open(options_file, "rt") as f:
775            options_contents += f.read()
776        options = set()
777        pattern = 'option\\(\\s*\'([^\']*)\''
778        for match in re.compile(pattern).finditer(options_contents):
779            options.add(match.group(1))
780        return options
781
782    def _configure_feature(self, val):
783        """
784        Returns the meson flag which signifies the value
785
786        True is enabled which requires the feature.
787        False is disabled which disables the feature.
788        None is auto which autodetects the feature.
789
790        Parameters:
791        val                 The value being converted
792        """
793        if val is True:
794            return "enabled"
795        elif val is False:
796            return "disabled"
797        elif val is None:
798            return "auto"
799        else:
800            raise Exception("Bad meson feature value")
801
802    def configure(self, build_for_testing):
803        self.build_for_testing = build_for_testing
804        meson_options = set()
805        if os.path.exists("meson_options.txt"):
806            meson_options = self._parse_options("meson_options.txt")
807        meson_flags = [
808            '-Db_colorout=never',
809            '-Dwerror=true',
810            '-Dwarning_level=3',
811        ]
812        if build_for_testing:
813            meson_flags.append('--buildtype=debug')
814        else:
815            meson_flags.append('--buildtype=debugoptimized')
816        if 'tests' in meson_options:
817            flag_args = self._configure_feature(build_for_testing)
818            meson_flags.append('-Dtests=' + flag_args)
819        if 'examples' in meson_options:
820            meson_flags.append('-Dexamples=' + str(build_for_testing).lower())
821        if MESON_FLAGS.get(self.package) is not None:
822            meson_flags.extend(MESON_FLAGS.get(self.package))
823        try:
824            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
825                           *meson_flags)
826        except Exception:
827            shutil.rmtree('build')
828            check_call_cmd('meson', 'setup', 'build', *meson_flags)
829
830    def build(self):
831        check_call_cmd('ninja', '-C', 'build')
832
833    def install(self):
834        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
835
836    def test(self):
837        try:
838            check_call_cmd('meson', 'test', '-C', 'build')
839        except CalledProcessError:
840            for root, _, files in os.walk(os.getcwd()):
841                if 'testlog.txt' not in files:
842                    continue
843                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
844            raise Exception('Unit tests failed')
845
846    def _setup_exists(self, setup):
847        """
848        Returns whether the meson build supports the named test setup.
849
850        Parameter descriptions:
851        setup              The setup target to check
852        """
853        try:
854            with open(os.devnull, 'w') as devnull:
855                output = subprocess.check_output(
856                        ['meson', 'test', '-C', 'build',
857                         '--setup', setup, '-t', '0'],
858                        stderr=subprocess.STDOUT)
859        except CalledProcessError as e:
860            output = e.output
861        output = output.decode('utf-8')
862        return not re.search('Test setup .* not found from project', output)
863
864    def _maybe_valgrind(self):
865        """
866        Potentially runs the unit tests through valgrind for the package
867        via `meson test`. The package can specify custom valgrind
868        configurations by utilizing add_test_setup() in a meson.build
869        """
870        if not is_valgrind_safe():
871            sys.stderr.write("###### Skipping valgrind ######\n")
872            return
873        try:
874            if self._setup_exists('valgrind'):
875                check_call_cmd('meson', 'test', '-C', 'build',
876                               '--setup', 'valgrind')
877            else:
878                check_call_cmd('meson', 'test', '-C', 'build',
879                               '--wrapper', 'valgrind')
880        except CalledProcessError:
881            for root, _, files in os.walk(os.getcwd()):
882                if 'testlog-valgrind.txt' not in files:
883                    continue
884                cat_args = os.path.join(root, 'testlog-valgrind.txt')
885                check_call_cmd('cat', cat_args)
886            raise Exception('Valgrind tests failed')
887
888    def analyze(self):
889        if TEST_ONLY:
890            return
891
892        self._maybe_valgrind()
893
894        # Run clang-tidy only if the project has a configuration
895        if os.path.isfile('.clang-tidy'):
896            check_call_cmd('run-clang-tidy-8.py', '-p',
897                           'build')
898        # Run the basic clang static analyzer otherwise
899        else:
900            check_call_cmd('ninja', '-C', 'build',
901                           'scan-build')
902
903        # Run tests through sanitizers
904        # b_lundef is needed if clang++ is CXX since it resolves the
905        # asan symbols at runtime only. We don't want to set it earlier
906        # in the build process to ensure we don't have undefined
907        # runtime code.
908        if is_sanitize_safe():
909            check_call_cmd('meson', 'configure', 'build',
910                           '-Db_sanitize=address,undefined',
911                           '-Db_lundef=false')
912            check_call_cmd('meson', 'test', '-C', 'build',
913                           '--logbase', 'testlog-ubasan')
914            # TODO: Fix memory sanitizer
915            # check_call_cmd('meson', 'configure', 'build',
916            #                '-Db_sanitize=memory')
917            # check_call_cmd('meson', 'test', '-C', 'build'
918            #                '--logbase', 'testlog-msan')
919            check_call_cmd('meson', 'configure', 'build',
920                           '-Db_sanitize=none', '-Db_lundef=true')
921        else:
922            sys.stderr.write("###### Skipping sanitizers ######\n")
923
924        # Run coverage checks
925        check_call_cmd('meson', 'configure', 'build',
926                       '-Db_coverage=true')
927        self.test()
928        # Only build coverage HTML if coverage files were produced
929        for root, dirs, files in os.walk('build'):
930            if any([f.endswith('.gcda') for f in files]):
931                check_call_cmd('ninja', '-C', 'build',
932                               'coverage-html')
933                break
934        check_call_cmd('meson', 'configure', 'build',
935                       '-Db_coverage=false')
936        run_cppcheck()
937
938
939class Package(object):
940    def __init__(self, name=None, path=None):
941        self.supported = [Autotools, Meson, CMake]
942        self.name = name
943        self.path = path
944        self.test_only = False
945
946    def build_systems(self):
947        instances = (system(self.name, self.path) for system in self.supported)
948        return (instance for instance in instances if instance.probe())
949
950    def build_system(self, preferred=None):
951        systems = list(self.build_systems())
952
953        if not systems:
954            return None
955
956        if preferred:
957            return {type(system): system for system in systems}[preferred]
958
959        return next(iter(systems))
960
961    def install(self, system=None):
962        if not system:
963            system = self.build_system()
964
965        system.configure(False)
966        system.build()
967        system.install()
968
969    def _test_one(self, system):
970        system.configure(True)
971        system.build()
972        system.install()
973        system.test()
974        system.analyze()
975
976    def test(self):
977        for system in self.build_systems():
978            self._test_one(system)
979
980
981def find_file(filename, basedir):
982    """
983    Finds all occurrences of a file in the base directory
984    and passes them back with their relative paths.
985
986    Parameter descriptions:
987    filename              The name of the file to find
988    basedir               The base directory to search in
989    """
990
991    filepaths = []
992    for root, dirs, files in os.walk(basedir):
993        if filename in files:
994            filepaths.append(os.path.join(root, filename))
995    return filepaths
996
997
998if __name__ == '__main__':
999    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
1000    CONFIGURE_FLAGS = {
1001        'sdbusplus': ['--enable-transaction'],
1002        'phosphor-logging':
1003        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
1004         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
1005    }
1006
1007    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
1008    MESON_FLAGS = {
1009    }
1010
1011    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
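    # e.g. an AC_CHECK_LIB([mapper], ...) check in a configure.ac maps to a
    # dependency on the phosphor-objmgr repository.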
1012    DEPENDENCIES = {
1013        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
1014        'AC_CHECK_HEADER': {
1015            'host-ipmid': 'phosphor-host-ipmid',
1016            'blobs-ipmid': 'phosphor-ipmi-blobs',
1017            'sdbusplus': 'sdbusplus',
1018            'sdeventplus': 'sdeventplus',
1019            'stdplus': 'stdplus',
1020            'gpioplus': 'gpioplus',
1021            'phosphor-logging/log.hpp': 'phosphor-logging',
1022        },
1023        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
1024        'PKG_CHECK_MODULES': {
1025            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
1026            'openpower-dbus-interfaces': 'openpower-dbus-interfaces',
1027            'ibm-dbus-interfaces': 'ibm-dbus-interfaces',
1028            'libipmid': 'phosphor-host-ipmid',
1029            'libipmid-host': 'phosphor-host-ipmid',
1030            'sdbusplus': 'sdbusplus',
1031            'sdeventplus': 'sdeventplus',
1032            'stdplus': 'stdplus',
1033            'gpioplus': 'gpioplus',
1034            'phosphor-logging': 'phosphor-logging',
1035            'phosphor-snmp': 'phosphor-snmp',
1036            'ipmiblob': 'ipmi-blob-tool',
1037        },
1038    }
1039
1040    # Offset into array of macro parameters MACRO(0, 1, ...N)
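    # e.g. for PKG_CHECK_MODULES(prefix, modules, ...) the module list is
    # parameter index 1.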
1041    DEPENDENCIES_OFFSET = {
1042        'AC_CHECK_LIB': 0,
1043        'AC_CHECK_HEADER': 0,
1044        'AC_PATH_PROG': 1,
1045        'PKG_CHECK_MODULES': 1,
1046    }
1047
1048    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
1049    DEPENDENCIES_REGEX = {
1050        'phosphor-logging': r'\S+-dbus-interfaces$'
1051    }
1052
1053    # Set command line arguments
1054    parser = argparse.ArgumentParser()
1055    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
1056                        help="Workspace directory location (i.e. /home)")
1057    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
1058                        help="OpenBMC package to be unit tested")
1059    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
1060                        action="store_true", required=False, default=False,
1061                        help="Only run test cases, no other validation")
1062    parser.add_argument("-v", "--verbose", action="store_true",
1063                        help="Print additional package status messages")
1064    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
1065                        type=int, default=1)
1066    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
1067                        help="Branch to target for dependent repositories",
1068                        default="master")
1069    parser.add_argument("-n", "--noformat", dest="FORMAT",
1070                        action="store_false", required=False,
1071                        help="Whether or not to run the code format check")
1072    args = parser.parse_args(sys.argv[1:])
1073    WORKSPACE = args.WORKSPACE
1074    UNIT_TEST_PKG = args.PACKAGE
1075    TEST_ONLY = args.TEST_ONLY
1076    BRANCH = args.BRANCH
1077    FORMAT_CODE = args.FORMAT
1078    if args.verbose:
1079        def printline(*line):
1080            for arg in line:
1081                print(arg, end=' ')
1082            print()
1083    else:
1084        def printline(*line):
1085            pass
1086
1087    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)
1088
1089    # First validate code formatting if repo has style formatting files.
1090    # The format-code.sh checks for these files.
1091    if FORMAT_CODE:
1092        check_call_cmd("./format-code.sh", CODE_SCAN_DIR)
1093
1094    # Check if this repo has a supported make infrastructure
1095    pkg = Package(UNIT_TEST_PKG, os.path.join(WORKSPACE, UNIT_TEST_PKG))
1096    if not pkg.build_system():
1097        print("No valid build system, exit")
1098        sys.exit(0)
1099
1100    prev_umask = os.umask(0o000)
1101
1102    # Determine dependencies and add them
1103    dep_added = dict()
1104    dep_added[UNIT_TEST_PKG] = False
1105
1106    # Create dependency tree
1107    dep_tree = DepTree(UNIT_TEST_PKG)
1108    build_dep_tree(UNIT_TEST_PKG,
1109                   os.path.join(WORKSPACE, UNIT_TEST_PKG),
1110                   dep_added,
1111                   dep_tree,
1112                   BRANCH)
1113
1114    # Reorder Dependency Tree
1115    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
1116        dep_tree.ReorderDeps(pkg_name, regex_str)
1117    if args.verbose:
1118        dep_tree.PrintTree()
1119
1120    install_list = dep_tree.GetInstallList()
1121
1122    # We don't want to treat our package as a dependency
1123    install_list.remove(UNIT_TEST_PKG)
1124
1125    # Install reordered dependencies
1126    for dep in install_list:
1127        build_and_install(dep, False)
1128
1129    # Run package unit tests
1130    build_and_install(UNIT_TEST_PKG, True)
1131
1132    os.umask(prev_umask)
1133
1134    # Run any custom CI scripts the repo has; there can be multiple of
1135    # these, located anywhere in the repository.
1136    ci_scripts = find_file('run-ci.sh', os.path.join(WORKSPACE, UNIT_TEST_PKG))
1137    if ci_scripts:
1138        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
1139        for ci_script in ci_scripts:
1140            check_call_cmd('sh', ci_script)
1141