1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5build configuration (configure.ac or meson.build), then downloads, configures,
6builds, and installs each of those dependencies. Finally, the given package
7itself is configured, built, and installed prior to executing its unit tests.
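
Example invocation (illustrative workspace path and package name):

    ./unit-test.py --workspace /home/dev/ws --package sdbusplus --verbose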
8"""
9
10from git import Repo
11from mesonbuild import coredata, optinterpreter
12from urllib.parse import urljoin
13from subprocess import check_call, call, CalledProcessError
14import os
15import sys
16import argparse
17import multiprocessing
18import re
19import subprocess
20import shutil
21import platform
22
23
24class DepTree():
25    """
26    Represents package dependency tree, where each node is a DepTree with a
27    name and DepTree children.
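
    Example (illustrative; 'app' and 'libfoo' are placeholder names):
        >>> tree = DepTree('app')
        >>> _ = tree.AddChild('libfoo')
        >>> tree.GetPath('libfoo')
        ['app', 'libfoo']
        >>> tree.GetInstallList()
        ['libfoo', 'app']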
28    """
29
30    def __init__(self, name):
31        """
32        Create new DepTree.
33
34        Parameter descriptions:
35        name               Name of new tree node.
36        """
37        self.name = name
38        self.children = list()
39
40    def AddChild(self, name):
41        """
42        Add new child node to current node.
43
44        Parameter descriptions:
45        name               Name of new child
46        """
47        new_child = DepTree(name)
48        self.children.append(new_child)
49        return new_child
50
51    def AddChildNode(self, node):
52        """
53        Add existing child node to current node.
54
55        Parameter descriptions:
56        node               Tree node to add
57        """
58        self.children.append(node)
59
60    def RemoveChild(self, name):
61        """
62        Remove child node.
63
64        Parameter descriptions:
65        name               Name of child to remove
66        """
67        for child in self.children:
68            if child.name == name:
69                self.children.remove(child)
70                return
71
72    def GetNode(self, name):
73        """
74        Return node with matching name. Return None if not found.
75
76        Parameter descriptions:
77        name               Name of node to return
78        """
79        if self.name == name:
80            return self
81        for child in self.children:
82            node = child.GetNode(name)
83            if node:
84                return node
85        return None
86
87    def GetParentNode(self, name, parent_node=None):
88        """
89        Return parent of node with matching name. Return None if not found.
90
91        Parameter descriptions:
92        name               Name of node to get parent of
93        parent_node        Parent of current node
94        """
95        if self.name == name:
96            return parent_node
97        for child in self.children:
98            found_node = child.GetParentNode(name, self)
99            if found_node:
100                return found_node
101        return None
102
103    def GetPath(self, name, path=None):
104        """
105        Return list of node names from head to matching name.
106        Return None if not found.
107
108        Parameter descriptions:
109        name               Name of node
110        path               List of node names from head to current node
111        """
112        if not path:
113            path = []
114        if self.name == name:
115            path.append(self.name)
116            return path
117        for child in self.children:
118            match = child.GetPath(name, path + [self.name])
119            if match:
120                return match
121        return None
122
123    def GetPathRegex(self, name, regex_str, path=None):
124        """
125        Return list of node paths that end in name, or match regex_str.
126        Return empty list if not found.
127
128        Parameter descriptions:
129        name               Name of node to search for
130        regex_str          Regex string to match node names
131        path               Path of node names from head to current node
132        """
133        new_paths = []
134        if not path:
135            path = []
136        match = re.match(regex_str, self.name)
137        if (self.name == name) or (match):
138            new_paths.append(path + [self.name])
139        for child in self.children:
140            return_paths = None
141            full_path = path + [self.name]
142            return_paths = child.GetPathRegex(name, regex_str, full_path)
143            for i in return_paths:
144                new_paths.append(i)
145        return new_paths
146
147    def MoveNode(self, from_name, to_name):
148        """
149        Move existing from_name node to become a child of the to_name node.
150
151        Parameter descriptions:
152        from_name          Name of node to make a child of to_name
153        to_name            Name of node to make parent of from_name
154        """
155        parent_from_node = self.GetParentNode(from_name)
156        from_node = self.GetNode(from_name)
157        parent_from_node.RemoveChild(from_name)
158        to_node = self.GetNode(to_name)
159        to_node.AddChildNode(from_node)
160
161    def ReorderDeps(self, name, regex_str):
162        """
163        Reorder dependency tree.  If tree contains nodes with names that
164        match 'name' and 'regex_str', move 'regex_str' nodes that are
165        to the right of 'name' node, so that they become children of the
166        'name' node.
167
168        Parameter descriptions:
169        name               Name of node to look for
170        regex_str          Regex string to match names to
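
        Example (illustrative; placeholder names, simplified regex):
            >>> head = DepTree('app')
            >>> _ = head.AddChild('phosphor-logging')
            >>> _ = head.AddChild('phosphor-dbus-interfaces')
            >>> head.ReorderDeps('phosphor-logging', '.*-dbus-interfaces$')
            >>> head.GetInstallList()
            ['phosphor-dbus-interfaces', 'phosphor-logging', 'app']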
171        """
172        name_path = self.GetPath(name)
173        if not name_path:
174            return
175        paths = self.GetPathRegex(name, regex_str)
176        is_name_in_paths = False
177        name_index = 0
178        for i in range(len(paths)):
179            path = paths[i]
180            if path[-1] == name:
181                is_name_in_paths = True
182                name_index = i
183                break
184        if not is_name_in_paths:
185            return
186        for i in range(name_index + 1, len(paths)):
187            path = paths[i]
188            if name in path:
189                continue
190            from_name = path[-1]
191            self.MoveNode(from_name, name)
192
193    def GetInstallList(self):
194        """
195        Return post-order list of node names.
196
197        Parameter descriptions:
198        """
199        install_list = []
200        for child in self.children:
201            child_install_list = child.GetInstallList()
202            install_list.extend(child_install_list)
203        install_list.append(self.name)
204        return install_list
205
206    def PrintTree(self, level=0):
207        """
208        Print pre-order node names with indentation denoting node depth level.
209
210        Parameter descriptions:
211        level              Current depth level
212        """
213        INDENT_PER_LEVEL = 4
214        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
215        for child in self.children:
216            child.PrintTree(level + 1)
217
218
219def check_call_cmd(*cmd):
220    """
221    Prints (when verbose) the directory the given command is called from and
222    the command itself, then executes the command using check_call.
223
224    Parameter descriptions:
225    cmd                 List of parameters constructing the complete command
227    """
228    printline(os.getcwd(), ">", " ".join(cmd))
229    check_call(cmd)
230
231
232def clone_pkg(pkg, branch):
233    """
234    Clone the given openbmc package's git repository from gerrit into
235    the WORKSPACE location
236
237    Parameter descriptions:
238    pkg                 Name of the package to clone
239    branch              Branch to clone from pkg
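
    Example (illustrative): pkg 'sdbusplus' is cloned from
    https://gerrit.openbmc-project.xyz/openbmc/sdbusplus into
    WORKSPACE/sdbusplus.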
240    """
241    pkg_dir = os.path.join(WORKSPACE, pkg)
242    if os.path.exists(os.path.join(pkg_dir, '.git')):
243        return pkg_dir
244    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
245    os.mkdir(pkg_dir)
246    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
247    try:
248        # first try the branch
249        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
250        repo_inst = clone.working_dir
251    except Exception:
252        printline("Input branch not found, defaulting to master")
253        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
254        repo_inst = clone.working_dir
255    return repo_inst
256
257
258def make_target_exists(target):
259    """
260    Runs a check against the makefile in the current directory to determine
261    if the target exists so that it can be built.
262
263    Parameter descriptions:
264    target              The make target we are checking
265    """
266    try:
267        cmd = ['make', '-n', target]
268        with open(os.devnull, 'w') as devnull:
269            check_call(cmd, stdout=devnull, stderr=devnull)
270        return True
271    except CalledProcessError:
272        return False
273
274
275make_parallel = [
276    'make',
277    # Run enough jobs to saturate all the cpus
278    '-j', str(multiprocessing.cpu_count()),
279    # Don't start more jobs if the load avg is too high
280    '-l', str(multiprocessing.cpu_count()),
281    # Synchronize the output so logs aren't intermixed in stdout / stderr
282    '-O',
283]
284
285
286def build_and_install(name, build_for_testing=False):
287    """
288    Builds and installs the package in the environment. Optionally
289    builds the examples and test cases for the package.
290
291    Parameter descriptions:
292    name                The name of the package we are building
293    build_for_testing   Whether to enable testing options for the package
294    """
295    os.chdir(os.path.join(WORKSPACE, name))
296
297    # Refresh dynamic linker run time bindings for dependencies
298    check_call_cmd('sudo', '-n', '--', 'ldconfig')
299
300    pkg = Package()
301    if build_for_testing:
302        pkg.test()
303    else:
304        pkg.install()
305
306
307def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
308    """
309    For each package (name), starting with the package to be unit tested,
310    extract its dependencies. For each package dependency defined, recursively
311    apply the same strategy.
312
313    Parameter descriptions:
314    name                Name of the package
315    pkgdir              Directory where package source is located
316    dep_added           Current dict of dependencies and added status
317    head                Head node of the dependency tree
318    branch              Branch to clone from pkg
319    dep_tree            Current dependency tree node
320    """
321    if not dep_tree:
322        dep_tree = head
323
324    with open("/tmp/depcache", "r") as depcache:
325        cache = depcache.readline()
326
327    # Read out pkg dependencies
328    pkg = Package(name, pkgdir)
329
330    for dep in set(pkg.build_system().dependencies()):
331        if dep in cache:
332            continue
333        # Dependency package not already known
334        if dep_added.get(dep) is None:
335            # Dependency package not added
336            new_child = dep_tree.AddChild(dep)
337            dep_added[dep] = False
338            dep_pkgdir = clone_pkg(dep, branch)
339            # Determine this dependency package's
340            # dependencies and add them before
341            # returning to add this package
342            dep_added = build_dep_tree(dep,
343                                       dep_pkgdir,
344                                       dep_added,
345                                       head,
346                                       branch,
347                                       new_child)
348        else:
349            # Dependency package known and added
350            if dep_added[dep]:
351                continue
352            else:
353                # Cyclic dependency failure
354                raise Exception("Cyclic dependencies found in "+name)
355
356    if not dep_added[name]:
357        dep_added[name] = True
358
359    return dep_added
360
361
362def run_cppcheck():
363    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
364    cppcheck_files = []
365    stdout = subprocess.check_output(['git', 'ls-files'])
366
367    for f in stdout.decode('utf-8').split():
368        if match_re.match(f):
369            cppcheck_files.append(f)
370
371    if not cppcheck_files:
372        # skip cppcheck if there aren't any C or C++ sources.
373        print("no files")
374        return None
375
376    # http://cppcheck.sourceforge.net/manual.pdf
377    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
378              '--enable=all', '--library=googletest', '--file-list=-']
379
380    cppcheck_process = subprocess.Popen(
381        params,
382        stdout=subprocess.PIPE,
383        stderr=subprocess.PIPE,
384        stdin=subprocess.PIPE)
385    (stdout, stderr) = cppcheck_process.communicate(
386        input='\n'.join(cppcheck_files).encode('utf-8'))
387
388    if cppcheck_process.wait():
389        raise Exception('Cppcheck failed')
390    print(stdout.decode('utf-8'))
391    print(stderr.decode('utf-8'))
392
393
394def is_valgrind_safe():
395    """
396    Returns whether it is safe to run valgrind on our platform
397    """
398    src = 'unit-test-vg.c'
399    exe = './unit-test-vg'
400    with open(src, 'w') as h:
401        h.write('#include <errno.h>\n')
402        h.write('#include <stdio.h>\n')
403        h.write('#include <stdlib.h>\n')
404        h.write('#include <string.h>\n')
405        h.write('int main() {\n')
406        h.write('char *heap_str = malloc(16);\n')
407        h.write('strcpy(heap_str, "RandString");\n')
408        h.write('int res = strcmp("RandString", heap_str);\n')
409        h.write('free(heap_str);\n')
410        h.write('char errstr[64];\n')
411        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
412        h.write('printf("%s\\n", errstr);\n')
413        h.write('return res;\n')
414        h.write('}\n')
415    try:
416        with open(os.devnull, 'w') as devnull:
417            check_call(['gcc', '-O2', '-o', exe, src],
418                       stdout=devnull, stderr=devnull)
419            check_call(['valgrind', '--error-exitcode=99', exe],
420                       stdout=devnull, stderr=devnull)
421        return True
422    except Exception:
423        sys.stderr.write("###### Platform is not valgrind safe ######\n")
424        return False
425    finally:
426        os.remove(src)
427        os.remove(exe)
428
429
430def is_sanitize_safe():
431    """
432    Returns whether it is safe to run sanitizers on our platform
433    """
434    src = 'unit-test-sanitize.c'
435    exe = './unit-test-sanitize'
436    with open(src, 'w') as h:
437        h.write('int main() { return 0; }\n')
438    try:
439        with open(os.devnull, 'w') as devnull:
440            check_call(['gcc', '-O2', '-fsanitize=address',
441                        '-fsanitize=undefined', '-o', exe, src],
442                       stdout=devnull, stderr=devnull)
443            check_call([exe], stdout=devnull, stderr=devnull)
444        return True
445    except Exception:
446        sys.stderr.write("###### Platform is not sanitize safe ######\n")
447        return False
448    finally:
449        os.remove(src)
450        os.remove(exe)
451
452
453def maybe_make_valgrind():
454    """
455    Potentially runs the unit tests through valgrind for the package
456    via `make check-valgrind`. If the package does not have valgrind testing
457    then it just skips over this.
458    """
459    # Valgrind testing is currently broken by an aggressive strcmp optimization
460    # that is inlined into optimized code for POWER by gcc 7+. Until we find
461    # a workaround, just don't run valgrind tests on POWER.
462    # https://github.com/openbmc/openbmc/issues/3315
463    if not is_valgrind_safe():
464        sys.stderr.write("###### Skipping valgrind ######\n")
465        return
466    if not make_target_exists('check-valgrind'):
467        return
468
469    try:
470        cmd = make_parallel + ['check-valgrind']
471        check_call_cmd(*cmd)
472    except CalledProcessError:
473        for root, _, files in os.walk(os.getcwd()):
474            for f in files:
475                if re.search('test-suite-[a-z]+.log', f) is None:
476                    continue
477                check_call_cmd('cat', os.path.join(root, f))
478        raise Exception('Valgrind tests failed')
479
480
481def maybe_make_coverage():
482    """
483    Potentially runs the unit tests through code coverage for the package
484    via `make check-code-coverage`. If the package does not have code coverage
485    testing then it just skips over this.
486    """
487    if not make_target_exists('check-code-coverage'):
488        return
489
490    # Actually run code coverage
491    try:
492        cmd = make_parallel + ['check-code-coverage']
493        check_call_cmd(*cmd)
494    except CalledProcessError:
495        raise Exception('Code coverage failed')
496
497
498class BuildSystem(object):
499    """
500    Build systems generally provide the means to configure, build, install and
501    test software. The BuildSystem class defines a set of interfaces on top of
502    which Autotools, Meson, CMake and possibly other build system drivers can
503    be implemented, separating out the phases to control whether a package
504    should merely be installed or also tested and analyzed.
505    """
506    def __init__(self, package, path):
507        """Initialise the driver with properties independent of the build system
508
509        Keyword arguments:
510        package: The name of the package. Derived from the path if None
511        path: The path to the package. Set to the working directory if None
512        """
513        self.path = "." if not path else path
514        realpath = os.path.realpath(self.path)
515        self.package = package if package else os.path.basename(realpath)
516        self.build_for_testing = False
517
518    def probe(self):
519        """Test if the build system driver can be applied to the package
520
521        Return True if the driver can drive the package's build system,
522        otherwise False.
523
524        Generally probe() is implemented by testing for the presence of the
525        build system's configuration file(s).
526        """
527        raise NotImplementedError
528
529    def dependencies(self):
530        """Provide the package's dependencies
531
532        Returns a list of dependencies. If no dependencies are required then an
533        empty list must be returned.
534
535        Generally dependencies() is implemented by analysing and extracting the
536        data from the build system configuration.
537        """
538        raise NotImplementedError
539
540    def configure(self, build_for_testing):
541        """Configure the source ready for building
542
543        Should raise an exception if configuration failed.
544
545        Keyword arguments:
546        build_for_testing: Mark the package as being built for testing rather
547                           than for installation as a dependency for the
548                           package under test. Setting to True generally
549                           implies that the package will be configured to build
550                           with debug information, at a low level of
551                           optimisation and possibly with sanitizers enabled.
552
553        Generally configure() is implemented by invoking the build system
554        tooling to generate Makefiles or equivalent.
555        """
556        raise NotImplementedError
557
558    def build(self):
559        """Build the software ready for installation and/or testing
560
561        Should raise an exception if the build fails
562
563        Generally build() is implemented by invoking `make` or `ninja`.
564        """
565        raise NotImplementedError
566
567    def install(self):
568        """Install the software ready for use
569
570        Should raise an exception if installation fails
571
572        Like build(), install() is generally implemented by invoking `make` or
573        `ninja`.
574        """
575        raise NotImplementedError
576
577    def test(self):
578        """Build and run the test suite associated with the package
579
580        Should raise an exception if the build or testing fails.
581
582        Like install(), test() is generally implemented by invoking `make` or
583        `ninja`.
584        """
585        raise NotImplementedError
586
587    def analyze(self):
588        """Run any supported analysis tools over the codebase
589
590        Should raise an exception if analysis fails.
591
592        Some analysis tools such as scan-build need injection into the build
593        system. analyze() provides the necessary hook to implement such
594        behaviour. Analyzers independent of the build system can also be
595        specified here but at the cost of possible duplication of code between
596        the build system driver implementations.
597        """
598        raise NotImplementedError
599
600
601class Autotools(BuildSystem):
602    def __init__(self, package=None, path=None):
603        super(Autotools, self).__init__(package, path)
604
605    def probe(self):
606        return os.path.isfile(os.path.join(self.path, 'configure.ac'))
607
608    def dependencies(self):
609        configure_ac = os.path.join(self.path, 'configure.ac')
610
611        contents = ''
612        # Prepend some special function overrides so we can parse out
613        # dependencies
614        for macro in DEPENDENCIES.keys():
615            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
616                         str(DEPENDENCIES_OFFSET[macro] + 1) +
617                         macro + '_END])\n')
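        # With the overrides above, autoconf expands a call such as
        #   PKG_CHECK_MODULES([FOO], [sdbusplus], ...)
        # into "PKG_CHECK_MODULES_STARTsdbusplusPKG_CHECK_MODULES_END",
        # which the regexes below pick apart to recover the dependency
        # name (illustrative macro arguments).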
618        with open(configure_ac, "rt") as f:
619            contents += f.read()
620
621        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
622        autoconf_process = subprocess.Popen(autoconf_cmdline,
623                                            stdin=subprocess.PIPE,
624                                            stdout=subprocess.PIPE,
625                                            stderr=subprocess.PIPE)
626        document = contents.encode('utf-8')
627        (stdout, stderr) = autoconf_process.communicate(input=document)
628        if not stdout:
629            print(stderr)
630            raise Exception("Failed to run autoconf for parsing dependencies")
631
632        # Parse out all of the dependency text
633        matches = []
634        for macro in DEPENDENCIES.keys():
635            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
636            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
637                matches.append((match.group(1), match.group(2)))
638
639        # Look up dependencies from the text
640        found_deps = []
641        for macro, deptext in matches:
642            for potential_dep in deptext.split(' '):
643                for known_dep in DEPENDENCIES[macro].keys():
644                    if potential_dep.startswith(known_dep):
645                        found_deps.append(DEPENDENCIES[macro][known_dep])
646
647        return found_deps
648
649    def _configure_feature(self, flag, enabled):
650        """
651        Returns a configure flag as a string
652
653        Parameters:
654        flag                The name of the flag
655        enabled             Whether the flag is enabled or disabled
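
        Example: _configure_feature('tests', True) returns '--enable-tests';
        _configure_feature('valgrind', False) returns '--disable-valgrind'.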
656        """
657        return '--' + ('enable' if enabled else 'disable') + '-' + flag
658
659    def configure(self, build_for_testing):
660        self.build_for_testing = build_for_testing
661        conf_flags = [
662            self._configure_feature('silent-rules', False),
663            self._configure_feature('examples', build_for_testing),
664            self._configure_feature('tests', build_for_testing),
665            self._configure_feature('itests', INTEGRATION_TEST),
666        ]
667        if not TEST_ONLY:
668            conf_flags.extend([
669                self._configure_feature('code-coverage', build_for_testing),
670                self._configure_feature('valgrind', build_for_testing),
671            ])
672        # Add any necessary configure flags for package
673        if CONFIGURE_FLAGS.get(self.package) is not None:
674            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
675        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
676            if os.path.exists(bootstrap):
677                check_call_cmd('./' + bootstrap)
678                break
679        check_call_cmd('./configure', *conf_flags)
680
681    def build(self):
682        check_call_cmd(*make_parallel)
683
684    def install(self):
685        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))
686
687    def test(self):
688        try:
689            cmd = make_parallel + ['check']
690            for i in range(0, args.repeat):
691                check_call_cmd(*cmd)
692        except CalledProcessError:
693            for root, _, files in os.walk(os.getcwd()):
694                if 'test-suite.log' not in files:
695                    continue
696                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
697            raise Exception('Unit tests failed')
698
699    def analyze(self):
700        maybe_make_valgrind()
701        maybe_make_coverage()
702        run_cppcheck()
703
704
705class CMake(BuildSystem):
706    def __init__(self, package=None, path=None):
707        super(CMake, self).__init__(package, path)
708
709    def probe(self):
710        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))
711
712    def dependencies(self):
713        return []
714
715    def configure(self, build_for_testing):
716        self.build_for_testing = build_for_testing
717        if INTEGRATION_TEST:
718            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
719                           '-DITESTS=ON', '.')
720        else:
721            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')
722
723    def build(self):
724        check_call_cmd('cmake', '--build', '.', '--', '-j',
725                       str(multiprocessing.cpu_count()))
726
727    def install(self):
728        pass
729
730    def test(self):
731        if make_target_exists('test'):
732            check_call_cmd('ctest', '.')
733
734    def analyze(self):
735        if TEST_ONLY:
736            return
737
738        if os.path.isfile('.clang-tidy'):
739            try:
740                os.mkdir("tidy-build")
741            except FileExistsError:
742                pass
743            # clang-tidy needs to run on a clang-specific build
744            check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
745                           '-DCMAKE_CXX_COMPILER=clang++',
746                           '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
747                           '-H.',
748                           '-Btidy-build')
749            # we need to cd here because otherwise clang-tidy doesn't find the
750            # .clang-tidy file in the roots of repos.  It's arguably a "bug"
751            # with run-clang-tidy; at a minimum it's "weird" that it requires
752            # the .clang-tidy to be up a dir.
753            os.chdir("tidy-build")
754            try:
755                check_call_cmd('run-clang-tidy.py', "-header-filter=.*", '-p',
756                               '.')
757            finally:
758                os.chdir("..")
759
760        maybe_make_valgrind()
761        maybe_make_coverage()
762        run_cppcheck()
763
764
765class Meson(BuildSystem):
766    def __init__(self, package=None, path=None):
767        super(Meson, self).__init__(package, path)
768
769    def probe(self):
770        return os.path.isfile(os.path.join(self.path, 'meson.build'))
771
772    def dependencies(self):
773        meson_build = os.path.join(self.path, 'meson.build')
774        if not os.path.exists(meson_build):
775            return []
776
777        found_deps = []
778        for root, dirs, files in os.walk(self.path):
779            if 'meson.build' not in files:
780                continue
781            with open(os.path.join(root, 'meson.build'), 'rt') as f:
782                build_contents = f.read()
783            pattern = r"dependency\('([^']*)'.*?\),?\n"
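            # e.g. matches "dependency('sdbusplus', required: true)" and
            # captures 'sdbusplus' (illustrative meson.build fragment)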
784            for match in re.finditer(pattern, build_contents):
785                group = match.group(1)
786                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
787                if maybe_dep is not None:
788                    found_deps.append(maybe_dep)
789
790        return found_deps
791
792    def _parse_options(self, options_file):
793        """
794        Returns a set of options defined in the provided meson_options.txt file
795
796        Parameters:
797        options_file        The file containing options
798        """
799        oi = optinterpreter.OptionInterpreter('')
800        oi.process(options_file)
801        return oi.options
802
803    def _configure_boolean(self, val):
804        """
805        Returns the meson string representation of a boolean value.
806
807        True maps to 'true', enabling the option.
808        False maps to 'false', disabling the option.
809
810        Parameters:
811        val                 The value being converted
812        """
813        if val is True:
814            return 'true'
815        elif val is False:
816            return 'false'
817        else:
818            raise Exception("Bad meson boolean value")
819
820    def _configure_feature(self, val):
821        """
822        Returns the meson string representation of a feature value.
823
824        True maps to 'enabled', which requires the feature.
825        False maps to 'disabled', which disables the feature.
826        None maps to 'auto', which autodetects the feature.
827
828        Parameters:
829        val                 The value being converted
830        """
831        if val is True:
832            return "enabled"
833        elif val is False:
834            return "disabled"
835        elif val is None:
836            return "auto"
837        else:
838            raise Exception("Bad meson feature value")
839
840    def _configure_option(self, opts, key, val):
841        """
842        Returns the meson flag which signifies the value
843        based on the type of the option
844
845        Parameters:
846        opts                The parsed meson options for the package
847        key                 The name of the option being set
848        val                 The value being converted
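
        Example: for a boolean 'tests' option, a True val yields '-Dtests=true';
        for a feature 'tests' option it would yield '-Dtests=enabled'.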
848        """
849        if isinstance(opts[key], coredata.UserBooleanOption):
850            str_val = self._configure_boolean(val)
851        elif isinstance(opts[key], coredata.UserFeatureOption):
852            str_val = self._configure_feature(val)
853        else:
854            raise Exception('Unknown meson option type')
855        return "-D{}={}".format(key, str_val)
856
857    def configure(self, build_for_testing):
858        self.build_for_testing = build_for_testing
859        meson_options = {}
860        if os.path.exists("meson_options.txt"):
861            meson_options = self._parse_options("meson_options.txt")
862        meson_flags = [
863            '-Db_colorout=never',
864            '-Dwerror=true',
865            '-Dwarning_level=3',
866        ]
867        if build_for_testing:
868            meson_flags.append('--buildtype=debug')
869        else:
870            meson_flags.append('--buildtype=debugoptimized')
871        if 'tests' in meson_options:
872            meson_flags.append(self._configure_option(
873                meson_options, 'tests', build_for_testing))
874        if 'examples' in meson_options:
875            meson_flags.append(self._configure_option(
876                meson_options, 'examples', build_for_testing))
877        if 'itests' in meson_options:
878            meson_flags.append(self._configure_option(
879                meson_options, 'itests', INTEGRATION_TEST))
877        if MESON_FLAGS.get(self.package) is not None:
878            meson_flags.extend(MESON_FLAGS.get(self.package))
879        try:
880            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
881                           *meson_flags)
882        except Exception:
883            shutil.rmtree('build', ignore_errors=True)
884            check_call_cmd('meson', 'setup', 'build', *meson_flags)
885
886    def build(self):
887        check_call_cmd('ninja', '-C', 'build')
888
889    def install(self):
890        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
891
892    def test(self):
893        try:
894            test_args = ('--repeat', str(args.repeat), '-C', 'build')
895            check_call_cmd('meson', 'test', *test_args)
896
897        except CalledProcessError:
898            for root, _, files in os.walk(os.getcwd()):
899                if 'testlog.txt' not in files:
900                    continue
901                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
902            raise Exception('Unit tests failed')
903
904    def _setup_exists(self, setup):
905        """
906        Returns whether the meson build supports the named test setup.
907
908        Parameter descriptions:
909        setup              The setup target to check
910        """
911        try:
912            output = subprocess.check_output(
913                ['meson', 'test', '-C', 'build',
914                 '--setup', setup, '-t', '0'],
915                stderr=subprocess.STDOUT)
917        except CalledProcessError as e:
918            output = e.output
919        output = output.decode('utf-8')
920        return not re.search('Test setup .* not found from project', output)
921
922    def _maybe_valgrind(self):
923        """
924        Potentially runs the unit tests through valgrind for the package
925        via `meson test`. The package can specify custom valgrind
926        configurations by utilizing add_test_setup() in a meson.build
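        (illustrative snippet: add_test_setup('valgrind',
        exe_wrapper: ['valgrind', '--error-exitcode=1']))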
927        """
928        if not is_valgrind_safe():
929            sys.stderr.write("###### Skipping valgrind ######\n")
930            return
931        try:
932            if self._setup_exists('valgrind'):
933                check_call_cmd('meson', 'test', '-C', 'build',
934                               '--setup', 'valgrind')
935            else:
936                check_call_cmd('meson', 'test', '-C', 'build',
937                               '--wrapper', 'valgrind')
938        except CalledProcessError:
939            for root, _, files in os.walk(os.getcwd()):
940                if 'testlog-valgrind.txt' not in files:
941                    continue
942                cat_args = os.path.join(root, 'testlog-valgrind.txt')
943                check_call_cmd('cat', cat_args)
944            raise Exception('Valgrind tests failed')
945
946    def analyze(self):
947        if TEST_ONLY:
948            return
949
950        self._maybe_valgrind()
951
952        # Run clang-tidy only if the project has a configuration
953        if os.path.isfile('.clang-tidy'):
954            os.environ["CXX"] = "clang++"
955            check_call_cmd('meson', 'setup', 'build-clang')
956            check_call_cmd('run-clang-tidy.py', '-p',
957                           'build-clang')
958        # Run the basic clang static analyzer otherwise
959        else:
960            check_call_cmd('ninja', '-C', 'build',
961                           'scan-build')
962
963        # Run tests through sanitizers
964        # b_lundef is needed if clang++ is CXX since it resolves the
965        # asan symbols at runtime only. We don't want to set it earlier
966        # in the build process to ensure we don't have undefined
967        # runtime code.
968        if is_sanitize_safe():
969            check_call_cmd('meson', 'configure', 'build',
970                           '-Db_sanitize=address,undefined',
971                           '-Db_lundef=false')
972            check_call_cmd('meson', 'test', '-C', 'build',
973                           '--logbase', 'testlog-ubasan')
974            # TODO: Fix memory sanitizer
975            # check_call_cmd('meson', 'configure', 'build',
976            #                '-Db_sanitize=memory')
977            # check_call_cmd('meson', 'test', '-C', 'build'
978            #                '--logbase', 'testlog-msan')
979            check_call_cmd('meson', 'configure', 'build',
980                           '-Db_sanitize=none')
981        else:
982            sys.stderr.write("###### Skipping sanitizers ######\n")
983
984        # Run coverage checks
985        check_call_cmd('meson', 'configure', 'build',
986                       '-Db_coverage=true')
987        self.test()
988        # Only build coverage HTML if coverage files were produced
989        for root, dirs, files in os.walk('build'):
990            if any([f.endswith('.gcda') for f in files]):
991                check_call_cmd('ninja', '-C', 'build',
992                               'coverage-html')
993                break
994        check_call_cmd('meson', 'configure', 'build',
995                       '-Db_coverage=false')
996        run_cppcheck()
997
998
999class Package(object):
1000    def __init__(self, name=None, path=None):
1001        self.supported = [Meson, Autotools, CMake]
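        # Probe order doubles as preference order: if a repository provides
        # more than one supported build system, the first match wins.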
1002        self.name = name
1003        self.path = path
1004        self.test_only = False
1005
1006    def build_systems(self):
1007        instances = (system(self.name, self.path) for system in self.supported)
1008        return (instance for instance in instances if instance.probe())
1009
1010    def build_system(self, preferred=None):
1011        systems = list(self.build_systems())
1012
1013        if not systems:
1014            return None
1015
1016        if preferred:
1017            return {type(system): system for system in systems}[preferred]
1018
1019        return next(iter(systems))
1020
1021    def install(self, system=None):
1022        if not system:
1023            system = self.build_system()
1024
1025        system.configure(False)
1026        system.build()
1027        system.install()
1028
1029    def _test_one(self, system):
1030        system.configure(True)
1031        system.build()
1032        system.install()
1033        system.test()
1034        system.analyze()
1035
1036    def test(self):
1037        for system in self.build_systems():
1038            self._test_one(system)
1039
1040
1041def find_file(filename, basedir):
1042    """
1043    Finds all occurrences of a file in the base directory
1044    and returns their paths.
1045
1046    Parameter descriptions:
1047    filename              The name of the file to find
1048    basedir               The base directory to search in
1049    """
1050
1051    filepaths = []
1052    for root, dirs, files in os.walk(basedir):
1053        if filename in files:
1054            filepaths.append(os.path.join(root, filename))
1055    return filepaths
1056
1057
1058if __name__ == '__main__':
1059    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
1060    CONFIGURE_FLAGS = {
1061        'phosphor-logging':
1062        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
1063         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
1064    }
1065
1066    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
1067    MESON_FLAGS = {
1068        'phosphor-dbus-interfaces':
1069        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true']
1070    }
1071
1072    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
1073    DEPENDENCIES = {
1074        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
1075        'AC_CHECK_HEADER': {
1076            'host-ipmid': 'phosphor-host-ipmid',
1077            'blobs-ipmid': 'phosphor-ipmi-blobs',
1078            'sdbusplus': 'sdbusplus',
1079            'sdeventplus': 'sdeventplus',
1080            'stdplus': 'stdplus',
1081            'gpioplus': 'gpioplus',
1082            'phosphor-logging/log.hpp': 'phosphor-logging',
1083        },
1084        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
1085        'PKG_CHECK_MODULES': {
1086            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
1087            'libipmid': 'phosphor-host-ipmid',
1088            'libipmid-host': 'phosphor-host-ipmid',
1089            'sdbusplus': 'sdbusplus',
1090            'sdeventplus': 'sdeventplus',
1091            'stdplus': 'stdplus',
1092            'gpioplus': 'gpioplus',
1093            'phosphor-logging': 'phosphor-logging',
1094            'phosphor-snmp': 'phosphor-snmp',
1095            'ipmiblob': 'ipmi-blob-tool',
1096            'hei': 'openpower-libhei',
1097            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
1098        },
1099    }
1100
1101    # Offset into array of macro parameters MACRO(0, 1, ...N)
1102    DEPENDENCIES_OFFSET = {
1103        'AC_CHECK_LIB': 0,
1104        'AC_CHECK_HEADER': 0,
1105        'AC_PATH_PROG': 1,
1106        'PKG_CHECK_MODULES': 1,
1107    }
1108
1109    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
1110    DEPENDENCIES_REGEX = {
1111        'phosphor-logging': r'\S+-dbus-interfaces$'
1112    }
1113
1114    # Set command line arguments
1115    parser = argparse.ArgumentParser()
1116    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
1117                        help="Workspace directory location(i.e. /home)")
1118    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
1119                        help="OpenBMC package to be unit tested")
1120    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
1121                        action="store_true", required=False, default=False,
1122                        help="Only run test cases, no other validation")
1123    arg_inttests = parser.add_mutually_exclusive_group()
1124    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
1125                        action="store_true", required=False, default=True,
1126                        help="Enable integration tests [default].")
1127    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
1128                        action="store_false", required=False,
1129                        help="Disable integration tests.")
1130    parser.add_argument("-v", "--verbose", action="store_true",
1131                        help="Print additional package status messages")
1132    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
1133                        type=int, default=1)
1134    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
1135                        help="Branch to target for dependent repositories",
1136                        default="master")
1137    parser.add_argument("-n", "--noformat", dest="FORMAT",
1138                        action="store_false", required=False,
1139                        help="Skip running the format-code.sh check")
1140    args = parser.parse_args(sys.argv[1:])
1141    WORKSPACE = args.WORKSPACE
1142    UNIT_TEST_PKG = args.PACKAGE
1143    TEST_ONLY = args.TEST_ONLY
1144    INTEGRATION_TEST = args.INTEGRATION_TEST
1145    BRANCH = args.BRANCH
1146    FORMAT_CODE = args.FORMAT
1147    if args.verbose:
1148        def printline(*line):
1149            for arg in line:
1150                print(arg, end=' ')
1151            print()
1152    else:
1153        def printline(*line):
1154            pass
1155
1156    CODE_SCAN_DIR = WORKSPACE + "/" + UNIT_TEST_PKG
1157
1158    # First validate code formatting if repo has style formatting files.
1159    # The format-code.sh checks for these files.
1160    if FORMAT_CODE:
1161        check_call_cmd("./format-code.sh", CODE_SCAN_DIR)
1162
1163    # Check if this repo has a supported build system infrastructure
1164    pkg = Package(UNIT_TEST_PKG, os.path.join(WORKSPACE, UNIT_TEST_PKG))
1165    if not pkg.build_system():
1166        print("No valid build system, exit")
1167        sys.exit(0)
1168
1169    prev_umask = os.umask(000)
1170
1171    # Determine dependencies and add them
1172    dep_added = dict()
1173    dep_added[UNIT_TEST_PKG] = False
1174
1175    # Create dependency tree
1176    dep_tree = DepTree(UNIT_TEST_PKG)
1177    build_dep_tree(UNIT_TEST_PKG,
1178                   os.path.join(WORKSPACE, UNIT_TEST_PKG),
1179                   dep_added,
1180                   dep_tree,
1181                   BRANCH)
1182
1183    # Reorder Dependency Tree
1184    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
1185        dep_tree.ReorderDeps(pkg_name, regex_str)
1186    if args.verbose:
1187        dep_tree.PrintTree()
1188
1189    install_list = dep_tree.GetInstallList()
1190
1191    # We don't want to treat our package as a dependency
1192    install_list.remove(UNIT_TEST_PKG)
1193
1194    # Install reordered dependencies
1195    for dep in install_list:
1196        build_and_install(dep, False)
1197
1198    # Run package unit tests
1199    build_and_install(UNIT_TEST_PKG, True)
1200
1201    os.umask(prev_umask)
1202
1203    # Run any custom CI scripts the repo has, of which there can be
1204    # multiple, located anywhere in the repository.
1205    ci_scripts = find_file('run-ci.sh', os.path.join(WORKSPACE, UNIT_TEST_PKG))
1206    if ci_scripts:
1207        os.chdir(os.path.join(WORKSPACE, UNIT_TEST_PKG))
1208        for ci_script in ci_scripts:
1209            check_call_cmd('sh', ci_script)
1210