xref: /openbmc/openbmc-build-scripts/scripts/unit-test.py (revision 47b59dc80db2956c36b6f24aee0b7c1a6b688995)
1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11# interpreter is not used directly but this resolves dependency ordering
12# that would be broken if we didn't include it.
13from mesonbuild import interpreter
14from mesonbuild import coredata, optinterpreter
15from mesonbuild.mesonlib import OptionKey
16from mesonbuild.mesonlib import version_compare as meson_version_compare
17from urllib.parse import urljoin
18from subprocess import check_call, call, CalledProcessError
19import os
20import sys
21import argparse
22import multiprocessing
23import re
24import subprocess
25import shutil
26import platform
27
28
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first child node with a matching name, if any.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        # Pre-order: record this node first if it matches by name or regex.
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.
        Both nodes must already exist in the tree.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the 'name' node itself within the pre-order match list.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every match to the right of 'name', unless it is
        # already a descendant of 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names (dependencies precede the
        packages that require them).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
222
223
def check_call_cmd(*cmd):
    """
    Verbosely print the current working directory and the command being
    executed, then run it via check_call (raises CalledProcessError on
    a non-zero exit).

    Parameter descriptions:
    cmd                 Argument strings forming the complete command
    """
    rendered = " ".join(cmd)
    printline(os.getcwd(), ">", rendered)
    check_call(cmd)
235
236
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location. Returns the package's working directory.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        # Already cloned; reuse the existing checkout.
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc.org/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Requested branch unavailable; fall back to master.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return clone.working_dir
261
262
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when `make -n <target>` succeeds; False when the target
    (or a makefile, or the `make` binary itself) is missing.
    """
    try:
        cmd = ['make', '-n', target]
        # Dry-run: fails fast if the target is unknown, builds nothing.
        check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except (CalledProcessError, FileNotFoundError):
        # CalledProcessError: no such target / no makefile.
        # FileNotFoundError: `make` is not installed.
        return False
278
279
# Invocation prefix for running `make` at full parallelism:
#   -j <ncpu>   run enough jobs to saturate all the cpus
#   -l <ncpu>   don't start more jobs if the load avg is too high
#   -O          synchronize output so logs aren't intermixed in
#               stdout / stderr
_NPROC = str(multiprocessing.cpu_count())
make_parallel = ['make', '-j', _NPROC, '-l', _NPROC, '-O']
289
290
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    pkg = Package()
    if not build_for_testing:
        pkg.install()
    else:
        pkg.test()
310
311
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # Dependencies listed in the cache file are already present in the
    # build image and never need to be cloned or built here.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): substring match against the cache line — assumes
        # dependency names are unambiguous within the cache.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        elif not dep_added[dep]:
            # Seen but not finished processing: a dependency cycle.
            raise Exception("Cyclic dependencies found in "+name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
370
371
def run_cppcheck():
    """
    Run cppcheck over the compile database in ./build, if one exists.
    Prints a message (rather than raising) when cppcheck reports errors.
    """
    compile_db = os.path.join("build", "compile_commands.json")
    if not os.path.exists(compile_db):
        # Nothing to analyze without a compile database.
        return None

    try:
        os.mkdir("cppcheck-temp")
    except FileExistsError:
        pass

    # http://cppcheck.sourceforge.net/manual.pdf
    try:
        check_call_cmd(
            'cppcheck',
            '-j', str(multiprocessing.cpu_count()),
            '--enable=style,performance,portability,missingInclude',
            '--suppress=useStlAlgorithm',
            '--suppress=unusedStructMember',
            '--suppress=postfixOperator',
            '--suppress=unreadVariable',
            '--suppress=knownConditionTrueFalse',
            '--library=googletest',
            '--project=build/compile_commands.json',
            '--cppcheck-build-dir=cppcheck-temp',
        )
    except subprocess.CalledProcessError:
        print("cppcheck found errors")
398
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform

    Compiles and runs a small probe program under valgrind; any failure
    (including a missing gcc or valgrind) reports unsafe.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The executable may not exist if gcc was missing or compilation
        # failed; don't let cleanup raise out of the finally block.
        for path in (src, exe):
            try:
                os.remove(path)
            except OSError:
                pass
433
434
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform

    Compiles and runs a trivial ASan/UBSan probe; any failure (including
    a missing gcc) reports unsafe. ppc64le is always reported unsafe.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == 'ppc64le':
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The executable may not exist if gcc was missing or compilation
        # failed; don't let cleanup raise out of the finally block.
        for path in (src, exe):
            try:
                os.remove(path)
            except OSError:
                pass
463
464
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        check_call_cmd(*(make_parallel + ['check-valgrind']))
    except CalledProcessError:
        # Dump every valgrind test-suite log to aid debugging, then fail.
        log_pattern = re.compile('test-suite-[a-z]+.log')
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                if log_pattern.search(f):
                    check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
491
492
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
508
509
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.
    """

    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        # `raise NotImplemented` was a bug: NotImplemented is not an
        # exception, so raising it produces a TypeError. Use the proper
        # NotImplementedError exception throughout.
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
612
613
class Autotools(BuildSystem):
    """BuildSystem driver for packages configured with GNU Autotools."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An Autotools package is identified by a top-level configure.ac.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Overrides the dependency-declaring m4 macros with markers, runs
        autoconf over the instrumented file, and regex-scans the expanded
        output for known dependency names.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # Redefine each macro so its interesting argument (selected by
            # DEPENDENCIES_OFFSET) is expanded between START/END markers.
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Feed the instrumented configure.ac to autoconf on stdin.
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns an configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        """Bootstrap (if needed) and run ./configure with standard flags."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        conf_flags.extend([
            self._configure_feature('code-coverage', build_for_testing),
            self._configure_feature('valgrind', build_for_testing),
        ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script the package provides, if any.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        """Run `make check` (repeated args.repeat times), then valgrind
        and coverage targets where available; dumps test-suite.log on
        failure."""
        try:
            cmd = make_parallel + ['check']
            # NOTE(review): args appears to be a module-level argparse
            # result defined elsewhere in this file — confirm.
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        run_cppcheck()
716
717
class CMake(BuildSystem):
    """BuildSystem driver for packages built with CMake."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A CMake package is identified by a top-level CMakeLists.txt.
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        if INTEGRATION_TEST:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-DITESTS=ON', '.')
        else:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        jobs = str(multiprocessing.cpu_count())
        check_call_cmd('cmake', '--build', '.', '--', '-j', jobs)

    def install(self):
        # CMake packages are not installed as dependencies by this script.
        pass

    def test(self):
        if not make_target_exists('test'):
            return
        check_call_cmd('ctest', '.')

    def _run_clang_tidy(self):
        # clang-tidy needs to run on a clang-specific build tree with a
        # compile database.
        try:
            os.mkdir("tidy-build")
        except FileExistsError:
            pass
        check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                       '-DCMAKE_CXX_COMPILER=clang++',
                       '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                       '-H.',
                       '-Btidy-build')
        # we need to cd here because otherwise clang-tidy doesn't find the
        # .clang-tidy file in the roots of repos.  Its arguably a "bug"
        # with run-clang-tidy at a minimum it's "weird" that it requires
        # the .clang-tidy to be up a dir
        os.chdir("tidy-build")
        try:
            check_call_cmd('run-clang-tidy', "-header-filter=.*", '-p',
                           '.')
        finally:
            os.chdir("..")

    def analyze(self):
        if os.path.isfile('.clang-tidy'):
            self._run_clang_tidy()

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
773
774
775class Meson(BuildSystem):
    def __init__(self, package=None, path=None):
        """Initialise the Meson driver; all state lives in BuildSystem."""
        super(Meson, self).__init__(package, path)
778
779    def probe(self):
780        return os.path.isfile(os.path.join(self.path, 'meson.build'))
781
782    def dependencies(self):
783        meson_build = os.path.join(self.path, 'meson.build')
784        if not os.path.exists(meson_build):
785            return []
786
787        found_deps = []
788        for root, dirs, files in os.walk(self.path):
789            if 'meson.build' not in files:
790                continue
791            with open(os.path.join(root, 'meson.build'), 'rt') as f:
792                build_contents = f.read()
793            pattern = r"dependency\('([^']*)'.*?\),?\n"
794            for match in re.finditer(pattern, build_contents):
795                group = match.group(1)
796                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
797                if maybe_dep is not None:
798                    found_deps.append(maybe_dep)
799
800        return found_deps
801
802    def _parse_options(self, options_file):
803        """
804        Returns a set of options defined in the provides meson_options.txt file
805
806        Parameters:
807        options_file        The file containing options
808        """
809        oi = optinterpreter.OptionInterpreter('')
810        oi.process(options_file)
811        return oi.options
812
813    def _configure_boolean(self, val):
814        """
815        Returns the meson flag which signifies the value
816
817        True is true which requires the boolean.
818        False is false which disables the boolean.
819
820        Parameters:
821        val                 The value being converted
822        """
823        if val is True:
824            return 'true'
825        elif val is False:
826            return 'false'
827        else:
828            raise Exception("Bad meson boolean value")
829
830    def _configure_feature(self, val):
831        """
832        Returns the meson flag which signifies the value
833
834        True is enabled which requires the feature.
835        False is disabled which disables the feature.
836        None is auto which autodetects the feature.
837
838        Parameters:
839        val                 The value being converted
840        """
841        if val is True:
842            return "enabled"
843        elif val is False:
844            return "disabled"
845        elif val is None:
846            return "auto"
847        else:
848            raise Exception("Bad meson feature value")
849
850    def _configure_option(self, opts, key, val):
851        """
852        Returns the meson flag which signifies the value
853        based on the type of the opt
854
855        Parameters:
856        opt                 The meson option which we are setting
857        val                 The value being converted
858        """
859        if isinstance(opts[key], coredata.UserBooleanOption):
860            str_val = self._configure_boolean(val)
861        elif isinstance(opts[key], coredata.UserFeatureOption):
862            str_val = self._configure_feature(val)
863        else:
864            raise Exception('Unknown meson option type')
865        return "-D{}={}".format(key, str_val)
866
867    def configure(self, build_for_testing):
868        self.build_for_testing = build_for_testing
869        meson_options = {}
870        if os.path.exists("meson_options.txt"):
871            meson_options = self._parse_options("meson_options.txt")
872        meson_flags = [
873            '-Db_colorout=never',
874            '-Dwerror=true',
875            '-Dwarning_level=3',
876        ]
877        if build_for_testing:
878            meson_flags.append('--buildtype=debug')
879        else:
880            meson_flags.append('--buildtype=debugoptimized')
881        if OptionKey('tests') in meson_options:
882            meson_flags.append(self._configure_option(
883                meson_options, OptionKey('tests'), build_for_testing))
884        if OptionKey('examples') in meson_options:
885            meson_flags.append(self._configure_option(
886                meson_options, OptionKey('examples'), build_for_testing))
887        if OptionKey('itests') in meson_options:
888            meson_flags.append(self._configure_option(
889                meson_options, OptionKey('itests'), INTEGRATION_TEST))
890        if MESON_FLAGS.get(self.package) is not None:
891            meson_flags.extend(MESON_FLAGS.get(self.package))
892        try:
893            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
894                           *meson_flags)
895        except:
896            shutil.rmtree('build')
897            check_call_cmd('meson', 'setup', 'build', *meson_flags)
898
    def build(self):
        """Build the package in the 'build' directory via ninja."""
        check_call_cmd('ninja', '-C', 'build')
901
    def install(self):
        """Install the built package system-wide (via non-interactive sudo)."""
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
904
905    def test(self):
906        # It is useful to check various settings of the meson.build file
907        # for compatibility, such as meson_version checks.  We shouldn't
908        # do this in the configure path though because it affects subprojects
909        # and dependencies as well, but we only want this applied to the
910        # project-under-test (otherwise an upstream dependency could fail
911        # this check without our control).
912        self._extra_meson_checks()
913
914        try:
915            test_args = ('--repeat', str(args.repeat), '-C', 'build')
916            check_call_cmd('meson', 'test', '--print-errorlogs', *test_args)
917
918        except CalledProcessError:
919            raise Exception('Unit tests failed')
920
921    def _setup_exists(self, setup):
922        """
923        Returns whether the meson build supports the named test setup.
924
925        Parameter descriptions:
926        setup              The setup target to check
927        """
928        try:
929            with open(os.devnull, 'w') as devnull:
930                output = subprocess.check_output(
931                    ['meson', 'test', '-C', 'build',
932                     '--setup', setup, '-t', '0'],
933                    stderr=subprocess.STDOUT)
934        except CalledProcessError as e:
935            output = e.output
936        output = output.decode('utf-8')
937        return not re.search('Test setup .* not found from project', output)
938
939    def _maybe_valgrind(self):
940        """
941        Potentially runs the unit tests through valgrind for the package
942        via `meson test`. The package can specify custom valgrind
943        configurations by utilizing add_test_setup() in a meson.build
944        """
945        if not is_valgrind_safe():
946            sys.stderr.write("###### Skipping valgrind ######\n")
947            return
948        try:
949            if self._setup_exists('valgrind'):
950                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
951                               '--print-errorlogs', '--setup', 'valgrind')
952            else:
953                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
954                               '--print-errorlogs', '--wrapper', 'valgrind')
955        except CalledProcessError:
956            raise Exception('Valgrind tests failed')
957
    def analyze(self):
        """
        Run the full static/dynamic analysis pass over the package:
        valgrind (when safe), clang-tidy or scan-build, address/UB
        sanitizers (when safe), code coverage, and cppcheck.
        """
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            os.environ["CXX"] = "clang++"
            check_call_cmd('meson', 'setup', 'build-clang')
            os.chdir("build-clang")
            try:
                check_call_cmd('run-clang-tidy', '-fix', '-format', '-p', '.')
            except subprocess.CalledProcessError:
                # Show what clang-tidy -fix changed before re-raising so
                # CI logs capture the suggested fixes.
                check_call_cmd("git", "-C", CODE_SCAN_DIR,
                               "--no-pager", "diff")
                raise
            finally:
                # Always restore the working directory for later steps.
                os.chdir("..")

        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build', '--print-errorlogs',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Reset sanitizers so later steps test the normal build.
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any([f.endswith('.gcda') for f in files]):
                check_call_cmd('ninja', '-C', 'build',
                               'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()
1014
1015    def _extra_meson_checks(self):
1016        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
1017            build_contents = f.read()
1018
1019        # Find project's specified meson_version.
1020        meson_version = None
1021        pattern = r"meson_version:[^']*'([^']*)'"
1022        for match in re.finditer(pattern, build_contents):
1023            group = match.group(1)
1024            meson_version = group
1025
1026        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1027        # identify this.  Add to our unit-test checks so that we don't
1028        # get a meson.build missing this.
1029        pattern = r"'cpp_std=c\+\+20'"
1030        for match in re.finditer(pattern, build_contents):
1031            if not meson_version or \
1032                    not meson_version_compare(meson_version, ">=0.57"):
1033                raise Exception(
1034                    "C++20 support requires specifying in meson.build: "
1035                    + "meson_version: '>=0.57'"
1036                )
1037
1038
class Package(object):
    """
    A repository plus the build system(s) it can be built with.

    Probes the supported build systems (Meson, Autotools, CMake) against
    the repository and drives configure/build/install/test through
    whichever ones apply.
    """

    def __init__(self, name=None, path=None):
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Lazily yield an instance of each build system that probes true."""
        for system_cls in self.supported:
            candidate = system_cls(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """
        Return the build system to use, or None if none apply.

        Parameter descriptions:
        preferred          Optional build-system class to select when
                           multiple systems probe successfully
        """
        systems = list(self.build_systems())
        if not systems:
            return None
        if preferred:
            by_type = {type(system): system for system in systems}
            return by_type[preferred]
        return systems[0]

    def install(self, system=None):
        """Configure (non-test), build, and install via the given system."""
        system = system or self.build_system()
        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the configure/build/install/test (and analyze) flow once."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Test the package under every applicable build system."""
        for system in self.build_systems():
            self._test_one(system)
1080
1081
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    names = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        # Prune meson subprojects that are managed by a .wrap file so
        # we don't report files belonging to vendored dependencies.
        if os.path.split(root)[-1] == 'subprojects':
            for f in files:
                subproject = '.'.join(f.split('.')[0:-1])
                if f.endswith('.wrap') and subproject in dirs:
                    dirs.remove(subproject)
        matches.extend(os.path.join(root, f) for f in names if f in files)
    return matches
1108
1109
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra autotools ./configure flags for specific repositories.
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra meson setup flags for specific repositories.
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'],
        'phosphor-logging':
        ['-Dopenpower-pel-extension=enabled']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps configure.ac macro arguments to the OpenBMC repository that
    # provides them, used when walking a package's dependency tree.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
            'libcr51sign': 'google-misc',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Dependencies matching the regex are reordered before the named repo.
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                              action="store_true", required=False, default=True,
                              help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                              action="store_false", required=False,
                              help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline is a no-op unless --verbose was given.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

        # Check to see if any files changed
        check_call_cmd("git", "-C", CODE_SCAN_DIR,
                       "--no-pager", "diff", "--exit-code")

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Open up the umask so installed dependency artifacts stay
    # world-accessible; the previous umask is restored after the build.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1273