1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11# interpreter is not used directly but this resolves dependency ordering
12# that would be broken if we didn't include it.
13from mesonbuild import interpreter
14from mesonbuild import coredata, optinterpreter
15from mesonbuild.mesonlib import OptionKey
16from mesonbuild.mesonlib import version_compare as meson_version_compare
17from urllib.parse import urljoin
18from subprocess import check_call, call, CalledProcessError
19from tempfile import TemporaryDirectory
20import os
21import sys
22import argparse
23import multiprocessing
24import re
25import subprocess
26import shutil
27import platform
28
29
class DepTree:
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and a list of DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with a matching name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node (used during recursion)
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        full_path = path + [self.name]
        for child in self.children:
            # Collect matching paths from every subtree.
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        is_name_in_paths = False
        name_index = 0
        for i, path in enumerate(paths):
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        # Re-parent matching nodes found after 'name', unless they are
        # already inside the 'name' subtree.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names (dependencies first).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
224
def check_call_cmd(*cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Parameter descriptions:
    cmd                 Parameters constructing the complete command

    Raises CalledProcessError if the command exits non-zero.
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)
236
237
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory of the cloned (or pre-existing) repo.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Reuse an existing clone rather than cloning again.
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc.org/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Fall back to master when the requested branch doesn't exist.
        # (A bare except would also swallow KeyboardInterrupt/SystemExit.)
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return clone.working_dir
262
263
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        # `make -n` only dry-runs; a nonexistent target makes it exit
        # non-zero without building anything.
        check_call(['make', '-n', target],
                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except CalledProcessError:
        return False
279
280
# Shared `make` invocation for parallel builds: -j and -l bound the job
# count and load average to the CPU count, and -O keeps each job's output
# from interleaving in stdout / stderr.
_NPROC = str(multiprocessing.cpu_count())
make_parallel = [
    'make',
    '-j', _NPROC,
    '-l', _NPROC,
    '-O',
]
290
291
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    # Either run the package's test suite or install it as a dependency.
    pkg = Package()
    action = pkg.test if build_for_testing else pkg.install
    action()
311
312
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.  Raises Exception when no build
    system is found or a dependency cycle is detected.
    """
    if not dep_tree:
        dep_tree = head

    # Packages listed in the depcache are assumed pre-installed and are
    # skipped below.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        elif not dep_added[dep]:
            # Seen but not finished: we are still inside this dependency's
            # own recursion, so reaching it again means a cycle.
            raise Exception(f"Cyclic dependencies found in {name}")

    # Mark this package as fully processed.
    dep_added[name] = True

    return dep_added
371
372
def run_cppcheck():
    """Run cppcheck over the compile database, if one was generated."""
    if not os.path.exists(os.path.join("build", "compile_commands.json")):
        return None

    with TemporaryDirectory() as cpp_dir:
        # http://cppcheck.sourceforge.net/manual.pdf
        cppcheck_args = [
            'cppcheck',
            '-j', str(multiprocessing.cpu_count()),
            '--enable=style,performance,portability,missingInclude',
            '--suppress=useStlAlgorithm',
            '--suppress=unusedStructMember',
            '--suppress=postfixOperator',
            '--suppress=unreadVariable',
            '--suppress=knownConditionTrueFalse',
            '--library=googletest',
            '--project=build/compile_commands.json',
            f'--cppcheck-build-dir={cpp_dir}',
        ]
        try:
            check_call_cmd(*cppcheck_args)
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
396
397
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    # Probe program exercising malloc/strcpy/strcmp/strerror_r under
    # valgrind; if valgrind reports errors (exit 99) we deem it unsafe.
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate.
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The exe never exists if compilation failed; only remove what
        # was actually created so cleanup itself cannot raise.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
432
433
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    # Minimal probe: compile and run a trivial program with ASan/UBSan.
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if platform.processor() == 'ppc64le':
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        return True
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate.
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The exe never exists if compilation failed; only remove what
        # was actually created so cleanup itself cannot raise.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
462
463
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(*cmd)
    except CalledProcessError:
        # Dump any per-suite logs to aid debugging before failing.
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                # Raw string with escaped '.' so arbitrary characters
                # before "log" no longer match accidentally.
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
490
491
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    coverage_cmd = make_parallel + ['check-code-coverage']
    try:
        check_call_cmd(*coverage_cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')
507
508
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.

    All phase methods raise NotImplementedError; subclasses must override
    them.  (The original `raise NotImplemented` raised the constant itself,
    which is a TypeError in Python 3.)
    """

    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
611
612
class Autotools(BuildSystem):
    """Build system driver for autotools (configure.ac) based packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An autotools package is identified by its configure.ac file.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies by expanding configure.ac.

        Redefines the dependency-declaring m4 macros (the keys of the
        module-level DEPENDENCIES dict) as START/END markers, runs
        autoconf, and scans the expanded output for marker-delimited
        dependency text.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # DEPENDENCIES_OFFSET[macro] selects which macro argument holds
            # the dependency list ($N references are 1-indexed in m4).
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # '-' tells autoconf to read the annotated script from stdin.
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    # Prefix match tolerates version constraints appended
                    # to the module name.
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        """Bootstrap (if a bootstrap script exists) and run ./configure."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        conf_flags.extend([
            self._configure_feature('code-coverage', build_for_testing),
            self._configure_feature('valgrind', build_for_testing),
        ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found, if any.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        # Requires passwordless sudo (-n: never prompt).
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        """Run `make check` (repeated args.repeat times), then optional
        valgrind and coverage targets; dump test-suite.log on failure."""
        try:
            cmd = make_parallel + ['check']
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        run_cppcheck()
715
716
class CMake(BuildSystem):
    """Build system driver for CMake (CMakeLists.txt) based packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A CMake package is identified by its top-level CMakeLists.txt.
        cmakelists = os.path.join(self.path, 'CMakeLists.txt')
        return os.path.isfile(cmakelists)

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        """Run cmake in-tree, exporting a compile database."""
        self.build_for_testing = build_for_testing
        cmake_args = ['cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON']
        if INTEGRATION_TEST:
            cmake_args.append('-DITESTS=ON')
        cmake_args.append('.')
        check_call_cmd(*cmake_args)

    def build(self):
        job_count = str(multiprocessing.cpu_count())
        check_call_cmd('cmake', '--build', '.', '--', '-j', job_count)

    def install(self):
        # CMake packages are not installed as dependencies by this script.
        pass

    def test(self):
        if not make_target_exists('test'):
            return
        check_call_cmd('ctest', '.')

    def _run_clang_tidy(self):
        """Run clang-tidy from a dedicated clang-configured build tree."""
        shutil.rmtree("tidy-build", ignore_errors=True)
        os.mkdir("tidy-build")

        # clang-tidy needs to run on a clang-specific build
        check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                       '-DCMAKE_CXX_COMPILER=clang++',
                       '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                       '-H.',
                       '-Btidy-build')
        # we need to cd here because otherwise clang-tidy doesn't find the
        # .clang-tidy file in the roots of repos.  It's arguably a "bug"
        # with run-clang-tidy at a minimum it's "weird" that it requires
        # the .clang-tidy to be up a dir
        os.chdir("tidy-build")
        try:
            check_call_cmd('run-clang-tidy', "-header-filter=.*", '-p',
                           '.')
        finally:
            os.chdir("..")
            shutil.rmtree("tidy-build", ignore_errors=True)

    def analyze(self):
        if os.path.isfile('.clang-tidy'):
            self._run_clang_tidy()

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
772
773
774class Meson(BuildSystem):
775    def __init__(self, package=None, path=None):
776        super(Meson, self).__init__(package, path)
777
778    def probe(self):
779        return os.path.isfile(os.path.join(self.path, 'meson.build'))
780
781    def dependencies(self):
782        meson_build = os.path.join(self.path, 'meson.build')
783        if not os.path.exists(meson_build):
784            return []
785
786        found_deps = []
787        for root, dirs, files in os.walk(self.path):
788            if 'meson.build' not in files:
789                continue
790            with open(os.path.join(root, 'meson.build'), 'rt') as f:
791                build_contents = f.read()
792            pattern = r"dependency\('([^']*)'.*?\),?\n"
793            for match in re.finditer(pattern, build_contents):
794                group = match.group(1)
795                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
796                if maybe_dep is not None:
797                    found_deps.append(maybe_dep)
798
799        return found_deps
800
801    def _parse_options(self, options_file):
802        """
803        Returns a set of options defined in the provides meson_options.txt file
804
805        Parameters:
806        options_file        The file containing options
807        """
808        oi = optinterpreter.OptionInterpreter('')
809        oi.process(options_file)
810        return oi.options
811
812    def _configure_boolean(self, val):
813        """
814        Returns the meson flag which signifies the value
815
816        True is true which requires the boolean.
817        False is false which disables the boolean.
818
819        Parameters:
820        val                 The value being converted
821        """
822        if val is True:
823            return 'true'
824        elif val is False:
825            return 'false'
826        else:
827            raise Exception("Bad meson boolean value")
828
829    def _configure_feature(self, val):
830        """
831        Returns the meson flag which signifies the value
832
833        True is enabled which requires the feature.
834        False is disabled which disables the feature.
835        None is auto which autodetects the feature.
836
837        Parameters:
838        val                 The value being converted
839        """
840        if val is True:
841            return "enabled"
842        elif val is False:
843            return "disabled"
844        elif val is None:
845            return "auto"
846        else:
847            raise Exception("Bad meson feature value")
848
849    def _configure_option(self, opts, key, val):
850        """
851        Returns the meson flag which signifies the value
852        based on the type of the opt
853
854        Parameters:
855        opt                 The meson option which we are setting
856        val                 The value being converted
857        """
858        if isinstance(opts[key], coredata.UserBooleanOption):
859            str_val = self._configure_boolean(val)
860        elif isinstance(opts[key], coredata.UserFeatureOption):
861            str_val = self._configure_feature(val)
862        else:
863            raise Exception('Unknown meson option type')
864        return "-D{}={}".format(key, str_val)
865
866    def configure(self, build_for_testing):
867        self.build_for_testing = build_for_testing
868        meson_options = {}
869        if os.path.exists("meson_options.txt"):
870            meson_options = self._parse_options("meson_options.txt")
871        meson_flags = [
872            '-Db_colorout=never',
873            '-Dwerror=true',
874            '-Dwarning_level=3',
875        ]
876        if build_for_testing:
877            meson_flags.append('--buildtype=debug')
878        else:
879            meson_flags.append('--buildtype=debugoptimized')
880        if OptionKey('tests') in meson_options:
881            meson_flags.append(self._configure_option(
882                meson_options, OptionKey('tests'), build_for_testing))
883        if OptionKey('examples') in meson_options:
884            meson_flags.append(self._configure_option(
885                meson_options, OptionKey('examples'), build_for_testing))
886        if OptionKey('itests') in meson_options:
887            meson_flags.append(self._configure_option(
888                meson_options, OptionKey('itests'), INTEGRATION_TEST))
889        if MESON_FLAGS.get(self.package) is not None:
890            meson_flags.extend(MESON_FLAGS.get(self.package))
891        try:
892            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
893                           *meson_flags)
894        except:
895            shutil.rmtree('build', ignore_errors=True)
896            check_call_cmd('meson', 'setup', 'build', *meson_flags)
897
898    def build(self):
899        check_call_cmd('ninja', '-C', 'build')
900
901    def install(self):
902        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
903
904    def test(self):
905        # It is useful to check various settings of the meson.build file
906        # for compatibility, such as meson_version checks.  We shouldn't
907        # do this in the configure path though because it affects subprojects
908        # and dependencies as well, but we only want this applied to the
909        # project-under-test (otherwise an upstream dependency could fail
910        # this check without our control).
911        self._extra_meson_checks()
912
913        try:
914            test_args = ('--repeat', str(args.repeat), '-C', 'build')
915            check_call_cmd('meson', 'test', '--print-errorlogs', *test_args)
916
917        except CalledProcessError:
918            raise Exception('Unit tests failed')
919
920    def _setup_exists(self, setup):
921        """
922        Returns whether the meson build supports the named test setup.
923
924        Parameter descriptions:
925        setup              The setup target to check
926        """
927        try:
928            with open(os.devnull, 'w') as devnull:
929                output = subprocess.check_output(
930                    ['meson', 'test', '-C', 'build',
931                     '--setup', setup, '-t', '0'],
932                    stderr=subprocess.STDOUT)
933        except CalledProcessError as e:
934            output = e.output
935        output = output.decode('utf-8')
936        return not re.search('Test setup .* not found from project', output)
937
938    def _maybe_valgrind(self):
939        """
940        Potentially runs the unit tests through valgrind for the package
941        via `meson test`. The package can specify custom valgrind
942        configurations by utilizing add_test_setup() in a meson.build
943        """
944        if not is_valgrind_safe():
945            sys.stderr.write("###### Skipping valgrind ######\n")
946            return
947        try:
948            if self._setup_exists('valgrind'):
949                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
950                               '--print-errorlogs', '--setup', 'valgrind')
951            else:
952                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
953                               '--print-errorlogs', '--wrapper', 'valgrind')
954        except CalledProcessError:
955            raise Exception('Valgrind tests failed')
956
957    def analyze(self):
958        self._maybe_valgrind()
959
960        # Run clang-tidy only if the project has a configuration
961        if os.path.isfile('.clang-tidy'):
962            os.environ["CXX"] = "clang++"
963            shutil.rmtree("build-clang", ignore_errors=True)
964            check_call_cmd('meson', 'setup', 'build-clang')
965            os.chdir("build-clang")
966            try:
967                check_call_cmd('run-clang-tidy', '-fix', '-format', '-p', '.')
968            except subprocess.CalledProcessError:
969                check_call_cmd("git", "-C", CODE_SCAN_DIR,
970                               "--no-pager", "diff")
971                raise
972            finally:
973                os.chdir("..")
974                shutil.rmtree("build-clang", ignore_errors=True)
975
976        # Run the basic clang static analyzer otherwise
977        else:
978            check_call_cmd('ninja', '-C', 'build',
979                           'scan-build')
980
981        # Run tests through sanitizers
982        # b_lundef is needed if clang++ is CXX since it resolves the
983        # asan symbols at runtime only. We don't want to set it earlier
984        # in the build process to ensure we don't have undefined
985        # runtime code.
986        if is_sanitize_safe():
987            check_call_cmd('meson', 'configure', 'build',
988                           '-Db_sanitize=address,undefined',
989                           '-Db_lundef=false')
990            check_call_cmd('meson', 'test', '-C', 'build', '--print-errorlogs',
991                           '--logbase', 'testlog-ubasan')
992            # TODO: Fix memory sanitizer
993            # check_call_cmd('meson', 'configure', 'build',
994            #                '-Db_sanitize=memory')
995            # check_call_cmd('meson', 'test', '-C', 'build'
996            #                '--logbase', 'testlog-msan')
997            check_call_cmd('meson', 'configure', 'build',
998                           '-Db_sanitize=none')
999        else:
1000            sys.stderr.write("###### Skipping sanitizers ######\n")
1001
1002        # Run coverage checks
1003        check_call_cmd('meson', 'configure', 'build',
1004                       '-Db_coverage=true')
1005        self.test()
1006        # Only build coverage HTML if coverage files were produced
1007        for root, dirs, files in os.walk('build'):
1008            if any([f.endswith('.gcda') for f in files]):
1009                check_call_cmd('ninja', '-C', 'build',
1010                               'coverage-html')
1011                break
1012        check_call_cmd('meson', 'configure', 'build',
1013                       '-Db_coverage=false')
1014        run_cppcheck()
1015
1016    def _extra_meson_checks(self):
1017        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
1018            build_contents = f.read()
1019
1020        # Find project's specified meson_version.
1021        meson_version = None
1022        pattern = r"meson_version:[^']*'([^']*)'"
1023        for match in re.finditer(pattern, build_contents):
1024            group = match.group(1)
1025            meson_version = group
1026
1027        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1028        # identify this.  Add to our unit-test checks so that we don't
1029        # get a meson.build missing this.
1030        pattern = r"'cpp_std=c\+\+20'"
1031        for match in re.finditer(pattern, build_contents):
1032            if not meson_version or \
1033                    not meson_version_compare(meson_version, ">=0.57"):
1034                raise Exception(
1035                    "C++20 support requires specifying in meson.build: "
1036                    + "meson_version: '>=0.57'"
1037                )
1038
1039
class Package(object):
    """
    Discovers which build system(s) a repository supports and drives the
    install/test flow through them.
    """

    def __init__(self, name=None, path=None):
        """
        Create a Package.

        Parameter descriptions:
        name               Repository/package name
        path               Path to the package's source checkout
        """
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield instantiated build systems whose probe() succeeds."""
        candidates = (cls(self.name, self.path) for cls in self.supported)
        return (candidate for candidate in candidates if candidate.probe())

    def build_system(self, preferred=None):
        """
        Return a detected build system, or None if none probe successfully.

        Parameter descriptions:
        preferred          Optional build-system class to select among
                           the detected ones (KeyError if not detected)
        """
        detected = list(self.build_systems())

        if not detected:
            return None

        if preferred:
            by_type = {type(system): system for system in detected}
            return by_type[preferred]

        return detected[0]

    def install(self, system=None):
        """Configure (non-test mode), build, and install the package."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full test flow against a single build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Run the test flow against every detected build system."""
        for system in self.build_systems():
            self._test_one(system)
1081
1082
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) under the base
    directory and returns them with their full paths.

    Directories belonging to meson wrap-file subprojects are pruned so
    vendored copies are not reported.

    Parameter descriptions:
    filename              The name of the file (or list of files) to find
    basedir               The base directory to search in
    """

    names = filename if isinstance(filename, list) else [filename]

    found = []
    for root, dirs, files in os.walk(basedir):
        if os.path.basename(root) == 'subprojects':
            # Prune any subproject directory that has a matching .wrap
            # file so we don't report files from fetched dependencies.
            for entry in files:
                stem, ext = os.path.splitext(entry)
                if ext == '.wrap' and stem in dirs:
                    dirs.remove(stem)
        found.extend(os.path.join(root, name)
                     for name in names if name in files)
    return found
1109
1110
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra ./configure arguments for specific autotools packages.
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra meson setup arguments for specific packages (consumed by the
    # Meson build-system class above via the MESON_FLAGS global).
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'],
        'phosphor-logging':
        ['-Dopenpower-pel-extension=enabled']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macro arguments found in configure.ac to the openbmc
    # repository that provides them.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
            'libcr51sign': 'google-misc',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which positional argument of each macro names the dependency.
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Dependencies matching the regex are reordered to install before the
    # named repository (see DepTree.ReorderDeps).
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                              action="store_true", required=False, default=True,
                              help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                              action="store_false", required=False,
                              help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    # These module-level globals are read by the build-system classes and
    # helper functions defined above.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    if args.verbose:
        def printline(*line):
            # Verbose mode: echo each status message, space-separated.
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            # Quiet mode: status messages are dropped.
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

        # Check to see if any files changed; a non-empty diff fails the run.
        check_call_cmd("git", "-C", CODE_SCAN_DIR,
                       "--no-pager", "diff", "--exit-code")

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask so dependency installs are world-readable.
    # NOTE(review): 000 is a decimal literal equal to 0 (probably meant
    # as octal 0o000); the value is the same either way.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    # (it is always present since it is the tree's root).
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1274