1#!/usr/bin/env python3
2
3"""
This script determines the given package's openbmc dependencies from its
configure.ac file, then downloads, configures, builds, and installs each of
those dependencies. Finally, the given package itself is configured, built,
and installed prior to executing its unit tests.
8"""
9
10from git import Repo
11# interpreter is not used directly but this resolves dependency ordering
12# that would be broken if we didn't include it.
13from mesonbuild import interpreter
14from mesonbuild import coredata, optinterpreter
15from mesonbuild.mesonlib import OptionKey
16from mesonbuild.mesonlib import version_compare as meson_version_compare
17from urllib.parse import urljoin
18from subprocess import check_call, call, CalledProcessError
19from tempfile import TemporaryDirectory
20import os
21import sys
22import argparse
23import multiprocessing
24import re
25import subprocess
26import shutil
27import platform
28
29
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node.

        Parameter descriptions:
        name               Name of new child

        Returns the newly created child node.
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child node with the given name, if any.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        # A node is collected if its name matches exactly, or matches the
        # regex (re.match anchors at the start of the name).
        match = re.match(regex_str, self.name)
        if (self.name == name) or (match):
            new_paths.append(path + [self.name])
        for child in self.children:
            full_path = path + [self.name]
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        # NOTE: both names are assumed to exist in the tree; a missing
        # from_name parent would raise AttributeError here.
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        # Find the position of 'name' within the matched paths; nodes in
        # later paths are reparented under it.
        is_name_in_paths = False
        name_index = 0
        for i in range(len(paths)):
            path = paths[i]
            if path[-1] == name:
                is_name_in_paths = True
                name_index = i
                break
        if not is_name_in_paths:
            return
        for i in range(name_index + 1, len(paths)):
            path = paths[i]
            # Skip nodes that are already descendants of 'name'.
            if name in path:
                continue
            from_name = path[-1]
            self.MoveNode(from_name, name)

    def GetInstallList(self):
        """
        Return post-order list of node names (children before parents), so
        dependencies are installed before the packages that need them.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
223
224
def check_call_cmd(*cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Parameter descriptions:
    cmd                 List of parameters constructing the complete command

    Raises CalledProcessError if the command exits non-zero.
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)
236
237
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory of the cloned (or already present) repo.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        # Already cloned on a previous run; reuse it.
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc.org/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
        repo_inst = clone.working_dir
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate. Requested branch missing: fall back to master.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
        repo_inst = clone.working_dir
    return repo_inst
262
263
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when `make -n <target>` succeeds, False otherwise.
    """
    try:
        # `make -n` dry-runs the target; it fails (non-zero exit) without
        # building anything when the target is not defined.
        check_call(['make', '-n', target],
                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except CalledProcessError:
        return False
279
280
# Base `make` invocation shared by all autotools build/test steps below.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
290
291
def build_and_install(name, build_for_testing=False):
    """
    Build and install the package in the environment. Optionally
    builds the examples and test cases for the package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    # Either run the test suite or install the package as a dependency.
    pkg = Package()
    action = pkg.test if build_for_testing else pkg.install
    action()
311
312
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy.

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # NOTE(review): `dep in cache` below is a substring test against this
    # single line, so a dependency whose name is contained in any cached
    # entry is skipped -- confirm this is intended.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:  # fixed: identity comparison instead of `== None`
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in "+name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
371
372
def run_cppcheck():
    """
    Run cppcheck over the compile database produced by the build, printing
    a message (without failing) when cppcheck reports errors. Does nothing
    when no compile_commands.json exists.
    """
    if not os.path.exists(os.path.join("build", "compile_commands.json")):
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    with TemporaryDirectory() as cpp_dir:
        cppcheck_args = [
            'cppcheck',
            '-j', str(multiprocessing.cpu_count()),
            '--enable=style,performance,portability,missingInclude',
            '--suppress=useStlAlgorithm',
            '--suppress=unusedStructMember',
            '--suppress=postfixOperator',
            '--suppress=unreadVariable',
            '--suppress=knownConditionTrueFalse',
            '--library=googletest',
            '--project=build/compile_commands.json',
            f'--cppcheck-build-dir={cpp_dir}',
        ]
        try:
            check_call_cmd(*cppcheck_args)
        except subprocess.CalledProcessError:
            print("cppcheck found errors")
396
397
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles and runs a small probe program under valgrind; a failure in
    either step means valgrind cannot be relied on here.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt still works.
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The exe does not exist when compilation failed; guard removal so
        # this finally block cannot raise and mask the intended result.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
432
433
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; any
    failure means the toolchain or platform cannot support sanitizers.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if (platform.processor() == 'ppc64le'):
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt still works.
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The exe does not exist when compilation failed; guard removal so
        # this finally block cannot raise and mask the intended result.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
462
463
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.

    Raises Exception when the valgrind test suite fails.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(*cmd)
    except CalledProcessError:
        # Dump per-suite logs to aid debugging before failing. The dot is
        # now escaped (was an any-character wildcard) and the pattern is
        # compiled once outside the loop.
        log_pattern = re.compile(r'test-suite-[a-z]+\.log')
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                if log_pattern.search(f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
490
491
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. Packages without that target are
    silently skipped.

    Raises Exception when the coverage run fails.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
507
508
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.

    All interface methods raise NotImplementedError (previously the code
    raised the NotImplemented constant, which is not an exception and would
    produce a TypeError instead).
    """

    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
611
612
class Autotools(BuildSystem):
    """Build system driver for autotools (configure.ac based) packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # A package is autotools based if it ships a configure.ac.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Redefines each known dependency macro so that its interesting
        argument is wrapped in <MACRO>_START / <MACRO>_END markers, expands
        the document with autoconf, then parses the markers back out of the
        expanded output and maps them to package names via DEPENDENCIES.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # '-' makes autoconf read the assembled document from stdin.
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        """Bootstrap (when a script is present) and run ./configure."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        conf_flags.extend([
            self._configure_feature('code-coverage', build_for_testing),
            self._configure_feature('valgrind', build_for_testing),
        ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script the package provides, if any.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        """Build the package with a parallel make."""
        check_call_cmd(*make_parallel)

    def install(self):
        """Install the package system-wide via non-interactive sudo."""
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        """Run `make check` (args.repeat times), then valgrind/coverage.

        On failure, dumps any test-suite.log files found before raising.
        """
        try:
            cmd = make_parallel + ['check']
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        """Run static analysis (cppcheck) over the package."""
        run_cppcheck()
715
716
class CMake(BuildSystem):
    """Build system driver for CMake (CMakeLists.txt based) packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A package is CMake based if it ships a top-level CMakeLists.txt.
        candidate = os.path.join(self.path, 'CMakeLists.txt')
        return os.path.isfile(candidate)

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        """Generate an in-tree build with a compile database."""
        self.build_for_testing = build_for_testing
        cmake_args = ['cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON']
        if INTEGRATION_TEST:
            cmake_args.append('-DITESTS=ON')
        cmake_args.append('.')
        check_call_cmd(*cmake_args)

    def build(self):
        """Compile in parallel via `cmake --build`."""
        jobs = str(multiprocessing.cpu_count())
        check_call_cmd('cmake', '--build', '.', '--', '-j', jobs)

    def install(self):
        # CMake packages are not installed as dependencies by this script.
        pass

    def test(self):
        """Run ctest when the generated makefile has a `test` target."""
        if not make_target_exists('test'):
            return
        check_call_cmd('ctest', '.')

    def analyze(self):
        """Run clang-tidy (when configured), valgrind, coverage, cppcheck."""
        if os.path.isfile('.clang-tidy'):
            with TemporaryDirectory(prefix='build', dir='.') as build_dir:
                # clang-tidy needs to run on a clang-specific build
                check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                               '-DCMAKE_CXX_COMPILER=clang++',
                               '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                               '-H.',
                               '-B' + build_dir)

                check_call_cmd('run-clang-tidy', "-header-filter=.*", '-p',
                               build_dir)

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
762
763
764class Meson(BuildSystem):
765    def __init__(self, package=None, path=None):
766        super(Meson, self).__init__(package, path)
767
768    def probe(self):
769        return os.path.isfile(os.path.join(self.path, 'meson.build'))
770
771    def dependencies(self):
772        meson_build = os.path.join(self.path, 'meson.build')
773        if not os.path.exists(meson_build):
774            return []
775
776        found_deps = []
777        for root, dirs, files in os.walk(self.path):
778            if 'meson.build' not in files:
779                continue
780            with open(os.path.join(root, 'meson.build'), 'rt') as f:
781                build_contents = f.read()
782            pattern = r"dependency\('([^']*)'.*?\),?\n"
783            for match in re.finditer(pattern, build_contents):
784                group = match.group(1)
785                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
786                if maybe_dep is not None:
787                    found_deps.append(maybe_dep)
788
789        return found_deps
790
791    def _parse_options(self, options_file):
792        """
793        Returns a set of options defined in the provides meson_options.txt file
794
795        Parameters:
796        options_file        The file containing options
797        """
798        oi = optinterpreter.OptionInterpreter('')
799        oi.process(options_file)
800        return oi.options
801
802    def _configure_boolean(self, val):
803        """
804        Returns the meson flag which signifies the value
805
806        True is true which requires the boolean.
807        False is false which disables the boolean.
808
809        Parameters:
810        val                 The value being converted
811        """
812        if val is True:
813            return 'true'
814        elif val is False:
815            return 'false'
816        else:
817            raise Exception("Bad meson boolean value")
818
819    def _configure_feature(self, val):
820        """
821        Returns the meson flag which signifies the value
822
823        True is enabled which requires the feature.
824        False is disabled which disables the feature.
825        None is auto which autodetects the feature.
826
827        Parameters:
828        val                 The value being converted
829        """
830        if val is True:
831            return "enabled"
832        elif val is False:
833            return "disabled"
834        elif val is None:
835            return "auto"
836        else:
837            raise Exception("Bad meson feature value")
838
839    def _configure_option(self, opts, key, val):
840        """
841        Returns the meson flag which signifies the value
842        based on the type of the opt
843
844        Parameters:
845        opt                 The meson option which we are setting
846        val                 The value being converted
847        """
848        if isinstance(opts[key], coredata.UserBooleanOption):
849            str_val = self._configure_boolean(val)
850        elif isinstance(opts[key], coredata.UserFeatureOption):
851            str_val = self._configure_feature(val)
852        else:
853            raise Exception('Unknown meson option type')
854        return "-D{}={}".format(key, str_val)
855
856    def configure(self, build_for_testing):
857        self.build_for_testing = build_for_testing
858        meson_options = {}
859        if os.path.exists("meson_options.txt"):
860            meson_options = self._parse_options("meson_options.txt")
861        meson_flags = [
862            '-Db_colorout=never',
863            '-Dwerror=true',
864            '-Dwarning_level=3',
865        ]
866        if build_for_testing:
867            meson_flags.append('--buildtype=debug')
868        else:
869            meson_flags.append('--buildtype=debugoptimized')
870        if OptionKey('tests') in meson_options:
871            meson_flags.append(self._configure_option(
872                meson_options, OptionKey('tests'), build_for_testing))
873        if OptionKey('examples') in meson_options:
874            meson_flags.append(self._configure_option(
875                meson_options, OptionKey('examples'), build_for_testing))
876        if OptionKey('itests') in meson_options:
877            meson_flags.append(self._configure_option(
878                meson_options, OptionKey('itests'), INTEGRATION_TEST))
879        if MESON_FLAGS.get(self.package) is not None:
880            meson_flags.extend(MESON_FLAGS.get(self.package))
881        try:
882            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
883                           *meson_flags)
884        except:
885            shutil.rmtree('build', ignore_errors=True)
886            check_call_cmd('meson', 'setup', 'build', *meson_flags)
887
    def build(self):
        """Compile the package via ninja in the 'build' directory."""
        check_call_cmd('ninja', '-C', 'build')
890
    def install(self):
        """
        Install the built package system-wide.

        Uses 'sudo -n' (non-interactive) so the run fails rather than
        hanging on a password prompt.
        """
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
893
894    def test(self):
895        # It is useful to check various settings of the meson.build file
896        # for compatibility, such as meson_version checks.  We shouldn't
897        # do this in the configure path though because it affects subprojects
898        # and dependencies as well, but we only want this applied to the
899        # project-under-test (otherwise an upstream dependency could fail
900        # this check without our control).
901        self._extra_meson_checks()
902
903        try:
904            test_args = ('--repeat', str(args.repeat), '-C', 'build')
905            check_call_cmd('meson', 'test', '--print-errorlogs', *test_args)
906
907        except CalledProcessError:
908            raise Exception('Unit tests failed')
909
910    def _setup_exists(self, setup):
911        """
912        Returns whether the meson build supports the named test setup.
913
914        Parameter descriptions:
915        setup              The setup target to check
916        """
917        try:
918            with open(os.devnull, 'w') as devnull:
919                output = subprocess.check_output(
920                    ['meson', 'test', '-C', 'build',
921                     '--setup', setup, '-t', '0'],
922                    stderr=subprocess.STDOUT)
923        except CalledProcessError as e:
924            output = e.output
925        output = output.decode('utf-8')
926        return not re.search('Test setup .* not found from project', output)
927
928    def _maybe_valgrind(self):
929        """
930        Potentially runs the unit tests through valgrind for the package
931        via `meson test`. The package can specify custom valgrind
932        configurations by utilizing add_test_setup() in a meson.build
933        """
934        if not is_valgrind_safe():
935            sys.stderr.write("###### Skipping valgrind ######\n")
936            return
937        try:
938            if self._setup_exists('valgrind'):
939                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
940                               '--print-errorlogs', '--setup', 'valgrind')
941            else:
942                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
943                               '--print-errorlogs', '--wrapper', 'valgrind')
944        except CalledProcessError:
945            raise Exception('Valgrind tests failed')
946
    def analyze(self):
        """
        Run the static and dynamic analysis passes over the package:
        valgrind (when safe), clang-tidy or scan-build, address/UB
        sanitizers (when safe), code coverage, and cppcheck.
        """
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            os.environ["CXX"] = "clang++"
            # clang-tidy needs a compile database, so configure a throwaway
            # build tree for it.
            with TemporaryDirectory(prefix='build', dir='.') as build_dir:
                check_call_cmd('meson', 'setup', build_dir)
                try:
                    check_call_cmd('run-clang-tidy', '-fix',
                                   '-format', '-p', build_dir)
                except subprocess.CalledProcessError:
                    # Show what clang-tidy changed before failing the run.
                    check_call_cmd("git", "-C", CODE_SCAN_DIR,
                                   "--no-pager", "diff")
                    raise
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build', '--print-errorlogs',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Reset sanitizers so later passes build normally.
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any([f.endswith('.gcda') for f in files]):
                check_call_cmd('ninja', '-C', 'build',
                               'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()
1001
1002    def _extra_meson_checks(self):
1003        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
1004            build_contents = f.read()
1005
1006        # Find project's specified meson_version.
1007        meson_version = None
1008        pattern = r"meson_version:[^']*'([^']*)'"
1009        for match in re.finditer(pattern, build_contents):
1010            group = match.group(1)
1011            meson_version = group
1012
1013        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1014        # identify this.  Add to our unit-test checks so that we don't
1015        # get a meson.build missing this.
1016        pattern = r"'cpp_std=c\+\+20'"
1017        for match in re.finditer(pattern, build_contents):
1018            if not meson_version or \
1019                    not meson_version_compare(meson_version, ">=0.57"):
1020                raise Exception(
1021                    "C++20 support requires specifying in meson.build: "
1022                    + "meson_version: '>=0.57'"
1023                )
1024
1025
class Package(object):
    """
    Represents a repository along with every build system it can be
    built and tested with.
    """

    def __init__(self, name=None, path=None):
        # Ordered by preference; the first system that probes is used.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of each supported system that probes true."""
        for candidate in self.supported:
            system = candidate(self.name, self.path)
            if system.probe():
                yield system

    def build_system(self, preferred=None):
        """
        Return the build system to use, or None when none probed.

        Parameter descriptions:
        preferred          Optional build-system class to pick when
                           several systems are available
        """
        available = list(self.build_systems())

        if not available:
            return None

        if preferred:
            by_type = {type(system): system for system in available}
            return by_type[preferred]

        return available[0]

    def install(self, system=None):
        """Configure, build, and install a non-test build of the package."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Build, install, and test with one system; analyze unless -t."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Run the test cycle for every build system the repo supports."""
        for system in self.build_systems():
            self._test_one(system)
1067
1068
1069def find_file(filename, basedir):
1070    """
1071    Finds all occurrences of a file (or list of files) in the base
1072    directory and passes them back with their relative paths.
1073
1074    Parameter descriptions:
1075    filename              The name of the file (or list of files) to
1076                          find
1077    basedir               The base directory search in
1078    """
1079
1080    if not isinstance(filename, list):
1081        filename = [filename]
1082
1083    filepaths = []
1084    for root, dirs, files in os.walk(basedir):
1085        if os.path.split(root)[-1] == 'subprojects':
1086            for f in files:
1087                subproject = '.'.join(f.split('.')[0:-1])
1088                if f.endswith('.wrap') and subproject in dirs:
1089                    # don't find files in meson subprojects with wraps
1090                    dirs.remove(subproject)
1091        for f in filename:
1092            if f in files:
1093                filepaths.append(os.path.join(root, f))
1094    return filepaths
1095
1096
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'],
        'phosphor-logging':
        ['-Dopenpower-pel-extension=enabled']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macro arguments seen in configure.ac to the OpenBMC
    # repository that provides them.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
            'libcr51sign': 'google-misc',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                              action="store_true", required=False, default=True,
                              help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                              action="store_false", required=False,
                              help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    # Mirror parsed arguments into the module-level globals that the
    # build-system classes reference.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() prints its arguments space-separated when --verbose is
    # given and is a no-op otherwise.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

        # Check to see if any files changed
        check_call_cmd("git", "-C", CODE_SCAN_DIR,
                       "--no-pager", "diff", "--exit-code")

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask while installing dependencies; restored below.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)