1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11# interpreter is not used directly but this resolves dependency ordering
12# that would be broken if we didn't include it.
13from mesonbuild import interpreter
14from mesonbuild import coredata, optinterpreter
15from mesonbuild.mesonlib import OptionKey
16from mesonbuild.mesonlib import version_compare as meson_version_compare
17from urllib.parse import urljoin
18from subprocess import check_call, call, CalledProcessError
19import os
20import sys
21import argparse
22import multiprocessing
23import re
24import subprocess
25import shutil
26import platform
27
28
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child node with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for i, child in enumerate(self.children):
            if child.name == name:
                del self.children[i]
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        # A node qualifies either by exact name or by regex match.
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        for child in self.children:
            new_paths.extend(
                child.GetPathRegex(name, regex_str, path + [self.name]))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Assumes both nodes exist in the tree; callers (see ReorderDeps)
        guarantee this before invoking.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the first path that ends exactly at 'name'.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Everything after 'name' in pre-order becomes its child, unless it
        # already lives under 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names.

        Post-order guarantees dependencies appear before their dependents.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
223
def check_call_cmd(*cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Raises CalledProcessError if the command exits non-zero.

    Parameter descriptions:
    cmd                 Parameters constructing the complete command
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)
235
236
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory of the cloned (or existing) repository.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Re-use a checkout that's already present in the workspace.
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc.org/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
        repo_inst = clone.working_dir
    except Exception:
        # Requested branch doesn't exist upstream; fall back to master.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
        repo_inst = clone.working_dir
    return repo_inst
261
262
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    # `make -n` dry-runs the target; a non-zero exit means make can't
    # build it (missing target or no makefile).
    with open(os.devnull, 'w') as devnull:
        try:
            check_call(['make', '-n', target],
                       stdout=devnull, stderr=devnull)
        except CalledProcessError:
            return False
    return True
278
279
# Base `make` invocation shared by the Autotools build/install/test steps;
# callers append a target list (e.g. ['install'], ['check']) before running.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
289
290
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    # The package sources are expected to already be cloned under WORKSPACE.
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    # Package is defined elsewhere in this file; it wraps the detected
    # build-system driver (see build_dep_tree's use of build_system()).
    pkg = Package()
    if build_for_testing:
        pkg.test()
    else:
        pkg.install()
310
311
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy.

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # /tmp/depcache holds a single line naming dependencies already present
    # in the environment; those are skipped below.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # NOTE(review): this is a substring test against the cache line, so a
        # cached name that is a prefix of another package also matches —
        # confirm this is the intended cache semantics.
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # Cyclic dependency failure
                raise Exception("Cyclic dependencies found in " + name)

    # Mark this package fully processed once all its deps are handled.
    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
370
371
def run_cppcheck():
    """
    Run cppcheck over all git-tracked C/C++ sources, excluding .mako.
    template files. Raises an exception if cppcheck exits non-zero.
    """
    # Matches *.c/*.h/*.cpp/*.hpp (case-insensitive) with no '.mako.' in
    # the name.
    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    stdout = subprocess.check_output(['git', 'ls-files'])
    cppcheck_files = [f for f in stdout.decode('utf-8').split()
                      if match_re.match(f)]

    if not cppcheck_files:
        # skip cppcheck if there aren't any c or cpp sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--library=googletest', '--file-list=-']

    cppcheck_process = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    # Feed the file list on stdin (requested via --file-list=-).
    (stdout, stderr) = cppcheck_process.communicate(
        input='\n'.join(cppcheck_files).encode('utf-8'))

    # communicate() already waited for exit; just inspect the status.
    if cppcheck_process.returncode:
        raise Exception('Cppcheck failed')
    print(stdout.decode('utf-8'))
    print(stderr.decode('utf-8'))
402
403
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles and runs a small probe program under valgrind; any compile,
    run, or valgrind error marks the platform unsafe.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            # --error-exitcode makes any valgrind finding fail the call.
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # Always clean up the probe artifacts.
        os.remove(src)
        os.remove(exe)
438
439
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; failure at
    any step marks the platform unsafe.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if (platform.processor() == 'ppc64le'):
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # Always clean up the probe artifacts.
        os.remove(src)
        os.remove(exe)
468
469
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(*cmd)
    except CalledProcessError:
        # Dump every test-suite log so the failure is visible in CI output.
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                # Escape the dot so e.g. 'test-suite-memXlog' can't match.
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
496
497
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    # Nothing to do when the package doesn't provide the coverage target.
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
513
514
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.

    All phase methods raise NotImplementedError and must be overridden by
    concrete drivers.
    """

    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        # Raise the exception class, not the NotImplemented constant —
        # raising the constant is a TypeError in Python 3.
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
617
618
class Autotools(BuildSystem):
    """Build system driver for Autotools (configure.ac) based packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        """A package is Autotools-based if configure.ac exists."""
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Overrides the dependency-declaring m4 macros so autoconf expands
        them into greppable START/END markers, then maps the text found
        between the markers through the DEPENDENCIES table (defined
        elsewhere in this file).
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Feed the augmented configure.ac to autoconf on stdin.
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        """Bootstrap (if a bootstrap script exists) and run ./configure."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
            self._configure_feature('code-coverage', build_for_testing),
            self._configure_feature('valgrind', build_for_testing),
        ]
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        """Run `make check` (args.repeat times), then valgrind and coverage
        targets; dump test-suite.log files on failure."""
        try:
            cmd = make_parallel + ['check']
            for i in range(args.repeat):
                check_call_cmd(*cmd)

            maybe_make_valgrind()
            maybe_make_coverage()
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        run_cppcheck()
721
722
class CMake(BuildSystem):
    """Build system driver for CMake (CMakeLists.txt) based packages."""

    def __init__(self, package=None, path=None):
        super().__init__(package, path)

    def probe(self):
        """A package is CMake-based if a top-level CMakeLists.txt exists."""
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        # CMake dependency extraction is not implemented.
        return []

    def configure(self, build_for_testing):
        self.build_for_testing = build_for_testing
        if INTEGRATION_TEST:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-DITESTS=ON', '.')
        else:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        # CMake-built dependencies are not installed by this script.
        pass

    def test(self):
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        if os.path.isfile('.clang-tidy'):
            # An existing tidy-build directory from a prior run is fine.
            try:
                os.mkdir("tidy-build")
            except FileExistsError:
                pass
            # clang-tidy needs to run on a clang-specific build
            check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                           '-DCMAKE_CXX_COMPILER=clang++',
                           '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-H.',
                           '-Btidy-build')
            # we need to cd here because otherwise clang-tidy doesn't find the
            # .clang-tidy file in the roots of repos.  Its arguably a "bug"
            # with run-clang-tidy at a minimum it's "weird" that it requires
            # the .clang-tidy to be up a dir
            os.chdir("tidy-build")
            try:
                check_call_cmd('run-clang-tidy', "-header-filter=.*", '-p',
                               '.')
            finally:
                os.chdir("..")

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
778
779
780class Meson(BuildSystem):
    def __init__(self, package=None, path=None):
        """Initialise the Meson driver; all state lives in the base class."""
        super(Meson, self).__init__(package, path)
783
    def probe(self):
        """A package is Meson-based if a top-level meson.build exists."""
        return os.path.isfile(os.path.join(self.path, 'meson.build'))
786
    def dependencies(self):
        """Extract openbmc dependencies from all meson.build files.

        Walks the source tree, scrapes every dependency('...') call, and
        maps the names through the DEPENDENCIES table (defined elsewhere in
        this file) to openbmc package names. Unknown names are ignored.
        """
        meson_build = os.path.join(self.path, 'meson.build')
        if not os.path.exists(meson_build):
            return []

        found_deps = []
        for root, dirs, files in os.walk(self.path):
            if 'meson.build' not in files:
                continue
            with open(os.path.join(root, 'meson.build'), 'rt') as f:
                build_contents = f.read()
            # group(1) captures the dependency name inside dependency('...').
            pattern = r"dependency\('([^']*)'.*?\),?\n"
            for match in re.finditer(pattern, build_contents):
                group = match.group(1)
                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
                if maybe_dep is not None:
                    found_deps.append(maybe_dep)

        return found_deps
806
    def _parse_options(self, options_file):
        """
        Returns a set of options defined in the provided meson_options.txt file

        Parameters:
        options_file        The file containing options
        """
        oi = optinterpreter.OptionInterpreter('')
        oi.process(options_file)
        return oi.options
817
    def _configure_boolean(self, val):
        """
        Returns the meson flag which signifies the value

        True is true which requires the boolean.
        False is false which disables the boolean.

        Parameters:
        val                 The value being converted
        """
        # Identity checks deliberately reject truthy non-bool values.
        if val is True:
            return 'true'
        elif val is False:
            return 'false'
        else:
            raise Exception("Bad meson boolean value")
834
    def _configure_feature(self, val):
        """
        Returns the meson flag which signifies the value

        True is enabled which requires the feature.
        False is disabled which disables the feature.
        None is auto which autodetects the feature.

        Parameters:
        val                 The value being converted
        """
        # Identity checks deliberately reject truthy non-bool values.
        if val is True:
            return "enabled"
        elif val is False:
            return "disabled"
        elif val is None:
            return "auto"
        else:
            raise Exception("Bad meson feature value")
854
    def _configure_option(self, opts, key, val):
        """
        Returns the meson flag which signifies the value
        based on the type of the opt

        Parameters:
        opts                Dict of declared meson options (per meson_options.txt)
        key                 OptionKey of the option being set
        val                 The value being converted
        """
        if isinstance(opts[key], coredata.UserBooleanOption):
            str_val = self._configure_boolean(val)
        elif isinstance(opts[key], coredata.UserFeatureOption):
            str_val = self._configure_feature(val)
        else:
            raise Exception('Unknown meson option type')
        return "-D{}={}".format(key, str_val)
871
872    def configure(self, build_for_testing):
873        self.build_for_testing = build_for_testing
874        meson_options = {}
875        if os.path.exists("meson_options.txt"):
876            meson_options = self._parse_options("meson_options.txt")
877        meson_flags = [
878            '-Db_colorout=never',
879            '-Dwerror=true',
880            '-Dwarning_level=3',
881        ]
882        if build_for_testing:
883            meson_flags.append('--buildtype=debug')
884        else:
885            meson_flags.append('--buildtype=debugoptimized')
886        if OptionKey('tests') in meson_options:
887            meson_flags.append(self._configure_option(
888                meson_options, OptionKey('tests'), build_for_testing))
889        if OptionKey('examples') in meson_options:
890            meson_flags.append(self._configure_option(
891                meson_options, OptionKey('examples'), build_for_testing))
892        if OptionKey('itests') in meson_options:
893            meson_flags.append(self._configure_option(
894                meson_options, OptionKey('itests'), INTEGRATION_TEST))
895        if MESON_FLAGS.get(self.package) is not None:
896            meson_flags.extend(MESON_FLAGS.get(self.package))
897        try:
898            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
899                           *meson_flags)
900        except:
901            shutil.rmtree('build')
902            check_call_cmd('meson', 'setup', 'build', *meson_flags)
903
    def build(self):
        """Compile the package with ninja in the 'build' directory."""
        check_call_cmd('ninja', '-C', 'build')
906
    def install(self):
        """Install the built package system-wide via 'ninja install' under sudo."""
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
909
910    def test(self):
911        # It is useful to check various settings of the meson.build file
912        # for compatibility, such as meson_version checks.  We shouldn't
913        # do this in the configure path though because it affects subprojects
914        # and dependencies as well, but we only want this applied to the
915        # project-under-test (otherwise an upstream dependency could fail
916        # this check without our control).
917        self._extra_meson_checks()
918
919        try:
920            test_args = ('--repeat', str(args.repeat), '-C', 'build')
921            check_call_cmd('meson', 'test', '--print-errorlogs', *test_args)
922
923        except CalledProcessError:
924            raise Exception('Unit tests failed')
925
926    def _setup_exists(self, setup):
927        """
928        Returns whether the meson build supports the named test setup.
929
930        Parameter descriptions:
931        setup              The setup target to check
932        """
933        try:
934            with open(os.devnull, 'w') as devnull:
935                output = subprocess.check_output(
936                    ['meson', 'test', '-C', 'build',
937                     '--setup', setup, '-t', '0'],
938                    stderr=subprocess.STDOUT)
939        except CalledProcessError as e:
940            output = e.output
941        output = output.decode('utf-8')
942        return not re.search('Test setup .* not found from project', output)
943
944    def _maybe_valgrind(self):
945        """
946        Potentially runs the unit tests through valgrind for the package
947        via `meson test`. The package can specify custom valgrind
948        configurations by utilizing add_test_setup() in a meson.build
949        """
950        if not is_valgrind_safe():
951            sys.stderr.write("###### Skipping valgrind ######\n")
952            return
953        try:
954            if self._setup_exists('valgrind'):
955                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
956                               '--print-errorlogs', '--setup', 'valgrind')
957            else:
958                check_call_cmd('meson', 'test', '-t', '10', '-C', 'build',
959                               '--print-errorlogs', '--wrapper', 'valgrind')
960        except CalledProcessError:
961            raise Exception('Valgrind tests failed')
962
    def analyze(self):
        """
        Run the static/dynamic analysis suite for the package: valgrind,
        clang-tidy (or scan-build), sanitizers, coverage and cppcheck.
        """
        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            os.environ["CXX"] = "clang++"
            check_call_cmd('meson', 'setup', 'build-clang')
            os.chdir("build-clang")
            try:
                check_call_cmd('run-clang-tidy', '-fix', '-format', '-p', '.')
            except subprocess.CalledProcessError:
                # Show what clang-tidy would change before failing.
                check_call_cmd("git", "-C", CODE_SCAN_DIR,
                               "--no-pager", "diff")
                raise
            finally:
                # Always restore the working directory for later steps.
                os.chdir("..")

        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build', '--print-errorlogs',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Return the build to a non-sanitized configuration.
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any([f.endswith('.gcda') for f in files]):
                check_call_cmd('ninja', '-C', 'build',
                               'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()
1019
1020    def _extra_meson_checks(self):
1021        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
1022            build_contents = f.read()
1023
1024        # Find project's specified meson_version.
1025        meson_version = None
1026        pattern = r"meson_version:[^']*'([^']*)'"
1027        for match in re.finditer(pattern, build_contents):
1028            group = match.group(1)
1029            meson_version = group
1030
1031        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1032        # identify this.  Add to our unit-test checks so that we don't
1033        # get a meson.build missing this.
1034        pattern = r"'cpp_std=c\+\+20'"
1035        for match in re.finditer(pattern, build_contents):
1036            if not meson_version or \
1037                    not meson_version_compare(meson_version, ">=0.57"):
1038                raise Exception(
1039                    "C++20 support requires specifying in meson.build: "
1040                    + "meson_version: '>=0.57'"
1041                )
1042
1043
class Package(object):
    """
    Associates a named package directory with whichever supported build
    systems (Meson, Autotools, CMake) are detected in it, and drives
    install or test runs through them.
    """

    def __init__(self, name=None, path=None):
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of each supported build system whose probe
        succeeds for this package."""
        for system in self.supported:
            candidate = system(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the preferred build system when requested, the first
        detected one otherwise, or None when none probe successfully."""
        detected = list(self.build_systems())

        if not detected:
            return None

        if preferred:
            return {type(s): s for s in detected}[preferred]

        return detected[0]

    def install(self, system=None):
        """Configure, build and install the package (non-test build)."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full build/install/test (and optionally analysis)
        cycle under one build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        if not TEST_ONLY:
            system.analyze()

    def test(self):
        """Run the test cycle under every detected build system."""
        for system in self.build_systems():
            self._test_one(system)
1085
1086
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    names = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        # Skip the contents of meson subprojects that are managed by a
        # .wrap file; files inside them belong to the wrapped project.
        if os.path.basename(root) == 'subprojects':
            for entry in files:
                stem, ext = os.path.splitext(entry)
                if ext == '.wrap' and stem in dirs:
                    dirs.remove(stem)
        matches.extend(os.path.join(root, name)
                       for name in names if name in files)
    return matches
1113
1114
if __name__ == '__main__':
    # Per-package extra flags passed to ./configure for autotools repos.
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # Per-package extra flags passed to 'meson setup' for meson repos.
    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'],
        'phosphor-logging':
        ['-Dopenpower-pel-extension=enabled']
    }

    # Maps configure.ac macro arguments to the OpenBMC repo that provides
    # the library/header/program being checked for.
    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
            'libcr51sign': 'google-misc',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # Regexes that select which of a package's dependencies must be
    # installed before the package itself (tree reorder below).
    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                              action="store_true", required=False, default=True,
                              help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                              action="store_false", required=False,
                              help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    # Promote parsed arguments to module-level globals used throughout
    # the script (including the build-system classes above).
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() is a verbosity-gated print helper; a no-op unless -v.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

        # Check to see if any files changed
        check_call_cmd("git", "-C", CODE_SCAN_DIR,
                       "--no-pager", "diff", "--exit-code")

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Widen the umask so installed dependencies are world-readable;
    # restored after the builds complete.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)