1#!/usr/bin/env python3
2
3"""
This script determines the given package's openbmc dependencies from its
configure.ac file, then downloads, configures, builds, and installs each of
those dependencies. Finally, the given package itself is configured, built, and
installed prior to executing its unit tests.
8"""
9
10from git import Repo
11from mesonbuild import coredata, optinterpreter
12from mesonbuild.mesonlib import OptionKey
13from mesonbuild.mesonlib import version_compare as meson_version_compare
14from urllib.parse import urljoin
15from subprocess import check_call, call, CalledProcessError
16import os
17import sys
18import argparse
19import multiprocessing
20import re
21import subprocess
22import shutil
23import platform
24
25
class DepTree():
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node. Returns the new child.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with the given name. No-op if absent.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found
        (also returns None when `name` is this tree's root).

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        new_paths = []
        if not path:
            path = []
        # A node qualifies either by exact name or by regex match.
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        for child in self.children:
            new_paths.extend(
                child.GetPathRegex(name, regex_str, path + [self.name]))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.
        Both nodes must already exist in the tree.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Find the position of the 'name' node among the matched paths.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every later match that is not already under 'name'.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names (dependencies before dependents).
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
219
220
def check_call_cmd(*cmd):
    """
    Verbose prints the directory location the given command is called from and
    the command, then executes the command using check_call.

    Parameter descriptions:
    cmd                 List of parameters constructing the complete command

    Raises CalledProcessError (via check_call) if the command fails.
    """
    printline(os.getcwd(), ">", " ".join(cmd))
    check_call(cmd)
232
233
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location. Returns the package's working directory.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Reuse an existing checkout rather than cloning again.
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
        repo_inst = clone.working_dir
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # still propagate. Any clone failure falls back to master.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
        repo_inst = clone.working_dir
    return repo_inst
258
259
def make_target_exists(target):
    """
    Check whether the makefile in the current directory provides the given
    target, by dry-running it with `make -n`.

    Parameter descriptions:
    target              The make target we are checking
    """
    with open(os.devnull, 'w') as devnull:
        try:
            check_call(['make', '-n', target],
                       stdout=devnull, stderr=devnull)
        except CalledProcessError:
            return False
    return True
275
276
# Base `make` invocation shared by every parallel build/test step below.
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
286
287
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    pkg = Package()
    # Either run the test suite or install as a dependency.
    action = pkg.test if build_for_testing else pkg.install
    action()
307
308
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy. Returns the updated dep_added dict.

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head

    # The CI harness records already-built dependencies here; anything in
    # the cache line does not need to be cloned or rebuilt.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:  # was `== None`; `is None` is the correct identity test
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        elif not dep_added[dep]:
            # Dependency seen but not finished processing: cyclic dependency.
            raise Exception(f"Cyclic dependencies found in {name}")

    # Mark this package as fully processed (was a redundant conditional set).
    dep_added[name] = True

    return dep_added
367
368
def run_cppcheck():
    """
    Run cppcheck over all git-tracked C/C++ sources in the current directory.
    Raises Exception if cppcheck exits with a failure.
    """
    # C/C++ sources, excluding mako-generated templates.
    source_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    tracked = subprocess.check_output(['git', 'ls-files']).decode('utf-8')
    cppcheck_files = [f for f in tracked.split() if source_re.match(f)]

    if not cppcheck_files:
        # skip cppcheck if there aren't any c or cpp sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--library=googletest', '--file-list=-']

    # Feed the file list over stdin so the command line stays short.
    cppcheck_process = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    (stdout, stderr) = cppcheck_process.communicate(
        input='\n'.join(cppcheck_files).encode('utf-8'))

    if cppcheck_process.wait():
        raise Exception('Cppcheck failed')
    print(stdout.decode('utf-8'))
    print(stderr.decode('utf-8'))
399
400
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles and runs a small probe program under valgrind; any failure
    (missing gcc/valgrind, or valgrind reporting errors) means unsafe.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Was a bare `except:`; keep SystemExit/KeyboardInterrupt fatal.
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The compile step may have failed before producing `exe`; removing
        # unconditionally raised FileNotFoundError out of the finally block.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
435
436
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; any
    failure (missing gcc, unsupported sanitizer runtime) means unsafe.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Was a bare `except:`; keep SystemExit/KeyboardInterrupt fatal.
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The compile step may have failed before producing `exe`; removing
        # unconditionally raised FileNotFoundError out of the finally block.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
458
459
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        check_call_cmd(*(make_parallel + ['check-valgrind']))
    except CalledProcessError:
        # Dump every matching test-suite log to aid debugging, then fail.
        for root, _, files in os.walk(os.getcwd()):
            logs = [f for f in files
                    if re.search('test-suite-[a-z]+.log', f) is not None]
            for f in logs:
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
486
487
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
503
504
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.
    """
    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        # `raise NotImplemented` (the singleton) is a TypeError in Python 3;
        # NotImplementedError is the correct abstract-method exception.
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
606
607
class Autotools(BuildSystem):
    """Build system driver for GNU Autotools (configure.ac) packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # A package is Autotools-based if it ships a configure.ac.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies by expanding configure.ac with
        marker-wrapping m4 overrides and scanning autoconf's output for the
        markers. Relies on the module-level DEPENDENCIES/DEPENDENCIES_OFFSET
        tables (defined elsewhere in this file)."""
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # Redefine each dependency macro so its interesting argument
            # (at DEPENDENCIES_OFFSET) is wrapped in MACRO_START/MACRO_END
            # markers that survive autoconf expansion.
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Feed the augmented configure.ac to autoconf on stdin ('-').
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            # Non-greedy capture of everything between the marker pair.
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    # Prefix match so e.g. versioned module names still hit.
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns an configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        """Bootstrap (if needed) and run ./configure with test/coverage
        features toggled according to build_for_testing and the module-level
        TEST_ONLY / INTEGRATION_TEST / CONFIGURE_FLAGS settings."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        if not TEST_ONLY:
            conf_flags.extend([
                self._configure_feature('code-coverage', build_for_testing),
                self._configure_feature('valgrind', build_for_testing),
            ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found, in order of preference.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        # Parallel `make` of the default target.
        check_call_cmd(*make_parallel)

    def install(self):
        # Requires passwordless sudo (-n) to install system-wide.
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        """Run `make check`, repeating args.repeat times; on failure dump
        every test-suite.log found under the working tree."""
        try:
            cmd = make_parallel + ['check']
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        # Optional valgrind and coverage targets, then static analysis.
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
710
711
class CMake(BuildSystem):
    """Build system driver for CMake (CMakeLists.txt) packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A package is CMake-based if it ships a top-level CMakeLists.txt.
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        """Generate build files in-tree, exporting compile_commands.json so
        analysis tooling (clang-tidy, cppcheck) can consume it."""
        self.build_for_testing = build_for_testing
        if INTEGRATION_TEST:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-DITESTS=ON', '.')
        else:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        # CMake packages are not installed as dependencies by this script.
        pass

    def test(self):
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        """Run clang-tidy (when configured), valgrind, coverage and cppcheck."""
        if TEST_ONLY:
            return

        if os.path.isfile('.clang-tidy'):
            # Idempotently create the build dir; replaces a try/os.mkdir
            # with an unused `except FileExistsError as e` binding.
            os.makedirs("tidy-build", exist_ok=True)
            # clang-tidy needs to run on a clang-specific build
            check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                           '-DCMAKE_CXX_COMPILER=clang++',
                           '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-H.',
                           '-Btidy-build')
            # we need to cd here because otherwise clang-tidy doesn't find the
            # .clang-tidy file in the roots of repos.  Its arguably a "bug"
            # with run-clang-tidy at a minimum it's "weird" that it requires
            # the .clang-tidy to be up a dir
            os.chdir("tidy-build")
            try:
                check_call_cmd('run-clang-tidy.py', "-header-filter=.*", '-p',
                               '.')
            finally:
                os.chdir("..")

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
770
771
772class Meson(BuildSystem):
773    def __init__(self, package=None, path=None):
774        super(Meson, self).__init__(package, path)
775
776    def probe(self):
777        return os.path.isfile(os.path.join(self.path, 'meson.build'))
778
779    def dependencies(self):
780        meson_build = os.path.join(self.path, 'meson.build')
781        if not os.path.exists(meson_build):
782            return []
783
784        found_deps = []
785        for root, dirs, files in os.walk(self.path):
786            if 'meson.build' not in files:
787                continue
788            with open(os.path.join(root, 'meson.build'), 'rt') as f:
789                build_contents = f.read()
790            pattern = r"dependency\('([^']*)'.*?\),?\n"
791            for match in re.finditer(pattern, build_contents):
792                group = match.group(1)
793                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
794                if maybe_dep is not None:
795                    found_deps.append(maybe_dep)
796
797        return found_deps
798
    def _parse_options(self, options_file):
        """
        Returns the set of options defined in the provided meson_options.txt
        file, as parsed by meson's own OptionInterpreter.

        Parameters:
        options_file        The file containing options
        """
        oi = optinterpreter.OptionInterpreter('')
        oi.process(options_file)
        return oi.options
809
810    def _configure_boolean(self, val):
811        """
812        Returns the meson flag which signifies the value
813
814        True is true which requires the boolean.
815        False is false which disables the boolean.
816
817        Parameters:
818        val                 The value being converted
819        """
820        if val is True:
821            return 'true'
822        elif val is False:
823            return 'false'
824        else:
825            raise Exception("Bad meson boolean value")
826
827    def _configure_feature(self, val):
828        """
829        Returns the meson flag which signifies the value
830
831        True is enabled which requires the feature.
832        False is disabled which disables the feature.
833        None is auto which autodetects the feature.
834
835        Parameters:
836        val                 The value being converted
837        """
838        if val is True:
839            return "enabled"
840        elif val is False:
841            return "disabled"
842        elif val is None:
843            return "auto"
844        else:
845            raise Exception("Bad meson feature value")
846
847    def _configure_option(self, opts, key, val):
848        """
849        Returns the meson flag which signifies the value
850        based on the type of the opt
851
852        Parameters:
853        opt                 The meson option which we are setting
854        val                 The value being converted
855        """
856        if isinstance(opts[key], coredata.UserBooleanOption):
857            str_val = self._configure_boolean(val)
858        elif isinstance(opts[key], coredata.UserFeatureOption):
859            str_val = self._configure_feature(val)
860        else:
861            raise Exception('Unknown meson option type')
862        return "-D{}={}".format(key, str_val)
863
864    def configure(self, build_for_testing):
865        self.build_for_testing = build_for_testing
866        meson_options = {}
867        if os.path.exists("meson_options.txt"):
868            meson_options = self._parse_options("meson_options.txt")
869        meson_flags = [
870            '-Db_colorout=never',
871            '-Dwerror=true',
872            '-Dwarning_level=3',
873        ]
874        if build_for_testing:
875            meson_flags.append('--buildtype=debug')
876        else:
877            meson_flags.append('--buildtype=debugoptimized')
878        if OptionKey('tests') in meson_options:
879            meson_flags.append(self._configure_option(meson_options, OptionKey('tests'), build_for_testing))
880        if OptionKey('examples') in meson_options:
881            meson_flags.append(self._configure_option(meson_options, OptionKey('examples'), build_for_testing))
882        if OptionKey('itests') in meson_options:
883            meson_flags.append(self._configure_option(meson_options, OptionKey('itests'), INTEGRATION_TEST))
884        if MESON_FLAGS.get(self.package) is not None:
885            meson_flags.extend(MESON_FLAGS.get(self.package))
886        try:
887            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
888                           *meson_flags)
889        except:
890            shutil.rmtree('build')
891            check_call_cmd('meson', 'setup', 'build', *meson_flags)
892
893    def build(self):
894        check_call_cmd('ninja', '-C', 'build')
895
896    def install(self):
897        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
898
899    def test(self):
900        # It is useful to check various settings of the meson.build file
901        # for compatibility, such as meson_version checks.  We shouldn't
902        # do this in the configure path though because it affects subprojects
903        # and dependencies as well, but we only want this applied to the
904        # project-under-test (otherwise an upstream dependency could fail
905        # this check without our control).
906        self._extra_meson_checks()
907
908        try:
909            test_args = ('--repeat', str(args.repeat), '-C', 'build')
910            check_call_cmd('meson', 'test', *test_args)
911
912        except CalledProcessError:
913            for root, _, files in os.walk(os.getcwd()):
914                if 'testlog.txt' not in files:
915                    continue
916                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
917            raise Exception('Unit tests failed')
918
919    def _setup_exists(self, setup):
920        """
921        Returns whether the meson build supports the named test setup.
922
923        Parameter descriptions:
924        setup              The setup target to check
925        """
926        try:
927            with open(os.devnull, 'w') as devnull:
928                output = subprocess.check_output(
929                        ['meson', 'test', '-C', 'build',
930                         '--setup', setup, '-t', '0'],
931                        stderr=subprocess.STDOUT)
932        except CalledProcessError as e:
933            output = e.output
934        output = output.decode('utf-8')
935        return not re.search('Test setup .* not found from project', output)
936
937    def _maybe_valgrind(self):
938        """
939        Potentially runs the unit tests through valgrind for the package
940        via `meson test`. The package can specify custom valgrind
941        configurations by utilizing add_test_setup() in a meson.build
942        """
943        if not is_valgrind_safe():
944            sys.stderr.write("###### Skipping valgrind ######\n")
945            return
946        try:
947            if self._setup_exists('valgrind'):
948                check_call_cmd('meson', 'test','-t','10','-C', 'build',
949                               '--setup', 'valgrind')
950            else:
951                check_call_cmd('meson', 'test','-t','10', '-C', 'build',
952                               '--wrapper', 'valgrind')
953        except CalledProcessError:
954            for root, _, files in os.walk(os.getcwd()):
955                if 'testlog-valgrind.txt' not in files:
956                    continue
957                cat_args = os.path.join(root, 'testlog-valgrind.txt')
958                check_call_cmd('cat', cat_args)
959            raise Exception('Valgrind tests failed')
960
961    def analyze(self):
962        if TEST_ONLY:
963            return
964
965        self._maybe_valgrind()
966
967        # Run clang-tidy only if the project has a configuration
968        if os.path.isfile('.clang-tidy'):
969            os.environ["CXX"] = "clang++"
970            check_call_cmd('meson', 'setup', 'build-clang')
971            check_call_cmd('run-clang-tidy.py', '-p',
972                           'build-clang')
973        # Run the basic clang static analyzer otherwise
974        else:
975            check_call_cmd('ninja', '-C', 'build',
976                           'scan-build')
977
978        # Run tests through sanitizers
979        # b_lundef is needed if clang++ is CXX since it resolves the
980        # asan symbols at runtime only. We don't want to set it earlier
981        # in the build process to ensure we don't have undefined
982        # runtime code.
983        if is_sanitize_safe():
984            check_call_cmd('meson', 'configure', 'build',
985                           '-Db_sanitize=address,undefined',
986                           '-Db_lundef=false')
987            check_call_cmd('meson', 'test', '-C', 'build',
988                           '--logbase', 'testlog-ubasan')
989            # TODO: Fix memory sanitizer
990            # check_call_cmd('meson', 'configure', 'build',
991            #                '-Db_sanitize=memory')
992            # check_call_cmd('meson', 'test', '-C', 'build'
993            #                '--logbase', 'testlog-msan')
994            check_call_cmd('meson', 'configure', 'build',
995                           '-Db_sanitize=none')
996        else:
997            sys.stderr.write("###### Skipping sanitizers ######\n")
998
999        # Run coverage checks
1000        check_call_cmd('meson', 'configure', 'build',
1001                       '-Db_coverage=true')
1002        self.test()
1003        # Only build coverage HTML if coverage files were produced
1004        for root, dirs, files in os.walk('build'):
1005            if any([f.endswith('.gcda') for f in files]):
1006                check_call_cmd('ninja', '-C', 'build',
1007                               'coverage-html')
1008                break
1009        check_call_cmd('meson', 'configure', 'build',
1010                       '-Db_coverage=false')
1011        run_cppcheck()
1012
1013    def _extra_meson_checks(self):
1014        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
1015            build_contents = f.read()
1016
1017        # Find project's specified meson_version.
1018        meson_version = None
1019        pattern = r"meson_version:[^']*'([^']*)'"
1020        for match in re.finditer(pattern, build_contents):
1021            group = match.group(1)
1022            meson_version = group
1023
1024        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1025        # identify this.  Add to our unit-test checks so that we don't
1026        # get a meson.build missing this.
1027        pattern = r"'cpp_std=c\+\+20'"
1028        for match in re.finditer(pattern, build_contents):
1029            if not meson_version or \
1030                    not meson_version_compare(meson_version, ">=0.57"):
1031                raise Exception(
1032                    "C++20 support requires specifying in meson.build: "
1033                    + "meson_version: '>=0.57'"
1034                )
1035
1036
class Package(object):
    """
    Ties a named source tree to the build system(s) it uses and drives
    install and test flows through whichever system(s) probe successfully.
    """

    def __init__(self, name=None, path=None):
        # Build-system classes to probe, in preference order.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of each supported build system that
        recognizes this package."""
        for system in self.supported:
            candidate = system(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the preferred (or first probed) build system, or None
        when nothing probes successfully."""
        systems = list(self.build_systems())
        if not systems:
            return None
        if preferred:
            # Raises KeyError if the preferred type did not probe.
            return {type(system): system for system in systems}[preferred]
        return systems[0]

    def install(self, system=None):
        """Configure (non-test mode), build, and install the package."""
        if not system:
            system = self.build_system()
        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full configure/build/install/test/analyze pipeline
        for a single build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        system.analyze()

    def test(self):
        """Test the package with every build system that probes true."""
        for system in self.build_systems():
            self._test_one(system)
1077
1078
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    # Normalize the single-name case to a list so one code path suffices.
    names = filename if isinstance(filename, list) else [filename]

    filepaths = []
    for root, _, files in os.walk(basedir):
        filepaths.extend(
            os.path.join(root, name) for name in names if name in files)
    return filepaths
1099
1100
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra autotools ./configure flags applied per-repository.
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra meson setup flags applied per-repository.
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'],
        'phosphor-logging':
        ['-Dopenpower-pel-extension=enabled']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps configure.ac macro arguments to the openbmc repo providing them.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which positional argument of the macro names the dependency.
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Dependencies matching the regex are installed before the keyed repo.
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                        action="store_true", required=False, default=True,
                        help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                        action="store_false", required=False,
                        help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    # Expose the parsed arguments as module-level globals used by the
    # build-system classes defined above.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() prints its arguments only when --verbose was given;
    # otherwise it is a no-op.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Relax the umask while installing dependencies; restored below after
    # the builds complete.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1259