1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from mesonbuild import coredata, optinterpreter
12from mesonbuild.mesonlib import OptionKey
13from urllib.parse import urljoin
14from subprocess import check_call, call, CalledProcessError
15import os
16import sys
17import argparse
18import multiprocessing
19import re
20import subprocess
21import shutil
22import platform
23
24
class DepTree:
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = []

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child node with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                # Safe despite iterating: we return immediately after the
                # single removal.
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            return path + [self.name]
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path ending in 'name'; paths appear in pre-order.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent every later match (not already under 'name') onto it.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names, i.e. dependencies before the
        packages that need them.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
218
219
def check_call_cmd(*cmd):
    """
    Print the current working directory and the command about to run, then
    execute it with check_call (raises CalledProcessError on failure).

    Parameter descriptions:
    cmd                 Command and its arguments, as separate strings
    """
    location = os.getcwd()
    printline(location, ">", " ".join(cmd))
    check_call(cmd)
231
232
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg

    Returns the working directory of the (possibly pre-existing) checkout.
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Reuse an existing checkout rather than cloning again.
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the requested branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Narrowed from a bare 'except:', which would also have swallowed
        # KeyboardInterrupt/SystemExit. Fall back to the master branch.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return clone.working_dir
257
258
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking

    Returns True when `make -n target` succeeds, False otherwise.
    """
    try:
        # -n is a dry run: make fails without building anything when the
        # target is unknown. subprocess.DEVNULL replaces the manual
        # open(os.devnull) dance.
        check_call(['make', '-n', target],
                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except CalledProcessError:
        return False
274
275
# Base command for every parallel `make` invocation in this script; callers
# append their own targets, e.g. make_parallel + ['install'].
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
285
286
def build_and_install(name, build_for_testing=False):
    """
    Build and install the named package inside the environment; when
    build_for_testing is set, build and run its tests instead of installing.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    pkg_root = os.path.join(WORKSPACE, name)
    os.chdir(pkg_root)

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    package = Package()
    action = package.test if build_for_testing else package.install
    action()
306
307
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy.

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node

    Returns the updated dep_added dict.
    """
    if not dep_tree:
        dep_tree = head

    # The dep cache is a single line naming dependencies that are already
    # installed in the environment — presumably written by the caller;
    # TODO confirm against the rest of the script.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:  # identity comparison per PEP 8 (was '== None')
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        # Substring match against the cache line; cached deps are skipped.
        if dep in cache:
            continue
        if dep_added.get(dep) is None:
            # Dependency not seen yet: clone it and recurse so that its own
            # dependencies are added before it is.
            print(f"Adding {dep} dependency to {name}.")
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        elif not dep_added[dep]:
            # Seen but not fully processed: we have looped back onto an
            # ancestor, i.e. a dependency cycle.
            raise Exception("Cyclic dependencies found in "+name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
366
367
def run_cppcheck():
    """Run cppcheck over all git-tracked C/C++ sources, if any exist."""
    # Match .c/.h/.cpp/.hpp files, excluding mako templates.
    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    tracked = subprocess.check_output(['git', 'ls-files']).decode('utf-8')
    cppcheck_files = [f for f in tracked.split() if match_re.match(f)]

    if not cppcheck_files:
        # Nothing to analyze; skip cppcheck entirely.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--library=googletest', '--file-list=-']

    proc = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    file_list = '\n'.join(cppcheck_files).encode('utf-8')
    (stdout, stderr) = proc.communicate(input=file_list)

    if proc.wait():
        raise Exception('Cppcheck failed')
    print(stdout.decode('utf-8'))
    print(stderr.decode('utf-8'))
398
399
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles and runs a tiny heap/strerror_r program under valgrind; any
    failure (compile, missing valgrind, or a valgrind-reported error) means
    valgrind is not usable here.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Narrowed from a bare 'except:' so Ctrl-C still propagates.
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        os.remove(src)
        # The executable does not exist if compilation failed; the previous
        # unconditional remove raised FileNotFoundError out of the finally.
        if os.path.exists(exe):
            os.remove(exe)
434
435
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; any failure
    means the sanitizers are not usable here.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Narrowed from a bare 'except:' so Ctrl-C still propagates.
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        os.remove(src)
        # The executable does not exist if compilation failed; the previous
        # unconditional remove raised FileNotFoundError out of the finally.
        if os.path.exists(exe):
            os.remove(exe)
457
458
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(*cmd)
    except CalledProcessError:
        # Dump the test-suite logs so the failure cause shows up in CI output.
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                # Raw string with the '.' escaped: the original pattern's bare
                # '.' matched any character before 'log'.
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
485
486
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Run the coverage target, translating a make failure into a clearer
    # exception for the caller.
    coverage_cmd = make_parallel + ['check-code-coverage']
    try:
        check_call_cmd(*coverage_cmd)
    except CalledProcessError:
        raise Exception('Code coverage failed')
502
503
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.

    All interface methods raise NotImplementedError; previously they raised
    the NotImplemented singleton, which is a value rather than an exception
    and produces a confusing TypeError when raised.
    """
    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
605
606
class Autotools(BuildSystem):
    """BuildSystem driver for packages configured with GNU Autotools."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An Autotools package is identified by a top-level configure.ac.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies by instrumenting configure.ac.

        Redefines each macro in the module-level DEPENDENCIES table so that
        autoconf's expanded output brackets the macro's dependency argument
        with MACRO_START ... MACRO_END markers, then scrapes those markers
        from autoconf's stdout and maps the names found between them through
        DEPENDENCIES to package names.
        NOTE(review): DEPENDENCIES and DEPENDENCIES_OFFSET are module globals
        defined outside this chunk — presumably macro-name -> {dep-prefix ->
        package} and macro-name -> argument-index maps; confirm there.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Run autoconf on the instrumented input; '-' reads from stdin.
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            # Non-greedy so each START/END pair is captured separately.
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    # Prefix match so versioned names like 'foo >= 1.2' hit.
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        # Bootstrap (if a bootstrap script exists), then ./configure with
        # test/coverage/valgrind features toggled by build_for_testing and
        # the module-level TEST_ONLY / INTEGRATION_TEST / CONFIGURE_FLAGS
        # settings (defined outside this chunk).
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        if not TEST_ONLY:
            conf_flags.extend([
                self._configure_feature('code-coverage', build_for_testing),
                self._configure_feature('valgrind', build_for_testing),
            ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Only the first bootstrap script found is executed.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        # Installation needs root; -n avoids hanging on a sudo prompt.
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        # Run `make check` args.repeat times; on failure, dump every
        # test-suite.log found under the working directory before raising.
        try:
            cmd = make_parallel + ['check']
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        # Valgrind and coverage are make-target based and skip themselves
        # when the target is absent; cppcheck runs over tracked sources.
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
709
710
class CMake(BuildSystem):
    """BuildSystem driver for packages configured with CMake."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        """A CMake package is identified by a top-level CMakeLists.txt."""
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        """Dependency extraction is not implemented for CMake; assume none."""
        return []

    def configure(self, build_for_testing):
        """Generate the build tree with compile-commands export enabled."""
        self.build_for_testing = build_for_testing
        if INTEGRATION_TEST:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-DITESTS=ON', '.')
        else:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        """Build using all available cpus."""
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        """Installation is not supported for CMake packages."""
        pass

    def test(self):
        """Run ctest when the generated makefile exposes a 'test' target."""
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        """Run clang-tidy (when configured), then valgrind/coverage/cppcheck."""
        if TEST_ONLY:
            return

        if os.path.isfile('.clang-tidy'):
            # Idempotent replacement for try/mkdir/except-FileExistsError,
            # which also bound an unused exception variable.
            os.makedirs("tidy-build", exist_ok=True)
            # clang-tidy needs to run on a clang-specific build
            check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                           '-DCMAKE_CXX_COMPILER=clang++',
                           '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-H.',
                           '-Btidy-build')
            # we need to cd here because otherwise clang-tidy doesn't find the
            # .clang-tidy file in the roots of repos.  Its arguably a "bug"
            # with run-clang-tidy at a minimum it's "weird" that it requires
            # the .clang-tidy to be up a dir
            os.chdir("tidy-build")
            try:
                check_call_cmd('run-clang-tidy.py', "-header-filter=.*", '-p',
                               '.')
            finally:
                os.chdir("..")

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
769
770
771class Meson(BuildSystem):
772    def __init__(self, package=None, path=None):
773        super(Meson, self).__init__(package, path)
774
775    def probe(self):
776        return os.path.isfile(os.path.join(self.path, 'meson.build'))
777
778    def dependencies(self):
779        meson_build = os.path.join(self.path, 'meson.build')
780        if not os.path.exists(meson_build):
781            return []
782
783        found_deps = []
784        for root, dirs, files in os.walk(self.path):
785            if 'meson.build' not in files:
786                continue
787            with open(os.path.join(root, 'meson.build'), 'rt') as f:
788                build_contents = f.read()
789            pattern = r"dependency\('([^']*)'.*?\),?\n"
790            for match in re.finditer(pattern, build_contents):
791                group = match.group(1)
792                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
793                if maybe_dep is not None:
794                    found_deps.append(maybe_dep)
795
796        return found_deps
797
798    def _parse_options(self, options_file):
799        """
800        Returns a set of options defined in the provides meson_options.txt file
801
802        Parameters:
803        options_file        The file containing options
804        """
805        oi = optinterpreter.OptionInterpreter('')
806        oi.process(options_file)
807        return oi.options
808
809    def _configure_boolean(self, val):
810        """
811        Returns the meson flag which signifies the value
812
813        True is true which requires the boolean.
814        False is false which disables the boolean.
815
816        Parameters:
817        val                 The value being converted
818        """
819        if val is True:
820            return 'true'
821        elif val is False:
822            return 'false'
823        else:
824            raise Exception("Bad meson boolean value")
825
826    def _configure_feature(self, val):
827        """
828        Returns the meson flag which signifies the value
829
830        True is enabled which requires the feature.
831        False is disabled which disables the feature.
832        None is auto which autodetects the feature.
833
834        Parameters:
835        val                 The value being converted
836        """
837        if val is True:
838            return "enabled"
839        elif val is False:
840            return "disabled"
841        elif val is None:
842            return "auto"
843        else:
844            raise Exception("Bad meson feature value")
845
846    def _configure_option(self, opts, key, val):
847        """
848        Returns the meson flag which signifies the value
849        based on the type of the opt
850
851        Parameters:
852        opt                 The meson option which we are setting
853        val                 The value being converted
854        """
855        if isinstance(opts[key], coredata.UserBooleanOption):
856            str_val = self._configure_boolean(val)
857        elif isinstance(opts[key], coredata.UserFeatureOption):
858            str_val = self._configure_feature(val)
859        else:
860            raise Exception('Unknown meson option type')
861        return "-D{}={}".format(key, str_val)
862
863    def configure(self, build_for_testing):
864        self.build_for_testing = build_for_testing
865        meson_options = {}
866        if os.path.exists("meson_options.txt"):
867            meson_options = self._parse_options("meson_options.txt")
868        meson_flags = [
869            '-Db_colorout=never',
870            '-Dwerror=true',
871            '-Dwarning_level=3',
872        ]
873        if build_for_testing:
874            meson_flags.append('--buildtype=debug')
875        else:
876            meson_flags.append('--buildtype=debugoptimized')
877        if OptionKey('tests') in meson_options:
878            meson_flags.append(self._configure_option(meson_options, OptionKey('tests'), build_for_testing))
879        if OptionKey('examples') in meson_options:
880            meson_flags.append(self._configure_option(meson_options, OptionKey('examples'), build_for_testing))
881        if OptionKey('itests') in meson_options:
882            meson_flags.append(self._configure_option(meson_options, OptionKey('itests'), INTEGRATION_TEST))
883        if MESON_FLAGS.get(self.package) is not None:
884            meson_flags.extend(MESON_FLAGS.get(self.package))
885        try:
886            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
887                           *meson_flags)
888        except:
889            shutil.rmtree('build')
890            check_call_cmd('meson', 'setup', 'build', *meson_flags)
891
892    def build(self):
893        check_call_cmd('ninja', '-C', 'build')
894
895    def install(self):
896        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
897
898    def test(self):
899        try:
900            test_args = ('--repeat', str(args.repeat), '-C', 'build')
901            check_call_cmd('meson', 'test', *test_args)
902
903        except CalledProcessError:
904            for root, _, files in os.walk(os.getcwd()):
905                if 'testlog.txt' not in files:
906                    continue
907                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
908            raise Exception('Unit tests failed')
909
910    def _setup_exists(self, setup):
911        """
912        Returns whether the meson build supports the named test setup.
913
914        Parameter descriptions:
915        setup              The setup target to check
916        """
917        try:
918            with open(os.devnull, 'w') as devnull:
919                output = subprocess.check_output(
920                        ['meson', 'test', '-C', 'build',
921                         '--setup', setup, '-t', '0'],
922                        stderr=subprocess.STDOUT)
923        except CalledProcessError as e:
924            output = e.output
925        output = output.decode('utf-8')
926        return not re.search('Test setup .* not found from project', output)
927
928    def _maybe_valgrind(self):
929        """
930        Potentially runs the unit tests through valgrind for the package
931        via `meson test`. The package can specify custom valgrind
932        configurations by utilizing add_test_setup() in a meson.build
933        """
934        if not is_valgrind_safe():
935            sys.stderr.write("###### Skipping valgrind ######\n")
936            return
937        try:
938            if self._setup_exists('valgrind'):
939                check_call_cmd('meson', 'test', '-C', 'build',
940                               '--setup', 'valgrind')
941            else:
942                check_call_cmd('meson', 'test', '-C', 'build',
943                               '--wrapper', 'valgrind')
944        except CalledProcessError:
945            for root, _, files in os.walk(os.getcwd()):
946                if 'testlog-valgrind.txt' not in files:
947                    continue
948                cat_args = os.path.join(root, 'testlog-valgrind.txt')
949                check_call_cmd('cat', cat_args)
950            raise Exception('Valgrind tests failed')
951
952    def analyze(self):
953        if TEST_ONLY:
954            return
955
956        self._maybe_valgrind()
957
958        # Run clang-tidy only if the project has a configuration
959        if os.path.isfile('.clang-tidy'):
960            os.environ["CXX"] = "clang++"
961            check_call_cmd('meson', 'setup', 'build-clang')
962            check_call_cmd('run-clang-tidy.py', '-p',
963                           'build-clang')
964        # Run the basic clang static analyzer otherwise
965        else:
966            check_call_cmd('ninja', '-C', 'build',
967                           'scan-build')
968
969        # Run tests through sanitizers
970        # b_lundef is needed if clang++ is CXX since it resolves the
971        # asan symbols at runtime only. We don't want to set it earlier
972        # in the build process to ensure we don't have undefined
973        # runtime code.
974        if is_sanitize_safe():
975            check_call_cmd('meson', 'configure', 'build',
976                           '-Db_sanitize=address,undefined',
977                           '-Db_lundef=false')
978            check_call_cmd('meson', 'test', '-C', 'build',
979                           '--logbase', 'testlog-ubasan')
980            # TODO: Fix memory sanitizer
981            # check_call_cmd('meson', 'configure', 'build',
982            #                '-Db_sanitize=memory')
983            # check_call_cmd('meson', 'test', '-C', 'build'
984            #                '--logbase', 'testlog-msan')
985            check_call_cmd('meson', 'configure', 'build',
986                           '-Db_sanitize=none')
987        else:
988            sys.stderr.write("###### Skipping sanitizers ######\n")
989
990        # Run coverage checks
991        check_call_cmd('meson', 'configure', 'build',
992                       '-Db_coverage=true')
993        self.test()
994        # Only build coverage HTML if coverage files were produced
995        for root, dirs, files in os.walk('build'):
996            if any([f.endswith('.gcda') for f in files]):
997                check_call_cmd('ninja', '-C', 'build',
998                               'coverage-html')
999                break
1000        check_call_cmd('meson', 'configure', 'build',
1001                       '-Db_coverage=false')
1002        run_cppcheck()
1003
1004
class Package(object):
    """
    Associates a package checkout with the build system(s) it supports
    and provides install/test entry points over them.
    """

    def __init__(self, name=None, path=None):
        # Candidate build-system classes, probed in this order.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of each supported build system whose probe
        succeeds for this package."""
        for system in self.supported:
            candidate = system(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the preferred build system if requested, else the first
        one that probed successfully, or None if none did."""
        systems = list(self.build_systems())
        if not systems:
            return None
        if preferred:
            by_type = {type(system): system for system in systems}
            return by_type[preferred]
        return systems[0]

    def install(self, system=None):
        """Configure (non-test), build, and install the package using the
        given build system, or the detected one when omitted."""
        if not system:
            system = self.build_system()
        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full configure/build/install/test/analyze cycle for a
        single build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        system.analyze()

    def test(self):
        """Exercise the package under every build system it supports."""
        for system in self.build_systems():
            self._test_one(system)
1045
1046
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) under the base
    directory and returns their paths (rooted at basedir).

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory to search in
    """
    targets = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, _, files in os.walk(basedir):
        matches.extend(os.path.join(root, target)
                       for target in targets if target in files)
    return matches
1067
1068
if __name__ == '__main__':
    # Script entry point: parse arguments, optionally format-check the
    # repo, build and install its openbmc dependencies, then build and
    # unit test the target package itself.

    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra autoconf flags required by specific repositories.
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra meson flags required by specific repositories; read by the
    # meson build system's configure() above.
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps configure.ac macro arguments to the openbmc repository that
    # provides them, used when walking a package's dependency tree.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which macro argument names the dependency being checked for.
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Packages whose dependency order must be adjusted: the named repo is
    # reordered after any dependency matching the regex.
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    # --integration-tests / --no-integration-tests are mutually exclusive
    # toggles writing the same INTEGRATION_TEST destination.
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                        action="store_true", required=False, default=True,
                        help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                        action="store_false", required=False,
                        help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    # Promote frequently used arguments to module-level globals; the
    # build-system classes defined above read several of these directly
    # (e.g. TEST_ONLY, INTEGRATION_TEST, MESON_FLAGS, args.repeat).
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() is a verbosity-gated print helper: a real printer when
    # --verbose was given, otherwise a no-op.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    # Directory containing the package checkout to be scanned and built.
    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask so files created during build/install are not
    # permission-restricted; the previous value is restored afterwards.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)