1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from mesonbuild import coredata, optinterpreter
12from urllib.parse import urljoin
13from subprocess import check_call, call, CalledProcessError
14import os
15import sys
16import argparse
17import multiprocessing
18import re
19import subprocess
20import shutil
21import platform
22
23
class DepTree():
    """
    Represents a package dependency tree, where each node is a DepTree with a
    name and a list of DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first child node with a matching name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found.

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        # Every child path runs through the current node; build it once.
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        if not self.GetPath(name):
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends in 'name'; nothing to do if absent.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        # Re-parent matching nodes found after 'name', unless they already
        # live beneath the 'name' node.
        for path in paths[name_index + 1:]:
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names, i.e. dependencies appear
        before the packages that need them.
        """
        install_list = []
        for child in self.children:
            install_list.extend(child.GetInstallList())
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
217
218
def check_call_cmd(*cmd):
    """
    Echo the working directory and the command about to run, then execute
    it with check_call (raises CalledProcessError on failure).

    Parameter descriptions:
    cmd                 Strings forming the complete command
    """
    rendered = " ".join(cmd)
    printline(os.getcwd(), ">", rendered)
    check_call(cmd)
230
231
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location. Returns the package's working directory.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        # Already cloned on a previous run; reuse it.
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
        repo_inst = clone.working_dir
    except Exception:
        # Catch Exception rather than a bare 'except' so KeyboardInterrupt
        # and SystemExit still abort the run.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
        repo_inst = clone.working_dir
    return repo_inst
256
257
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    try:
        # `make -n` dry-runs the target: it fails quickly if the target is
        # unknown without building anything.
        check_call(['make', '-n', target],
                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except CalledProcessError:
        return False
273
274
# Base `make` invocation reused by the build/test helpers below: parallelism
# is bounded by the CPU count for both job slots (-j) and system load (-l).
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', str(multiprocessing.cpu_count()),
    # Don't start more jobs if the load avg is too high
    '-l', str(multiprocessing.cpu_count()),
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
284
285
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    package = Package()
    # Test builds exercise the package; otherwise install it as a dependency.
    action = package.test if build_for_testing else package.install
    action()
305
306
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head

    # The depcache names dependencies already present in the environment;
    # those are skipped below.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    # Use 'is None', not '== None', per PEP 8 identity comparison.
    if build is None:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # A dependency marked False is still being processed higher
                # up the recursion: cyclic dependency failure.
                raise Exception("Cyclic dependencies found in "+name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
365
366
def run_cppcheck():
    """
    Run cppcheck across all C/C++ sources tracked by git, excluding mako
    templates. Raises an exception if cppcheck exits with failure.
    """
    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    stdout = subprocess.check_output(['git', 'ls-files'])
    cppcheck_files = [f for f in stdout.decode('utf-8').split()
                      if match_re.match(f)]

    if not cppcheck_files:
        # skip cppcheck if there aren't any c or cpp sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--library=googletest', '--file-list=-']

    cppcheck_process = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    (stdout, stderr) = cppcheck_process.communicate(
        input='\n'.join(cppcheck_files).encode('utf-8'))

    # Print the tool output before checking status so diagnostics are not
    # lost when cppcheck fails.
    print(stdout.decode('utf-8'))
    print(stderr.decode('utf-8'))

    # communicate() already waited for the process to exit.
    if cppcheck_process.returncode:
        raise Exception('Cppcheck failed')
397
398
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform, by
    compiling and running a small probe program under valgrind.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Catch Exception (not bare 'except') so KeyboardInterrupt and
        # SystemExit still propagate.
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        os.remove(src)
        # The executable only exists if compilation succeeded; removing it
        # unconditionally would raise FileNotFoundError and mask the error.
        if os.path.exists(exe):
            os.remove(exe)
433
434
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform, by
    compiling and running a trivial program with ASan/UBSan enabled.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Catch Exception (not bare 'except') so KeyboardInterrupt and
        # SystemExit still propagate.
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        os.remove(src)
        # The executable only exists if compilation succeeded; removing it
        # unconditionally would raise FileNotFoundError and mask the error.
        if os.path.exists(exe):
            os.remove(exe)
456
457
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        cmd = make_parallel + ['check-valgrind']
        check_call_cmd(*cmd)
    except CalledProcessError:
        # Dump every valgrind test-suite log so the failure is diagnosable.
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                # The '.' before 'log' is escaped so it only matches a
                # literal dot (was previously an any-character wildcard).
                if re.search(r'test-suite-[a-z]+\.log', f) is None:
                    continue
                check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
484
485
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        cmd = make_parallel + ['check-code-coverage']
        check_call_cmd(*cmd)
    except CalledProcessError as err:
        # Chain the original failure so its exit status is not lost.
        raise Exception('Code coverage failed') from err
501
502
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.
    """
    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        # NOTE: 'raise NotImplemented' (the singleton, not the exception)
        # produced a TypeError in Python 3; NotImplementedError is correct.
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
604
605
class Autotools(BuildSystem):
    """BuildSystem driver for GNU Autotools (configure.ac) packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # An Autotools package is identified by its configure.ac file.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies by expanding configure.ac.

        Each known dependency-declaring macro (keys of the module-level
        DEPENDENCIES map) is redefined so that its relevant argument is
        wrapped in MACRO_START/MACRO_END markers, then autoconf expands
        the document and the markers are parsed back out of the output.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # DEPENDENCIES_OFFSET selects which macro argument ($N, 1-based)
            # carries the dependency text for this macro.
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Feed the combined document to autoconf on stdin ('-').
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            # Match by prefix so version suffixes (e.g. 'foo >= 1.0') still
            # resolve to the known dependency name.
            for potential_dep in deptext.split(' '):
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        """Bootstrap (if needed) and run ./configure with standard flags."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        if not TEST_ONLY:
            conf_flags.extend([
                self._configure_feature('code-coverage', build_for_testing),
                self._configure_feature('valgrind', build_for_testing),
            ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found, in order of preference.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        check_call_cmd(*make_parallel)

    def install(self):
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        """Run `make check`, repeating per the CLI --repeat option."""
        try:
            cmd = make_parallel + ['check']
            # NOTE(review): 'args' is the module-level argparse namespace
            # parsed elsewhere in this file.
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)
        except CalledProcessError:
            # Dump test-suite.log files so the failure is diagnosable.
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
708
709
class CMake(BuildSystem):
    """BuildSystem driver for CMake (CMakeLists.txt) packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A CMake package is identified by its top-level CMakeLists.txt.
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        # Dependency extraction is not implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        """Generate build files, exporting compile commands for analyzers."""
        self.build_for_testing = build_for_testing
        if INTEGRATION_TEST:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-DITESTS=ON', '.')
        else:
            check_call_cmd('cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '.')

    def build(self):
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        # CMake dependencies are not installed by this script.
        pass

    def test(self):
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        """Run clang-tidy (when configured), valgrind, coverage, cppcheck."""
        if TEST_ONLY:
            return

        if os.path.isfile('.clang-tidy'):
            # exist_ok replaces the old try/mkdir/except FileExistsError
            # dance and covers repeat runs in the same tree.
            os.makedirs("tidy-build", exist_ok=True)
            # clang-tidy needs to run on a clang-specific build
            check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                           '-DCMAKE_CXX_COMPILER=clang++',
                           '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-H.',
                           '-Btidy-build')
            # we need to cd here because otherwise clang-tidy doesn't find the
            # .clang-tidy file in the roots of repos.  Its arguably a "bug"
            # with run-clang-tidy at a minimum it's "weird" that it requires
            # the .clang-tidy to be up a dir
            os.chdir("tidy-build")
            try:
                check_call_cmd('run-clang-tidy.py', "-header-filter=.*", '-p',
                               '.')
            finally:
                os.chdir("..")

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
768
769
770class Meson(BuildSystem):
771    def __init__(self, package=None, path=None):
772        super(Meson, self).__init__(package, path)
773
774    def probe(self):
775        return os.path.isfile(os.path.join(self.path, 'meson.build'))
776
777    def dependencies(self):
778        meson_build = os.path.join(self.path, 'meson.build')
779        if not os.path.exists(meson_build):
780            return []
781
782        found_deps = []
783        for root, dirs, files in os.walk(self.path):
784            if 'meson.build' not in files:
785                continue
786            with open(os.path.join(root, 'meson.build'), 'rt') as f:
787                build_contents = f.read()
788            pattern = r"dependency\('([^']*)'.*?\),?\n"
789            for match in re.finditer(pattern, build_contents):
790                group = match.group(1)
791                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
792                if maybe_dep is not None:
793                    found_deps.append(maybe_dep)
794
795        return found_deps
796
797    def _parse_options(self, options_file):
798        """
799        Returns a set of options defined in the provides meson_options.txt file
800
801        Parameters:
802        options_file        The file containing options
803        """
804        oi = optinterpreter.OptionInterpreter('')
805        oi.process(options_file)
806        return oi.options
807
808    def _configure_boolean(self, val):
809        """
810        Returns the meson flag which signifies the value
811
812        True is true which requires the boolean.
813        False is false which disables the boolean.
814
815        Parameters:
816        val                 The value being converted
817        """
818        if val is True:
819            return 'true'
820        elif val is False:
821            return 'false'
822        else:
823            raise Exception("Bad meson boolean value")
824
825    def _configure_feature(self, val):
826        """
827        Returns the meson flag which signifies the value
828
829        True is enabled which requires the feature.
830        False is disabled which disables the feature.
831        None is auto which autodetects the feature.
832
833        Parameters:
834        val                 The value being converted
835        """
836        if val is True:
837            return "enabled"
838        elif val is False:
839            return "disabled"
840        elif val is None:
841            return "auto"
842        else:
843            raise Exception("Bad meson feature value")
844
845    def _configure_option(self, opts, key, val):
846        """
847        Returns the meson flag which signifies the value
848        based on the type of the opt
849
850        Parameters:
851        opt                 The meson option which we are setting
852        val                 The value being converted
853        """
854        if isinstance(opts[key], coredata.UserBooleanOption):
855            str_val = self._configure_boolean(val)
856        elif isinstance(opts[key], coredata.UserFeatureOption):
857            str_val = self._configure_feature(val)
858        else:
859            raise Exception('Unknown meson option type')
860        return "-D{}={}".format(key, str_val)
861
862    def configure(self, build_for_testing):
863        self.build_for_testing = build_for_testing
864        meson_options = {}
865        if os.path.exists("meson_options.txt"):
866            meson_options = self._parse_options("meson_options.txt")
867        meson_flags = [
868            '-Db_colorout=never',
869            '-Dwerror=true',
870            '-Dwarning_level=3',
871        ]
872        if build_for_testing:
873            meson_flags.append('--buildtype=debug')
874        else:
875            meson_flags.append('--buildtype=debugoptimized')
876        if 'tests' in meson_options:
877            meson_flags.append(self._configure_option(meson_options, 'tests', build_for_testing))
878        if 'examples' in meson_options:
879            meson_flags.append(self._configure_option(meson_options, 'examples', build_for_testing))
880        if 'itests' in meson_options:
881            meson_flags.append(self._configure_option(meson_options, 'itests', INTEGRATION_TEST))
882        if MESON_FLAGS.get(self.package) is not None:
883            meson_flags.extend(MESON_FLAGS.get(self.package))
884        try:
885            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
886                           *meson_flags)
887        except:
888            shutil.rmtree('build')
889            check_call_cmd('meson', 'setup', 'build', *meson_flags)
890
891    def build(self):
892        check_call_cmd('ninja', '-C', 'build')
893
894    def install(self):
895        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
896
897    def test(self):
898        try:
899            test_args = ('--repeat', str(args.repeat), '-C', 'build')
900            check_call_cmd('meson', 'test', *test_args)
901
902        except CalledProcessError:
903            for root, _, files in os.walk(os.getcwd()):
904                if 'testlog.txt' not in files:
905                    continue
906                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
907            raise Exception('Unit tests failed')
908
909    def _setup_exists(self, setup):
910        """
911        Returns whether the meson build supports the named test setup.
912
913        Parameter descriptions:
914        setup              The setup target to check
915        """
916        try:
917            with open(os.devnull, 'w') as devnull:
918                output = subprocess.check_output(
919                        ['meson', 'test', '-C', 'build',
920                         '--setup', setup, '-t', '0'],
921                        stderr=subprocess.STDOUT)
922        except CalledProcessError as e:
923            output = e.output
924        output = output.decode('utf-8')
925        return not re.search('Test setup .* not found from project', output)
926
927    def _maybe_valgrind(self):
928        """
929        Potentially runs the unit tests through valgrind for the package
930        via `meson test`. The package can specify custom valgrind
931        configurations by utilizing add_test_setup() in a meson.build
932        """
933        if not is_valgrind_safe():
934            sys.stderr.write("###### Skipping valgrind ######\n")
935            return
936        try:
937            if self._setup_exists('valgrind'):
938                check_call_cmd('meson', 'test', '-C', 'build',
939                               '--setup', 'valgrind')
940            else:
941                check_call_cmd('meson', 'test', '-C', 'build',
942                               '--wrapper', 'valgrind')
943        except CalledProcessError:
944            for root, _, files in os.walk(os.getcwd()):
945                if 'testlog-valgrind.txt' not in files:
946                    continue
947                cat_args = os.path.join(root, 'testlog-valgrind.txt')
948                check_call_cmd('cat', cat_args)
949            raise Exception('Valgrind tests failed')
950
    def analyze(self):
        """
        Run the post-build analysis suite on the meson build: valgrind
        (if safe), clang-tidy or scan-build, address/undefined sanitizers
        (if safe), coverage, and cppcheck. Skipped entirely when the
        script was invoked with --test-only.
        """
        # --test-only requested: no static/dynamic analysis.
        if TEST_ONLY:
            return

        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            # clang-tidy needs a clang compile database; configure a
            # separate build dir with clang++ so 'build' stays untouched.
            os.environ["CXX"] = "clang++"
            check_call_cmd('meson', 'setup', 'build-clang')
            check_call_cmd('run-clang-tidy.py', '-p',
                           'build-clang')
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Restore the build dir to an unsanitized state for the
            # coverage run below.
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any([f.endswith('.gcda') for f in files]):
                check_call_cmd('ninja', '-C', 'build',
                               'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()
1002
1003
class Package(object):
    """
    Represents a source package and the build system(s) that apply to it.
    Probes each supported build system against the package directory and
    drives configure/build/install/test through whichever ones match.
    """

    def __init__(self, name=None, path=None):
        """
        Create a Package.

        Parameter descriptions:
        name               Package (repository) name
        path               Path to the package source tree
        """
        # Probe order doubles as preference order.
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Yield an instance of every supported build system that probes
        successfully against this package."""
        for system_cls in self.supported:
            candidate = system_cls(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the preferred build-system instance if requested,
        otherwise the first one that probed; None if none matched."""
        systems = list(self.build_systems())

        if not systems:
            return None

        if preferred:
            return {type(system): system for system in systems}[preferred]

        return systems[0]

    def install(self, system=None):
        """Configure (non-testing), build, and install the package using
        the given build system, probing for one when omitted."""
        if system is None:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the full test flow for a single build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        system.analyze()

    def test(self):
        """Run the test flow for every build system that applies."""
        for system in self.build_systems():
            self._test_one(system)
1044
1045
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    # Normalize to a list so a single name and a list are handled alike.
    wanted = filename if isinstance(filename, list) else [filename]

    matches = []
    for root, dirs, files in os.walk(basedir):
        matches.extend(os.path.join(root, name)
                       for name in wanted if name in files)
    return matches
1066
1067
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps configure.ac macro arguments to the OpenBMC repo that provides them.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                        action="store_true", required=False, default=True,
                        help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                        action="store_false", required=False,
                        help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline() emits status messages only when --verbose was given;
    # otherwise it is a no-op.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask so files created while installing dependencies are
    # world-accessible; the original umask is restored after the build.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)
1224