1#!/usr/bin/env python3
2
3"""
4This script determines the given package's openbmc dependencies from its
5configure.ac file where it downloads, configures, builds, and installs each of
6these dependencies. Then the given package is configured, built, and installed
7prior to executing its unit tests.
8"""
9
10from git import Repo
11from mesonbuild import coredata, optinterpreter
12from mesonbuild.mesonlib import OptionKey
13from mesonbuild.mesonlib import version_compare as meson_version_compare
14from urllib.parse import urljoin
15from subprocess import check_call, call, CalledProcessError
16import os
17import sys
18import argparse
19import multiprocessing
20import re
21import subprocess
22import shutil
23import platform
24
25
class DepTree():
    """
    Represents package dependency tree, where each node is a DepTree with a
    name and DepTree children.
    """

    def __init__(self, name):
        """
        Create new DepTree.

        Parameter descriptions:
        name               Name of new tree node.
        """
        self.name = name
        self.children = list()

    def AddChild(self, name):
        """
        Add new child node to current node and return it.

        Parameter descriptions:
        name               Name of new child
        """
        new_child = DepTree(name)
        self.children.append(new_child)
        return new_child

    def AddChildNode(self, node):
        """
        Add existing child node to current node.

        Parameter descriptions:
        node               Tree node to add
        """
        self.children.append(node)

    def RemoveChild(self, name):
        """
        Remove the first direct child with the given name, if present.

        Parameter descriptions:
        name               Name of child to remove
        """
        for child in self.children:
            if child.name == name:
                # Safe despite mutating during iteration: we return
                # immediately after the removal.
                self.children.remove(child)
                return

    def GetNode(self, name):
        """
        Return node with matching name (pre-order search).
        Return None if not found.

        Parameter descriptions:
        name               Name of node to return
        """
        if self.name == name:
            return self
        for child in self.children:
            node = child.GetNode(name)
            if node:
                return node
        return None

    def GetParentNode(self, name, parent_node=None):
        """
        Return parent of node with matching name. Return None if not found
        or if the matching node is the tree head (which has no parent).

        Parameter descriptions:
        name               Name of node to get parent of
        parent_node        Parent of current node
        """
        if self.name == name:
            return parent_node
        for child in self.children:
            found_node = child.GetParentNode(name, self)
            if found_node:
                return found_node
        return None

    def GetPath(self, name, path=None):
        """
        Return list of node names from head to matching name.
        Return None if not found.

        Parameter descriptions:
        name               Name of node
        path               List of node names from head to current node
        """
        if not path:
            path = []
        if self.name == name:
            path.append(self.name)
            return path
        for child in self.children:
            match = child.GetPath(name, path + [self.name])
            if match:
                return match
        return None

    def GetPathRegex(self, name, regex_str, path=None):
        """
        Return list of node paths that end in name, or match regex_str.
        Return empty list if not found.

        Parameter descriptions:
        name               Name of node to search for
        regex_str          Regex string to match node names
        path               Path of node names from head to current node
        """
        if not path:
            path = []
        new_paths = []
        if (self.name == name) or re.match(regex_str, self.name):
            new_paths.append(path + [self.name])
        full_path = path + [self.name]
        for child in self.children:
            new_paths.extend(child.GetPathRegex(name, regex_str, full_path))
        return new_paths

    def MoveNode(self, from_name, to_name):
        """
        Move existing from_name node to become child of to_name node.

        Both nodes must exist, and from_name must not be the tree head
        (it needs a parent to be detached from).

        Parameter descriptions:
        from_name          Name of node to make a child of to_name
        to_name            Name of node to make parent of from_name
        """
        parent_from_node = self.GetParentNode(from_name)
        from_node = self.GetNode(from_name)
        parent_from_node.RemoveChild(from_name)
        to_node = self.GetNode(to_name)
        to_node.AddChildNode(from_node)

    def ReorderDeps(self, name, regex_str):
        """
        Reorder dependency tree.  If tree contains nodes with names that
        match 'name' and 'regex_str', move 'regex_str' nodes that are
        to the right of 'name' node, so that they become children of the
        'name' node.

        Parameter descriptions:
        name               Name of node to look for
        regex_str          Regex string to match names to
        """
        name_path = self.GetPath(name)
        if not name_path:
            return
        paths = self.GetPathRegex(name, regex_str)
        # Locate the path that ends at 'name'; matches after it in
        # pre-order are reparented below.
        name_index = None
        for i, path in enumerate(paths):
            if path[-1] == name:
                name_index = i
                break
        if name_index is None:
            return
        for path in paths[name_index + 1:]:
            # Skip nodes already below 'name'
            if name in path:
                continue
            self.MoveNode(path[-1], name)

    def GetInstallList(self):
        """
        Return post-order list of node names.

        Parameter descriptions:
        """
        install_list = []
        for child in self.children:
            child_install_list = child.GetInstallList()
            install_list.extend(child_install_list)
        install_list.append(self.name)
        return install_list

    def PrintTree(self, level=0):
        """
        Print pre-order node names with indentation denoting node depth level.

        Parameter descriptions:
        level              Current depth level
        """
        INDENT_PER_LEVEL = 4
        print(' ' * (level * INDENT_PER_LEVEL) + self.name)
        for child in self.children:
            child.PrintTree(level + 1)
220
def check_call_cmd(*cmd):
    """
    Verbosely print the current working directory and the command about to
    run, then execute it; raises CalledProcessError on a non-zero exit.

    Parameter descriptions:
    cmd                 Parameters constructing the complete command
    """
    command = list(cmd)
    printline(os.getcwd(), ">", " ".join(command))
    check_call(command)
232
233
def clone_pkg(pkg, branch):
    """
    Clone the given openbmc package's git repository from gerrit into
    the WORKSPACE location.

    Returns the path to the package's working directory.

    Parameter descriptions:
    pkg                 Name of the package to clone
    branch              Branch to clone from pkg
    """
    pkg_dir = os.path.join(WORKSPACE, pkg)
    # Reuse an existing checkout if the package was already cloned.
    if os.path.exists(os.path.join(pkg_dir, '.git')):
        return pkg_dir
    pkg_repo = urljoin('https://gerrit.openbmc-project.xyz/openbmc/', pkg)
    os.mkdir(pkg_dir)
    printline(pkg_dir, "> git clone", pkg_repo, branch, "./")
    try:
        # first try the branch
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch=branch)
    except Exception:
        # Not a bare except: let KeyboardInterrupt/SystemExit propagate.
        # Any clone failure falls back to the master branch.
        printline("Input branch not found, default to master")
        clone = Repo.clone_from(pkg_repo, pkg_dir, branch="master")
    return clone.working_dir
258
259
def make_target_exists(target):
    """
    Runs a check against the makefile in the current directory to determine
    if the target exists so that it can be built.

    Parameter descriptions:
    target              The make target we are checking
    """
    # `make -n` dry-runs the target; a non-zero exit means it is unknown.
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['make', '-n', target],
                       stdout=devnull, stderr=devnull)
    except CalledProcessError:
        return False
    return True
275
276
# Shared `make` invocation tuned for parallel builds.
_ncpus = str(multiprocessing.cpu_count())
make_parallel = [
    'make',
    # Run enough jobs to saturate all the cpus
    '-j', _ncpus,
    # Don't start more jobs if the load avg is too high
    '-l', _ncpus,
    # Synchronize the output so logs aren't intermixed in stdout / stderr
    '-O',
]
286
287
def build_and_install(name, build_for_testing=False):
    """
    Builds and installs the package in the environment. Optionally
    builds the examples and test cases for package.

    Parameter description:
    name                The name of the package we are building
    build_for_testing   Enable options related to testing on the package?
    """
    os.chdir(os.path.join(WORKSPACE, name))

    # Refresh dynamic linker run time bindings for dependencies
    check_call_cmd('sudo', '-n', '--', 'ldconfig')

    package = Package()
    if build_for_testing:
        package.test()
    else:
        package.install()
307
308
def build_dep_tree(name, pkgdir, dep_added, head, branch, dep_tree=None):
    """
    For each package (name), starting with the package to be unit tested,
    extract its dependencies. For each package dependency defined, recursively
    apply the same strategy.

    Returns the updated dict of dependencies and their added status.
    Raises if a cyclic dependency is detected or no build system is found.

    Parameter descriptions:
    name                Name of the package
    pkgdir              Directory where package source is located
    dep_added           Current dict of dependencies and added status
    head                Head node of the dependency tree
    branch              Branch to clone from pkg
    dep_tree            Current dependency tree node
    """
    if not dep_tree:
        dep_tree = head

    # Dependencies named in the depcache are prebuilt and can be skipped.
    # NOTE(review): this is a substring test against the cache line, so it
    # presumes cached names are unambiguous — confirm against cache writer.
    with open("/tmp/depcache", "r") as depcache:
        cache = depcache.readline()

    # Read out pkg dependencies
    pkg = Package(name, pkgdir)

    build = pkg.build_system()
    if build is None:
        raise Exception(f"Unable to find build system for {name}.")

    for dep in set(build.dependencies()):
        if dep in cache:
            continue
        # Dependency package not already known
        if dep_added.get(dep) is None:
            print(f"Adding {dep} dependency to {name}.")
            # Dependency package not added
            new_child = dep_tree.AddChild(dep)
            dep_added[dep] = False
            dep_pkgdir = clone_pkg(dep, branch)
            # Determine this dependency package's
            # dependencies and add them before
            # returning to add this package
            dep_added = build_dep_tree(dep,
                                       dep_pkgdir,
                                       dep_added,
                                       head,
                                       branch,
                                       new_child)
        else:
            # Dependency package known and added
            if dep_added[dep]:
                continue
            else:
                # A dependency that is known but not yet fully processed is
                # still on the recursion stack: cyclic dependency failure.
                raise Exception("Cyclic dependencies found in " + name)

    if not dep_added[name]:
        dep_added[name] = True

    return dep_added
367
368
def run_cppcheck():
    """Run cppcheck over every tracked C/C++ source in the current repo."""
    match_re = re.compile(r'((?!\.mako\.).)*\.[ch](?:pp)?$', re.I)
    ls_files = subprocess.check_output(['git', 'ls-files'])
    cppcheck_files = [
        f for f in ls_files.decode('utf-8').split() if match_re.match(f)
    ]

    if not cppcheck_files:
        # Skip cppcheck when there aren't any C or C++ sources.
        print("no files")
        return None

    # http://cppcheck.sourceforge.net/manual.pdf
    params = ['cppcheck', '-j', str(multiprocessing.cpu_count()),
              '--enable=all', '--library=googletest', '--file-list=-']

    # Feed the file list on stdin so the command line stays short.
    proc = subprocess.Popen(
        params,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        stdin=subprocess.PIPE)
    (stdout, stderr) = proc.communicate(
        input='\n'.join(cppcheck_files).encode('utf-8'))

    if proc.wait():
        raise Exception('Cppcheck failed')
    print(stdout.decode('utf-8'))
    print(stderr.decode('utf-8'))
399
400
def is_valgrind_safe():
    """
    Returns whether it is safe to run valgrind on our platform.

    Compiles a small program exercising malloc/strcmp/strerror_r and runs
    it under valgrind; any compile, run, or valgrind error (exit code 99)
    marks the platform as unsafe.
    """
    src = 'unit-test-vg.c'
    exe = './unit-test-vg'
    with open(src, 'w') as h:
        h.write('#include <errno.h>\n')
        h.write('#include <stdio.h>\n')
        h.write('#include <stdlib.h>\n')
        h.write('#include <string.h>\n')
        h.write('int main() {\n')
        h.write('char *heap_str = malloc(16);\n')
        h.write('strcpy(heap_str, "RandString");\n')
        h.write('int res = strcmp("RandString", heap_str);\n')
        h.write('free(heap_str);\n')
        h.write('char errstr[64];\n')
        h.write('strerror_r(EINVAL, errstr, sizeof(errstr));\n')
        h.write('printf("%s\\n", errstr);\n')
        h.write('return res;\n')
        h.write('}\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call(['valgrind', '--error-exitcode=99', exe],
                       stdout=devnull, stderr=devnull)
        return True
    except Exception:
        # Not a bare except: let KeyboardInterrupt/SystemExit propagate.
        sys.stderr.write("###### Platform is not valgrind safe ######\n")
        return False
    finally:
        # The executable does not exist if compilation failed; don't let
        # cleanup raise FileNotFoundError and mask the real outcome.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
435
436
def is_sanitize_safe():
    """
    Returns whether it is safe to run sanitizers on our platform.

    Compiles and runs a trivial program with ASan/UBSan enabled; any
    compile or run failure marks the platform as unsafe.
    """
    src = 'unit-test-sanitize.c'
    exe = './unit-test-sanitize'
    with open(src, 'w') as h:
        h.write('int main() { return 0; }\n')
    try:
        with open(os.devnull, 'w') as devnull:
            check_call(['gcc', '-O2', '-fsanitize=address',
                        '-fsanitize=undefined', '-o', exe, src],
                       stdout=devnull, stderr=devnull)
            check_call([exe], stdout=devnull, stderr=devnull)

        # TODO - Sanitizer not working on ppc64le
        # https://github.com/openbmc/openbmc-build-scripts/issues/31
        if (platform.processor() == 'ppc64le'):
            sys.stderr.write("###### ppc64le is not sanitize safe ######\n")
            return False
        else:
            return True
    except Exception:
        # Not a bare except: let KeyboardInterrupt/SystemExit propagate.
        sys.stderr.write("###### Platform is not sanitize safe ######\n")
        return False
    finally:
        # The executable does not exist if compilation failed; don't let
        # cleanup raise FileNotFoundError and mask the real outcome.
        for path in (src, exe):
            if os.path.exists(path):
                os.remove(path)
465
466
def maybe_make_valgrind():
    """
    Potentially runs the unit tests through valgrind for the package
    via `make check-valgrind`. If the package does not have valgrind testing
    then it just skips over this.
    """
    # Valgrind testing is currently broken by an aggressive strcmp optimization
    # that is inlined into optimized code for POWER by gcc 7+. Until we find
    # a workaround, just don't run valgrind tests on POWER.
    # https://github.com/openbmc/openbmc/issues/3315
    if not is_valgrind_safe():
        sys.stderr.write("###### Skipping valgrind ######\n")
        return
    if not make_target_exists('check-valgrind'):
        return

    try:
        check_call_cmd(*(make_parallel + ['check-valgrind']))
    except CalledProcessError:
        # Dump each per-suite log to aid debugging before failing.
        log_re = re.compile('test-suite-[a-z]+.log')
        for root, _, files in os.walk(os.getcwd()):
            for f in files:
                if log_re.search(f):
                    check_call_cmd('cat', os.path.join(root, f))
        raise Exception('Valgrind tests failed')
493
494
def maybe_make_coverage():
    """
    Potentially runs the unit tests through code coverage for the package
    via `make check-code-coverage`. If the package does not have code coverage
    testing then it just skips over this.
    """
    if not make_target_exists('check-code-coverage'):
        return

    # Actually run code coverage
    try:
        check_call_cmd(*(make_parallel + ['check-code-coverage']))
    except CalledProcessError:
        raise Exception('Code coverage failed')
510
511
class BuildSystem(object):
    """
    Build systems generally provide the means to configure, build, install and
    test software. The BuildSystem class defines a set of interfaces on top of
    which Autotools, Meson, CMake and possibly other build system drivers can
    be implemented, separating out the phases to control whether a package
    should merely be installed or also tested and analyzed.

    All interface methods raise NotImplementedError; subclasses must
    override them.  (`raise NotImplemented` was a bug: NotImplemented is
    not an exception, so raising it produced a TypeError.)
    """
    def __init__(self, package, path):
        """Initialise the driver with properties independent of the build system

        Keyword arguments:
        package: The name of the package. Derived from the path if None
        path: The path to the package. Set to the working directory if None
        """
        self.path = "." if not path else path
        realpath = os.path.realpath(self.path)
        self.package = package if package else os.path.basename(realpath)
        self.build_for_testing = False

    def probe(self):
        """Test if the build system driver can be applied to the package

        Return True if the driver can drive the package's build system,
        otherwise False.

        Generally probe() is implemented by testing for the presence of the
        build system's configuration file(s).
        """
        raise NotImplementedError

    def dependencies(self):
        """Provide the package's dependencies

        Returns a list of dependencies. If no dependencies are required then an
        empty list must be returned.

        Generally dependencies() is implemented by analysing and extracting the
        data from the build system configuration.
        """
        raise NotImplementedError

    def configure(self, build_for_testing):
        """Configure the source ready for building

        Should raise an exception if configuration failed.

        Keyword arguments:
        build_for_testing: Mark the package as being built for testing rather
                           than for installation as a dependency for the
                           package under test. Setting to True generally
                           implies that the package will be configured to build
                           with debug information, at a low level of
                           optimisation and possibly with sanitizers enabled.

        Generally configure() is implemented by invoking the build system
        tooling to generate Makefiles or equivalent.
        """
        raise NotImplementedError

    def build(self):
        """Build the software ready for installation and/or testing

        Should raise an exception if the build fails

        Generally build() is implemented by invoking `make` or `ninja`.
        """
        raise NotImplementedError

    def install(self):
        """Install the software ready for use

        Should raise an exception if installation fails

        Like build(), install() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def test(self):
        """Build and run the test suite associated with the package

        Should raise an exception if the build or testing fails.

        Like install(), test() is generally implemented by invoking `make` or
        `ninja`.
        """
        raise NotImplementedError

    def analyze(self):
        """Run any supported analysis tools over the codebase

        Should raise an exception if analysis fails.

        Some analysis tools such as scan-build need injection into the build
        system. analyze() provides the necessary hook to implement such
        behaviour. Analyzers independent of the build system can also be
        specified here but at the cost of possible duplication of code between
        the build system driver implementations.
        """
        raise NotImplementedError
613
614
class Autotools(BuildSystem):
    """Build system driver for autotools (configure.ac) based packages."""

    def __init__(self, package=None, path=None):
        super(Autotools, self).__init__(package, path)

    def probe(self):
        # A package is autotools-based if it ships a top-level configure.ac.
        return os.path.isfile(os.path.join(self.path, 'configure.ac'))

    def dependencies(self):
        """Extract openbmc dependencies from configure.ac.

        Redefines the dependency-declaring m4 macros so that autoconf's
        output wraps the relevant macro argument in MACRO_START/MACRO_END
        markers, then parses the markers out of the generated text and maps
        the discovered module names to openbmc package names.
        """
        configure_ac = os.path.join(self.path, 'configure.ac')

        contents = ''
        # Prepend some special function overrides so we can parse out
        # dependencies
        for macro in DEPENDENCIES.keys():
            # DEPENDENCIES_OFFSET selects which macro argument ($N) holds
            # the module list; m4 arguments are 1-indexed, hence the +1.
            contents += ('m4_define([' + macro + '], [' + macro + '_START$' +
                         str(DEPENDENCIES_OFFSET[macro] + 1) +
                         macro + '_END])\n')
        with open(configure_ac, "rt") as f:
            contents += f.read()

        # Run autoconf over the augmented configure.ac, fed via stdin ('-').
        autoconf_cmdline = ['autoconf', '-Wno-undefined', '-']
        autoconf_process = subprocess.Popen(autoconf_cmdline,
                                            stdin=subprocess.PIPE,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
        document = contents.encode('utf-8')
        (stdout, stderr) = autoconf_process.communicate(input=document)
        if not stdout:
            print(stderr)
            raise Exception("Failed to run autoconf for parsing dependencies")

        # Parse out all of the dependency text
        matches = []
        for macro in DEPENDENCIES.keys():
            # Non-greedy so adjacent marker pairs don't merge.
            pattern = '(' + macro + ')_START(.*?)' + macro + '_END'
            for match in re.compile(pattern).finditer(stdout.decode('utf-8')):
                matches.append((match.group(1), match.group(2)))

        # Look up dependencies from the text
        found_deps = []
        for macro, deptext in matches:
            for potential_dep in deptext.split(' '):
                # startswith so versioned module names (e.g. "foo >= 1.0")
                # still match their known dependency prefix.
                for known_dep in DEPENDENCIES[macro].keys():
                    if potential_dep.startswith(known_dep):
                        found_deps.append(DEPENDENCIES[macro][known_dep])

        return found_deps

    def _configure_feature(self, flag, enabled):
        """
        Returns a configure flag as a string

        Parameters:
        flag                The name of the flag
        enabled             Whether the flag is enabled or disabled
        """
        return '--' + ('enable' if enabled else 'disable') + '-' + flag

    def configure(self, build_for_testing):
        """Bootstrap (if needed) and run ./configure with standard flags."""
        self.build_for_testing = build_for_testing
        conf_flags = [
            self._configure_feature('silent-rules', False),
            self._configure_feature('examples', build_for_testing),
            self._configure_feature('tests', build_for_testing),
            self._configure_feature('itests', INTEGRATION_TEST),
        ]
        if not TEST_ONLY:
            conf_flags.extend([
                self._configure_feature('code-coverage', build_for_testing),
                self._configure_feature('valgrind', build_for_testing),
            ])
        # Add any necessary configure flags for package
        if CONFIGURE_FLAGS.get(self.package) is not None:
            conf_flags.extend(CONFIGURE_FLAGS.get(self.package))
        # Run the first bootstrap script found, in order of preference.
        for bootstrap in ['bootstrap.sh', 'bootstrap', 'autogen.sh']:
            if os.path.exists(bootstrap):
                check_call_cmd('./' + bootstrap)
                break
        check_call_cmd('./configure', *conf_flags)

    def build(self):
        # Parallel make of the default target.
        check_call_cmd(*make_parallel)

    def install(self):
        # System-wide install requires passwordless sudo.
        check_call_cmd('sudo', '-n', '--', *(make_parallel + ['install']))

    def test(self):
        """Run `make check`, dumping test-suite.log on failure."""
        try:
            cmd = make_parallel + ['check']
            # Repeat to flush out intermittent failures (--repeat option).
            for i in range(0, args.repeat):
                check_call_cmd(*cmd)
        except CalledProcessError:
            for root, _, files in os.walk(os.getcwd()):
                if 'test-suite.log' not in files:
                    continue
                check_call_cmd('cat', os.path.join(root, 'test-suite.log'))
            raise Exception('Unit tests failed')

    def analyze(self):
        # Optional analysis passes; each skips itself when unsupported.
        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
717
718
class CMake(BuildSystem):
    """Build system driver for CMake-based packages."""

    def __init__(self, package=None, path=None):
        super(CMake, self).__init__(package, path)

    def probe(self):
        # A package is CMake-based if it ships a top-level CMakeLists.txt.
        return os.path.isfile(os.path.join(self.path, 'CMakeLists.txt'))

    def dependencies(self):
        # No dependency extraction is implemented for CMake packages.
        return []

    def configure(self, build_for_testing):
        """Generate build files, exporting compile commands for analyzers."""
        self.build_for_testing = build_for_testing
        cmake_args = ['cmake', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON']
        if INTEGRATION_TEST:
            cmake_args.append('-DITESTS=ON')
        cmake_args.append('.')
        check_call_cmd(*cmake_args)

    def build(self):
        check_call_cmd('cmake', '--build', '.', '--', '-j',
                       str(multiprocessing.cpu_count()))

    def install(self):
        # Nothing to do; CMake packages are not installed by this script.
        pass

    def test(self):
        if make_target_exists('test'):
            check_call_cmd('ctest', '.')

    def analyze(self):
        if TEST_ONLY:
            return

        if os.path.isfile('.clang-tidy'):
            try:
                os.mkdir("tidy-build")
            except FileExistsError:
                pass
            # clang-tidy needs to run on a clang-specific build
            check_call_cmd('cmake', '-DCMAKE_C_COMPILER=clang',
                           '-DCMAKE_CXX_COMPILER=clang++',
                           '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON',
                           '-H.',
                           '-Btidy-build')
            # we need to cd here because otherwise clang-tidy doesn't find the
            # .clang-tidy file in the roots of repos.  Its arguably a "bug"
            # with run-clang-tidy at a minimum it's "weird" that it requires
            # the .clang-tidy to be up a dir
            os.chdir("tidy-build")
            try:
                check_call_cmd('run-clang-tidy.py', "-header-filter=.*", '-p',
                               '.')
            finally:
                os.chdir("..")

        maybe_make_valgrind()
        maybe_make_coverage()
        run_cppcheck()
777
778
class Meson(BuildSystem):
    """Build system driver for Meson-based packages."""

    def __init__(self, package=None, path=None):
        super(Meson, self).__init__(package, path)

    def probe(self):
        # A package is Meson-based if it ships a top-level meson.build.
        return os.path.isfile(os.path.join(self.path, 'meson.build'))
785
786    def dependencies(self):
787        meson_build = os.path.join(self.path, 'meson.build')
788        if not os.path.exists(meson_build):
789            return []
790
791        found_deps = []
792        for root, dirs, files in os.walk(self.path):
793            if 'meson.build' not in files:
794                continue
795            with open(os.path.join(root, 'meson.build'), 'rt') as f:
796                build_contents = f.read()
797            pattern = r"dependency\('([^']*)'.*?\),?\n"
798            for match in re.finditer(pattern, build_contents):
799                group = match.group(1)
800                maybe_dep = DEPENDENCIES['PKG_CHECK_MODULES'].get(group)
801                if maybe_dep is not None:
802                    found_deps.append(maybe_dep)
803
804        return found_deps
805
    def _parse_options(self, options_file):
        """
        Returns the options defined in the provided meson_options.txt file,
        as a mapping from option key to meson option object.

        Parameters:
        options_file        The file containing options
        """
        # An empty subproject name makes the interpreter parse top-level
        # (non-namespaced) options.
        oi = optinterpreter.OptionInterpreter('')
        oi.process(options_file)
        return oi.options
816
817    def _configure_boolean(self, val):
818        """
819        Returns the meson flag which signifies the value
820
821        True is true which requires the boolean.
822        False is false which disables the boolean.
823
824        Parameters:
825        val                 The value being converted
826        """
827        if val is True:
828            return 'true'
829        elif val is False:
830            return 'false'
831        else:
832            raise Exception("Bad meson boolean value")
833
834    def _configure_feature(self, val):
835        """
836        Returns the meson flag which signifies the value
837
838        True is enabled which requires the feature.
839        False is disabled which disables the feature.
840        None is auto which autodetects the feature.
841
842        Parameters:
843        val                 The value being converted
844        """
845        if val is True:
846            return "enabled"
847        elif val is False:
848            return "disabled"
849        elif val is None:
850            return "auto"
851        else:
852            raise Exception("Bad meson feature value")
853
854    def _configure_option(self, opts, key, val):
855        """
856        Returns the meson flag which signifies the value
857        based on the type of the opt
858
859        Parameters:
860        opt                 The meson option which we are setting
861        val                 The value being converted
862        """
863        if isinstance(opts[key], coredata.UserBooleanOption):
864            str_val = self._configure_boolean(val)
865        elif isinstance(opts[key], coredata.UserFeatureOption):
866            str_val = self._configure_feature(val)
867        else:
868            raise Exception('Unknown meson option type')
869        return "-D{}={}".format(key, str_val)
870
871    def configure(self, build_for_testing):
872        self.build_for_testing = build_for_testing
873        meson_options = {}
874        if os.path.exists("meson_options.txt"):
875            meson_options = self._parse_options("meson_options.txt")
876        meson_flags = [
877            '-Db_colorout=never',
878            '-Dwerror=true',
879            '-Dwarning_level=3',
880        ]
881        if build_for_testing:
882            meson_flags.append('--buildtype=debug')
883        else:
884            meson_flags.append('--buildtype=debugoptimized')
885        if OptionKey('tests') in meson_options:
886            meson_flags.append(self._configure_option(meson_options, OptionKey('tests'), build_for_testing))
887        if OptionKey('examples') in meson_options:
888            meson_flags.append(self._configure_option(meson_options, OptionKey('examples'), build_for_testing))
889        if OptionKey('itests') in meson_options:
890            meson_flags.append(self._configure_option(meson_options, OptionKey('itests'), INTEGRATION_TEST))
891        if MESON_FLAGS.get(self.package) is not None:
892            meson_flags.extend(MESON_FLAGS.get(self.package))
893        try:
894            check_call_cmd('meson', 'setup', '--reconfigure', 'build',
895                           *meson_flags)
896        except:
897            shutil.rmtree('build')
898            check_call_cmd('meson', 'setup', 'build', *meson_flags)
899
    def build(self):
        """Compile the configured package via ninja in the 'build' dir."""
        check_call_cmd('ninja', '-C', 'build')
902
    def install(self):
        """Install the built package system-wide.

        Uses 'sudo -n' (non-interactive), so passwordless sudo must be
        available in the CI environment.
        """
        check_call_cmd('sudo', '-n', '--', 'ninja', '-C', 'build', 'install')
905
906    def test(self):
907        # It is useful to check various settings of the meson.build file
908        # for compatibility, such as meson_version checks.  We shouldn't
909        # do this in the configure path though because it affects subprojects
910        # and dependencies as well, but we only want this applied to the
911        # project-under-test (otherwise an upstream dependency could fail
912        # this check without our control).
913        self._extra_meson_checks()
914
915        try:
916            test_args = ('--repeat', str(args.repeat), '-C', 'build')
917            check_call_cmd('meson', 'test', *test_args)
918
919        except CalledProcessError:
920            for root, _, files in os.walk(os.getcwd()):
921                if 'testlog.txt' not in files:
922                    continue
923                check_call_cmd('cat', os.path.join(root, 'testlog.txt'))
924            raise Exception('Unit tests failed')
925
926    def _setup_exists(self, setup):
927        """
928        Returns whether the meson build supports the named test setup.
929
930        Parameter descriptions:
931        setup              The setup target to check
932        """
933        try:
934            with open(os.devnull, 'w') as devnull:
935                output = subprocess.check_output(
936                        ['meson', 'test', '-C', 'build',
937                         '--setup', setup, '-t', '0'],
938                        stderr=subprocess.STDOUT)
939        except CalledProcessError as e:
940            output = e.output
941        output = output.decode('utf-8')
942        return not re.search('Test setup .* not found from project', output)
943
944    def _maybe_valgrind(self):
945        """
946        Potentially runs the unit tests through valgrind for the package
947        via `meson test`. The package can specify custom valgrind
948        configurations by utilizing add_test_setup() in a meson.build
949        """
950        if not is_valgrind_safe():
951            sys.stderr.write("###### Skipping valgrind ######\n")
952            return
953        try:
954            if self._setup_exists('valgrind'):
955                check_call_cmd('meson', 'test','-t','10','-C', 'build',
956                               '--setup', 'valgrind')
957            else:
958                check_call_cmd('meson', 'test','-t','10', '-C', 'build',
959                               '--wrapper', 'valgrind')
960        except CalledProcessError:
961            for root, _, files in os.walk(os.getcwd()):
962                if 'testlog-valgrind.txt' not in files:
963                    continue
964                cat_args = os.path.join(root, 'testlog-valgrind.txt')
965                check_call_cmd('cat', cat_args)
966            raise Exception('Valgrind tests failed')
967
    def analyze(self):
        """
        Run the analysis suite over the package: valgrind, clang-tidy (or
        the basic clang static analyzer), address/UB sanitizers, coverage,
        and cppcheck.  Skipped entirely when only tests were requested.
        """
        if TEST_ONLY:
            return

        self._maybe_valgrind()

        # Run clang-tidy only if the project has a configuration
        if os.path.isfile('.clang-tidy'):
            os.environ["CXX"] = "clang++"
            check_call_cmd('meson', 'setup', 'build-clang')
            check_call_cmd('run-clang-tidy.py', '-p',
                           'build-clang')
        # Run the basic clang static analyzer otherwise
        else:
            check_call_cmd('ninja', '-C', 'build',
                           'scan-build')

        # Run tests through sanitizers
        # b_lundef is needed if clang++ is CXX since it resolves the
        # asan symbols at runtime only. We don't want to set it earlier
        # in the build process to ensure we don't have undefined
        # runtime code.
        if is_sanitize_safe():
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=address,undefined',
                           '-Db_lundef=false')
            check_call_cmd('meson', 'test', '-C', 'build',
                           '--logbase', 'testlog-ubasan')
            # TODO: Fix memory sanitizer
            # check_call_cmd('meson', 'configure', 'build',
            #                '-Db_sanitize=memory')
            # check_call_cmd('meson', 'test', '-C', 'build'
            #                '--logbase', 'testlog-msan')
            # Restore a sanitizer-free configuration for the coverage
            # run below.
            check_call_cmd('meson', 'configure', 'build',
                           '-Db_sanitize=none')
        else:
            sys.stderr.write("###### Skipping sanitizers ######\n")

        # Run coverage checks
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=true')
        self.test()
        # Only build coverage HTML if coverage files were produced
        for root, dirs, files in os.walk('build'):
            if any([f.endswith('.gcda') for f in files]):
                check_call_cmd('ninja', '-C', 'build',
                               'coverage-html')
                break
        check_call_cmd('meson', 'configure', 'build',
                       '-Db_coverage=false')
        run_cppcheck()
1019
1020    def _extra_meson_checks(self):
1021        with open(os.path.join(self.path, 'meson.build'), 'rt') as f:
1022            build_contents = f.read()
1023
1024        # Find project's specified meson_version.
1025        meson_version = None
1026        pattern = r"meson_version:[^']*'([^']*)'"
1027        for match in re.finditer(pattern, build_contents):
1028            group = match.group(1)
1029            meson_version = group
1030
1031        # C++20 requires at least Meson 0.57 but Meson itself doesn't
1032        # identify this.  Add to our unit-test checks so that we don't
1033        # get a meson.build missing this.
1034        pattern = r"'cpp_std=c\+\+20'"
1035        for match in re.finditer(pattern, build_contents):
1036            if not meson_version or \
1037                    not meson_version_compare(meson_version, ">=0.57"):
1038                raise Exception(
1039                    "C++20 support requires specifying in meson.build: "
1040                    + "meson_version: '>=0.57'"
1041                )
1042
1043
class Package(object):
    """
    Represents a repository checkout and dispatches build/test work to
    whichever supported build system(s) probe successfully against it.
    """

    def __init__(self, name=None, path=None):
        self.supported = [Meson, Autotools, CMake]
        self.name = name
        self.path = path
        self.test_only = False

    def build_systems(self):
        """Lazily yield an instance of each supported build system whose
        probe() succeeds for this package."""
        for system_cls in self.supported:
            candidate = system_cls(self.name, self.path)
            if candidate.probe():
                yield candidate

    def build_system(self, preferred=None):
        """Return the build system of the preferred type if given, the
        first detected system otherwise, or None when none apply."""
        detected = list(self.build_systems())

        if not detected:
            return None

        if preferred:
            by_type = {type(system): system for system in detected}
            return by_type[preferred]

        return detected[0]

    def install(self, system=None):
        """Configure (non-testing), build, and install the package."""
        if not system:
            system = self.build_system()

        system.configure(False)
        system.build()
        system.install()

    def _test_one(self, system):
        """Run the complete test flow against one build system."""
        system.configure(True)
        system.build()
        system.install()
        system.test()
        system.analyze()

    def test(self):
        """Run the complete test flow under every detected build system."""
        for detected in self.build_systems():
            self._test_one(detected)
1084
1085
def find_file(filename, basedir):
    """
    Finds all occurrences of a file (or list of files) in the base
    directory and passes them back with their relative paths.

    Parameter descriptions:
    filename              The name of the file (or list of files) to
                          find
    basedir               The base directory search in
    """

    # Normalize a single name into a one-element list.
    targets = filename if isinstance(filename, list) else [filename]

    found = []
    for root, _, files in os.walk(basedir):
        found.extend(os.path.join(root, target)
                     for target in targets if target in files)
    return found
1106
1107
if __name__ == '__main__':
    # CONFIGURE_FLAGS = [GIT REPO]:[CONFIGURE FLAGS]
    # Extra autotools ./configure flags applied per-repository.
    CONFIGURE_FLAGS = {
        'phosphor-logging':
        ['--enable-metadata-processing', '--enable-openpower-pel-extension',
         'YAML_DIR=/usr/local/share/phosphor-dbus-yaml/yaml']
    }

    # MESON_FLAGS = [GIT REPO]:[MESON FLAGS]
    # Extra meson setup flags applied per-repository.
    MESON_FLAGS = {
        'phosphor-dbus-interfaces':
        ['-Ddata_com_ibm=true', '-Ddata_org_open_power=true'],
        'phosphor-logging':
        ['-Dopenpower-pel-extension=enabled']
    }

    # DEPENDENCIES = [MACRO]:[library/header]:[GIT REPO]
    # Maps autoconf macro arguments seen in configure.ac to the OpenBMC
    # repository that provides them.
    DEPENDENCIES = {
        'AC_CHECK_LIB': {'mapper': 'phosphor-objmgr'},
        'AC_CHECK_HEADER': {
            'host-ipmid': 'phosphor-host-ipmid',
            'blobs-ipmid': 'phosphor-ipmi-blobs',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging/log.hpp': 'phosphor-logging',
        },
        'AC_PATH_PROG': {'sdbus++': 'sdbusplus'},
        'PKG_CHECK_MODULES': {
            'phosphor-dbus-interfaces': 'phosphor-dbus-interfaces',
            'libipmid': 'phosphor-host-ipmid',
            'libipmid-host': 'phosphor-host-ipmid',
            'sdbusplus': 'sdbusplus',
            'sdeventplus': 'sdeventplus',
            'stdplus': 'stdplus',
            'gpioplus': 'gpioplus',
            'phosphor-logging': 'phosphor-logging',
            'phosphor-snmp': 'phosphor-snmp',
            'ipmiblob': 'ipmi-blob-tool',
            'hei': 'openpower-libhei',
            'phosphor-ipmi-blobs': 'phosphor-ipmi-blobs',
        },
    }

    # Offset into array of macro parameters MACRO(0, 1, ...N)
    # i.e. which macro argument carries the dependency name.
    DEPENDENCIES_OFFSET = {
        'AC_CHECK_LIB': 0,
        'AC_CHECK_HEADER': 0,
        'AC_PATH_PROG': 1,
        'PKG_CHECK_MODULES': 1,
    }

    # DEPENDENCIES_REGEX = [GIT REPO]:[REGEX STRING]
    # Dependencies matching the regex are reordered ahead of the named
    # repository in the install list (see ReorderDeps below).
    DEPENDENCIES_REGEX = {
        'phosphor-logging': r'\S+-dbus-interfaces$'
    }

    # Set command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workspace", dest="WORKSPACE", required=True,
                        help="Workspace directory location(i.e. /home)")
    parser.add_argument("-p", "--package", dest="PACKAGE", required=True,
                        help="OpenBMC package to be unit tested")
    parser.add_argument("-t", "--test-only", dest="TEST_ONLY",
                        action="store_true", required=False, default=False,
                        help="Only run test cases, no other validation")
    arg_inttests = parser.add_mutually_exclusive_group()
    arg_inttests.add_argument("--integration-tests", dest="INTEGRATION_TEST",
                        action="store_true", required=False, default=True,
                        help="Enable integration tests [default].")
    arg_inttests.add_argument("--no-integration-tests", dest="INTEGRATION_TEST",
                        action="store_false", required=False,
                        help="Disable integration tests.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print additional package status messages")
    parser.add_argument("-r", "--repeat", help="Repeat tests N times",
                        type=int, default=1)
    parser.add_argument("-b", "--branch", dest="BRANCH", required=False,
                        help="Branch to target for dependent repositories",
                        default="master")
    parser.add_argument("-n", "--noformat", dest="FORMAT",
                        action="store_false", required=False,
                        help="Whether or not to run format code")
    args = parser.parse_args(sys.argv[1:])
    # Promote parsed arguments to the module-level globals that the
    # helper classes above reference directly.
    WORKSPACE = args.WORKSPACE
    UNIT_TEST_PKG = args.PACKAGE
    TEST_ONLY = args.TEST_ONLY
    INTEGRATION_TEST = args.INTEGRATION_TEST
    BRANCH = args.BRANCH
    FORMAT_CODE = args.FORMAT
    # printline is a no-op unless -v/--verbose was given.
    if args.verbose:
        def printline(*line):
            for arg in line:
                print(arg, end=' ')
            print()
    else:
        def printline(*line):
            pass

    CODE_SCAN_DIR = os.path.join(WORKSPACE, UNIT_TEST_PKG)

    # First validate code formatting if repo has style formatting files.
    # The format-code.sh checks for these files.
    if FORMAT_CODE:
        format_scripts = find_file(['format-code.sh', 'format-code'],
                                   CODE_SCAN_DIR)

        # use default format-code.sh if no other found
        if not format_scripts:
            format_scripts.append(os.path.join(WORKSPACE, "format-code.sh"))

        for f in format_scripts:
            check_call_cmd(f, CODE_SCAN_DIR)

    # Check if this repo has a supported make infrastructure
    pkg = Package(UNIT_TEST_PKG, CODE_SCAN_DIR)
    if not pkg.build_system():
        print("No valid build system, exit")
        sys.exit(0)

    # Clear the umask so files created by the dependency builds are not
    # permission-restricted; restored after the builds complete.
    prev_umask = os.umask(000)

    # Determine dependencies and add them
    dep_added = dict()
    dep_added[UNIT_TEST_PKG] = False

    # Create dependency tree
    dep_tree = DepTree(UNIT_TEST_PKG)
    build_dep_tree(UNIT_TEST_PKG, CODE_SCAN_DIR, dep_added, dep_tree, BRANCH)

    # Reorder Dependency Tree
    for pkg_name, regex_str in DEPENDENCIES_REGEX.items():
        dep_tree.ReorderDeps(pkg_name, regex_str)
    if args.verbose:
        dep_tree.PrintTree()

    install_list = dep_tree.GetInstallList()

    # We don't want to treat our package as a dependency
    install_list.remove(UNIT_TEST_PKG)

    # Install reordered dependencies
    for dep in install_list:
        build_and_install(dep, False)

    # Run package unit tests
    build_and_install(UNIT_TEST_PKG, True)

    os.umask(prev_umask)

    # Run any custom CI scripts the repo has, of which there can be
    # multiple of and anywhere in the repository.
    ci_scripts = find_file(['run-ci.sh', 'run-ci'], CODE_SCAN_DIR)
    if ci_scripts:
        os.chdir(CODE_SCAN_DIR)
        for ci_script in ci_scripts:
            check_call_cmd(ci_script)