xref: /openbmc/openbmc/poky/meta/lib/oeqa/utils/commands.py (revision c124f4f2e04dca16a428a76c89677328bc7bf908)
#
# Copyright (c) 2013-2014 Intel Corporation
#
# SPDX-License-Identifier: MIT
#

# DESCRIPTION
# This module is mainly used by scripts/oe-selftest and modules under meta/lib/oeqa/selftest.
# It provides a class and methods for running commands on the host in a convenient way for tests.
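#
# A minimal usage sketch (illustrative only; it assumes an initialized build
# environment so that 'bitbake' and BUILDDIR are available):
#
#   from oeqa.utils.commands import runCmd, bitbake, get_bb_var
#
#   result = runCmd("uname -a")                  # raises AssertionError on failure
#   result = bitbake("core-image-minimal", ignore_status=True)
#   deploydir = get_bb_var("DEPLOY_DIR_IMAGE")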

import os
import sys
import subprocess
import threading
import time
import logging
from oeqa.utils import CommandError
from oeqa.utils import ftools
import re
import contextlib
import errno
# Export test doesn't require bb
try:
    import bb
except ImportError:
    pass

class Command(object):
    def __init__(self, command, bg=False, timeout=None, data=None, output_log=None, **options):

        self.defaultopts = {
            "stdout": subprocess.PIPE,
            "stderr": subprocess.STDOUT,
            "stdin": None,
            "shell": False,
            "bufsize": -1,
        }

        self.cmd = command
        self.bg = bg
        self.timeout = timeout
        self.data = data

        self.options = dict(self.defaultopts)
        if isinstance(self.cmd, str):
            self.options["shell"] = True
        if self.data:
            self.options['stdin'] = subprocess.PIPE
        self.options.update(options)

        self.status = None
        # We collect chunks of output before joining them at the end.
        self._output_chunks = []
        self._error_chunks = []
        self.output = None
        self.error = None
        self.threads = []

        self.output_log = output_log
        self.log = logging.getLogger("utils.commands")

    def run(self):
        self.process = subprocess.Popen(self.cmd, **self.options)

        def readThread(output, stream, logfunc):
            if logfunc:
                for line in stream:
                    output.append(line)
                    logfunc(line.decode("utf-8", errors='replace').rstrip())
            else:
                output.append(stream.read())

        def readStderrThread():
            readThread(self._error_chunks, self.process.stderr, self.output_log.error if self.output_log else None)

        def readStdoutThread():
            readThread(self._output_chunks, self.process.stdout, self.output_log.info if self.output_log else None)

        def writeThread():
            try:
                self.process.stdin.write(self.data)
                self.process.stdin.close()
            except OSError as ex:
                # It's not an error when the command does not consume all
                # of our data. subprocess.communicate() also ignores that.
                if ex.errno != errno.EPIPE:
                    raise

        # We write in a separate thread so that we can read without
        # worrying about deadlocks. The additional thread is expected to
        # terminate by itself, and we mark it as a daemon so that even if
        # it happens not to terminate for whatever reason, the main
        # process will still exit, which will then kill the write thread.
        if self.data:
            thread = threading.Thread(target=writeThread, daemon=True)
            thread.start()
            self.threads.append(thread)
        if self.process.stderr:
            thread = threading.Thread(target=readStderrThread)
            thread.start()
            self.threads.append(thread)
        if self.output_log:
            self.output_log.info('Running: %s' % self.cmd)
        thread = threading.Thread(target=readStdoutThread)
        thread.start()
        self.threads.append(thread)

        self.log.debug("Running command '%s'" % self.cmd)

        if not self.bg:
            if self.timeout is None:
                for thread in self.threads:
                    thread.join()
            else:
                deadline = time.time() + self.timeout
                for thread in self.threads:
                    timeout = deadline - time.time()
                    if timeout < 0:
                        timeout = 0
                    thread.join(timeout)
            self.stop()

    def stop(self):
        for thread in self.threads:
            if thread.is_alive():
                self.process.terminate()
            # let's give it more time to terminate gracefully before killing it
            thread.join(5)
            if thread.is_alive():
                self.process.kill()
                thread.join()

        def finalize_output(data):
            if not data:
                data = ""
            else:
                data = b"".join(data)
                data = data.decode("utf-8", errors='replace').rstrip()
            return data

        self.output = finalize_output(self._output_chunks)
        self._output_chunks = None
        # self.error used to be a byte string earlier, probably unintentionally.
        # Now it is a normal string, just like self.output.
        self.error = finalize_output(self._error_chunks)
        self._error_chunks = None
        # At this point we know that the process has closed stdout/stderr, so
        # it is safe and necessary to wait for the actual process completion.
        self.status = self.process.wait()
        self.process.stdout.close()
        if self.process.stderr:
            self.process.stderr.close()

        self.log.debug("Command '%s' returned %d as exit code." % (self.cmd, self.status))
        # Logging the complete output is impractical: 'bitbake -e' output is
        # really big and would make the log file useless.
        if self.status:
            lout = "\n".join(self.output.splitlines()[-20:])
            self.log.debug("Last 20 lines:\n%s" % lout)


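# Illustrative sketch (not part of the original module's documented usage): the
# Command class can also be driven directly for background commands, where
# run() returns immediately and stop() collects the status and output later.
#
#   cmd = Command("sleep 5 && echo done", bg=True)
#   cmd.run()          # returns without waiting because bg=True
#   # ... do other work ...
#   cmd.stop()         # terminates/joins and populates cmd.status and cmd.output
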
class Result(object):
    pass


def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True,
          native_sysroot=None, target_sys=None, limit_exc_output=0, output_log=None, **options):
    result = Result()

    if native_sysroot:
        new_env = dict(options.get('env', os.environ))
        paths = new_env["PATH"].split(":")
        paths = [
            os.path.join(native_sysroot, "bin"),
            os.path.join(native_sysroot, "sbin"),
            os.path.join(native_sysroot, "usr", "bin"),
            os.path.join(native_sysroot, "usr", "sbin"),
        ] + paths
        if target_sys:
            paths = [os.path.join(native_sysroot, "usr", "bin", target_sys)] + paths
        new_env["PATH"] = ":".join(paths)
        options['env'] = new_env

    cmd = Command(command, timeout=timeout, output_log=output_log, **options)
    cmd.run()

    # Tests can be heavy on IO and, if bitbake can't write out its caches, we see
    # timeouts. Call sync around the tests to ensure the IO queue doesn't get too
    # large, taking any IO hit here rather than in bitbake shutdown.
    if sync:
        p = os.environ['PATH']
        os.environ['PATH'] = "/usr/bin:/bin:/usr/sbin:/sbin:" + p
        os.system("sync")
        os.environ['PATH'] = p

    result.command = command
    result.status = cmd.status
    result.output = cmd.output
    result.error = cmd.error
    result.pid = cmd.process.pid

    if result.status and not ignore_status:
        exc_output = result.output
        if result.error:
            exc_output = exc_output + result.error
        if limit_exc_output > 0:
            split = result.output.splitlines()
            if len(split) > limit_exc_output:
                exc_output = "\n... (last %d lines of output)\n" % limit_exc_output + \
                             '\n'.join(split[-limit_exc_output:])
        if assert_error:
            raise AssertionError("Command '%s' returned non-zero exit status %d:\n%s" % (command, result.status, exc_output))
        else:
            raise CommandError(result.status, command, exc_output)

    return result

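# Example (illustrative): inspect a failing command without raising, using the
# Result fields populated by runCmd() above.
#
#   result = runCmd("false", ignore_status=True)
#   print(result.status, result.pid)
#   print(result.output)   # stdout and stderr are merged by default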

def bitbake(command, ignore_status=False, timeout=None, postconfig=None, output_log=None, **options):

    if postconfig:
        postconfig_file = os.path.join(os.environ.get('BUILDDIR'), 'oeqa-post.conf')
        ftools.write_file(postconfig_file, postconfig)
        extra_args = "-R %s" % postconfig_file
    else:
        extra_args = ""

    if isinstance(command, str):
        cmd = "bitbake " + extra_args + " " + command
    else:
        cmd = [ "bitbake" ] + [a for a in (command + extra_args.split(" ")) if a not in [""]]

    try:
        return runCmd(cmd, ignore_status, timeout, output_log=output_log, **options)
    finally:
        if postconfig:
            os.remove(postconfig_file)

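# Example (illustrative): the postconfig argument writes a temporary
# oeqa-post.conf in BUILDDIR and passes it to bitbake with -R, so extra
# configuration can be injected for a single invocation.
#
#   result = bitbake("core-image-minimal -c fetch",
#                    postconfig='INHERIT += "buildstats"')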

def get_bb_env(target=None, postconfig=None):
    if target:
        return bitbake("-e %s" % target, postconfig=postconfig).output
    else:
        return bitbake("-e", postconfig=postconfig).output

def get_bb_vars(variables=None, target=None, postconfig=None):
    """Get values of multiple bitbake variables"""
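    # Illustrative examples of the 'bitbake -e' lines handled below (not
    # exhaustive):
    #
    #   MACHINE="qemux86-64"          -> values['MACHINE'] = 'qemux86-64'
    #   export PATH="/usr/bin:/bin"   -> values['PATH'] = '/usr/bin:/bin'
    #   #   "some value"
    #   unset UNEXPORTED_VAR          -> value taken from the comment line above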
    bbenv = get_bb_env(target, postconfig=postconfig)

    if variables is not None:
        variables = list(variables)
    var_re = re.compile(r'^(export )?(?P<var>\w+(_.*)?)="(?P<value>.*)"$')
    unset_re = re.compile(r'^unset (?P<var>\w+)$')
    lastline = None
    values = {}
    for line in bbenv.splitlines():
        match = var_re.match(line)
        val = None
        if match:
            val = match.group('value')
        else:
            match = unset_re.match(line)
            if match:
                # Handle [unexport] variables
                if lastline.startswith('#   "'):
                    val = lastline.split('"')[1]
        if val:
            var = match.group('var')
            if variables is None:
                values[var] = val
            else:
                if var in variables:
                    values[var] = val
                    variables.remove(var)
                # Stop after all required variables have been found
                if not variables:
                    break
        lastline = line
    if variables:
        # Fill in missing values
        for var in variables:
            values[var] = None
    return values

def get_bb_var(var, target=None, postconfig=None):
    return get_bb_vars([var], target, postconfig)[var]

def get_test_layer(bblayers=None):
    if bblayers is None:
        bblayers = get_bb_var("BBLAYERS")
    layers = bblayers.split()
    testlayer = None
    for l in layers:
        if '~' in l:
            l = os.path.expanduser(l)
        if "/meta-selftest" in l and os.path.isdir(l):
            testlayer = l
            break
    return testlayer

def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
    os.makedirs(os.path.join(templayerdir, 'conf'))
    corenames = get_bb_var('LAYERSERIES_CORENAMES')
    with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
        f.write('BBPATH .= ":${LAYERDIR}"\n')
        f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec)
        f.write('            ${LAYERDIR}/%s/*.bbappend"\n' % recipepathspec)
        f.write('BBFILE_COLLECTIONS += "%s"\n' % templayername)
        f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
        f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
        f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
        f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames))

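# Example (illustrative, hypothetical paths): create a throwaway layer for a
# test; registering it in BBLAYERS is a separate step.
#
#   import tempfile
#   templayerdir = tempfile.mkdtemp(prefix='oeqa-layer-')
#   create_temp_layer(templayerdir, 'templayer')
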
@contextlib.contextmanager
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, boot_patterns = {}, discard_writes=True):
    """
    Starts a context manager for an 'oeqa.targetcontrol.QemuTarget' resource.
    The underlying Qemu will be booted into a shell when the generator yields
    and stopped when the 'with' block exits.

    Usage:

        with runqemu('core-image-minimal') as qemu:
            qemu.run_serial('cat /proc/cpuinfo')

    Args:
        pn (str): (image) recipe to run on
        ssh (boolean): whether or not to enable SSH (network access)
        runqemuparams (str): space-separated list of params to pass to 'runqemu' script (like 'nographics', 'ovmf', etc.)
        image_fstype (str): IMAGE_FSTYPE to use
        launch_cmd (str): directly run this command and bypass automatic runqemu parameter generation
        qemuparams (str): extra parameters forwarded to qemu via QemuTarget.start(params=...)
        overrides (dict): dict of "'<bitbake-variable>': value" pairs that allows overriding bitbake variables
        boot_patterns (dict): dict of "'<pattern-name>': value" pairs to override default boot patterns, e.g. when not booting Linux
        discard_writes (boolean): enables qemu -snapshot feature to prevent modifying original image
    """

    import bb.tinfoil
    import bb.build

    # Need a non-'BitBake' logger to capture the runner output
    targetlogger = logging.getLogger('TargetRunner')
    targetlogger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    targetlogger.addHandler(handler)

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        recipedata = tinfoil.parse_recipe(pn)
        recipedata.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        recipedata.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        # Tell QemuTarget() whether it needs to find the rootfs/kernel or not
        if launch_cmd:
            recipedata.setVar("FIND_ROOTFS", '0')
        else:
            recipedata.setVar("FIND_ROOTFS", '1')

        for key, value in overrides.items():
            recipedata.setVar(key, value)

        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype, boot_patterns=boot_patterns)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    try:
        qemu.deploy()
        try:
            qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes)
        except Exception as e:
            msg = str(e) + '\nFailed to start QEMU - see the logs in %s' % logdir
            if os.path.exists(qemu.qemurunnerlog):
                with open(qemu.qemurunnerlog, 'r') as f:
                    msg = msg + "Qemurunner log output from %s:\n%s" % (qemu.qemurunnerlog, f.read())
            raise Exception(msg)

        yield qemu

    finally:
        targetlogger.removeHandler(handler)
        qemu.stop()

def updateEnv(env_file):
    """
    Source a file and update the process environment accordingly.
    """

    cmd = ". %s; env -0" % env_file
    result = runCmd(cmd)

    for line in result.output.split("\0"):
        (key, _, value) = line.partition("=")
        os.environ[key] = value

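# Example (illustrative, hypothetical path): refresh os.environ from a shell
# environment file, e.g. one produced by an SDK or build setup script.
#
#   updateEnv("/path/to/environment-setup-file")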