#
# Copyright (c) 2013-2014 Intel Corporation
#
# SPDX-License-Identifier: MIT
#

# DESCRIPTION
# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest.
# It provides a class and helper functions for running commands on the host in a convenient way for tests.
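#
# A minimal usage sketch (illustrative only; the recipe and command names are
# placeholders, not part of this module):
#
#   from oeqa.utils.commands import runCmd, bitbake, get_bb_var
#
#   result = runCmd('uname -a')          # run a host command, raises on failure
#   bitbake('core-image-minimal')        # run a bitbake command
#   machine = get_bb_var('MACHINE')      # query a single bitbake variable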


import os
import sys
import signal
import subprocess
import threading
import time
import logging
from errno import EPIPE
from oeqa.utils import CommandError
from oeqa.utils import ftools
import re
import contextlib
# Export test doesn't require bb
try:
    import bb
except ImportError:
    pass

class Command(object):
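    """Run a host command and capture its output.

    A string command is executed through the shell, a list is executed
    directly. 'data' is written to the command's stdin, 'timeout' limits how
    long run() waits for the command when not run in the background, and
    'output_log' is a logger whose info()/error() methods receive the output
    line by line. Any extra keyword arguments are passed through to
    subprocess.Popen(). With bg=True, run() returns immediately and the
    caller is expected to call stop().
    """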
    def __init__(self, command, bg=False, timeout=None, data=None, output_log=None, **options):

        self.defaultopts = {
            "stdout": subprocess.PIPE,
            "stderr": subprocess.STDOUT,
            "stdin": None,
            "shell": False,
            "bufsize": -1,
        }

        self.cmd = command
        self.bg = bg
        self.timeout = timeout
        self.data = data

        self.options = dict(self.defaultopts)
        if isinstance(self.cmd, str):
            self.options["shell"] = True
        if self.data:
            self.options['stdin'] = subprocess.PIPE
        self.options.update(options)

        self.status = None
        # We collect chunks of output before joining them at the end.
        self._output_chunks = []
        self._error_chunks = []
        self.output = None
        self.error = None
        self.threads = []

        self.output_log = output_log
        self.log = logging.getLogger("utils.commands")

    def run(self):
        self.process = subprocess.Popen(self.cmd, **self.options)

        def readThread(output, stream, logfunc):
            if logfunc:
                for line in stream:
                    output.append(line)
                    logfunc(line.decode("utf-8", errors='replace').rstrip())
            else:
                output.append(stream.read())

        def readStderrThread():
            readThread(self._error_chunks, self.process.stderr, self.output_log.error if self.output_log else None)

        def readStdoutThread():
            readThread(self._output_chunks, self.process.stdout, self.output_log.info if self.output_log else None)

        def writeThread():
            try:
                self.process.stdin.write(self.data)
                self.process.stdin.close()
            except OSError as ex:
                # It's not an error when the command does not consume all
                # of our data. subprocess.communicate() also ignores that.
                if ex.errno != EPIPE:
                    raise

        # We write in a separate thread because then we can read
        # without worrying about deadlocks. The additional thread is
        # expected to terminate by itself and we mark it as a daemon,
        # so even if it happens not to terminate for whatever reason,
        # the main process will still exit, which will then kill the
        # write thread.
        if self.data:
            thread = threading.Thread(target=writeThread, daemon=True)
            thread.start()
            self.threads.append(thread)
        if self.process.stderr:
            thread = threading.Thread(target=readStderrThread)
            thread.start()
            self.threads.append(thread)
        if self.output_log:
            self.output_log.info('Running: %s' % self.cmd)
        thread = threading.Thread(target=readStdoutThread)
        thread.start()
        self.threads.append(thread)

        self.log.debug("Running command '%s'" % self.cmd)

        if not self.bg:
            if self.timeout is None:
                for thread in self.threads:
                    thread.join()
            else:
                deadline = time.time() + self.timeout
                for thread in self.threads:
                    timeout = deadline - time.time()
                    if timeout < 0:
                        timeout = 0
                    thread.join(timeout)
            self.stop()

    def stop(self):
        for thread in self.threads:
            if thread.is_alive():
                self.process.terminate()
            # let's give it more time to terminate gracefully before killing it
            thread.join(5)
            if thread.is_alive():
                self.process.kill()
                thread.join()

        def finalize_output(data):
            if not data:
                data = ""
            else:
                data = b"".join(data)
                data = data.decode("utf-8", errors='replace').rstrip()
            return data

        self.output = finalize_output(self._output_chunks)
        self._output_chunks = None
        # self.error used to be a byte string earlier, probably unintentionally.
        # Now it is a normal string, just like self.output.
        self.error = finalize_output(self._error_chunks)
        self._error_chunks = None
        # At this point we know that the process has closed stdout/stderr, so
        # it is safe and necessary to wait for the actual process completion.
        self.status = self.process.wait()
        self.process.stdout.close()
        if self.process.stderr:
            self.process.stderr.close()

        self.log.debug("Command '%s' returned %d as exit code." % (self.cmd, self.status))
        # Logging the complete output would be excessive ("bitbake -e" output
        # is really big) and would make the log file useless, so only log the
        # tail of a failed command.
        if self.status:
            lout = "\n".join(self.output.splitlines()[-20:])
            self.log.debug("Last 20 lines:\n%s" % lout)

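# Command can also be used directly when a process needs to stay running in the
# background while the test interacts with it. A minimal sketch (the command
# string is just an illustrative placeholder):
#
#   cmd = Command("tail -f /var/log/syslog", bg=True)
#   cmd.run()                 # returns immediately because bg=True
#   ...                       # interact with the running process
#   cmd.stop()                # terminate it and collect cmd.output/cmd.status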

class Result(object):
    pass


def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True,
          native_sysroot=None, limit_exc_output=0, output_log=None, **options):
    result = Result()

    if native_sysroot:
        extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \
                      (native_sysroot, native_sysroot, native_sysroot)
        nenv = dict(options.get('env', os.environ))
        nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '')
        options['env'] = nenv

    cmd = Command(command, timeout=timeout, output_log=output_log, **options)
    cmd.run()

    # tests can be heavy on IO and if bitbake can't write out its caches, we see timeouts.
    # call sync around the tests to ensure the IO queue doesn't get too large, taking any IO
    # hit here rather than in bitbake shutdown.
    if sync:
        p = os.environ['PATH']
        os.environ['PATH'] = "/usr/bin:/bin:/usr/sbin:/sbin:" + p
        os.system("sync")
        os.environ['PATH'] = p

    result.command = command
    result.status = cmd.status
    result.output = cmd.output
    result.error = cmd.error
    result.pid = cmd.process.pid

    if result.status and not ignore_status:
        exc_output = result.output
        if limit_exc_output > 0:
            split = result.output.splitlines()
            if len(split) > limit_exc_output:
                exc_output = "\n... (last %d lines of output)\n" % limit_exc_output + \
                             '\n'.join(split[-limit_exc_output:])
        if assert_error:
            raise AssertionError("Command '%s' returned non-zero exit status %d:\n%s" % (command, result.status, exc_output))
        else:
            raise CommandError(result.status, command, exc_output)

    return result

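# An illustrative sketch of how runCmd() results are typically consumed (the
# command is a placeholder): a failing command raises AssertionError unless
# ignore_status=True is passed, in which case the caller checks result.status.
#
#   result = runCmd('ls /nonexistent', ignore_status=True)
#   if result.status != 0:
#       print(result.output)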

def bitbake(command, ignore_status=False, timeout=None, postconfig=None, output_log=None, **options):

    if postconfig:
        postconfig_file = os.path.join(os.environ.get('BUILDDIR'), 'oeqa-post.conf')
        ftools.write_file(postconfig_file, postconfig)
        extra_args = "-R %s" % postconfig_file
    else:
        extra_args = ""

    if isinstance(command, str):
        cmd = "bitbake " + extra_args + " " + command
    else:
        cmd = [ "bitbake" ] + [a for a in (command + extra_args.split(" ")) if a not in [""]]

    try:
        return runCmd(cmd, ignore_status, timeout, output_log=output_log, **options)
    finally:
        if postconfig:
            os.remove(postconfig_file)

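# A minimal sketch of the postconfig mechanism (the variable assignment is a
# hypothetical example): the string is written to ${BUILDDIR}/oeqa-post.conf
# and passed to bitbake with -R, so it is applied on top of the normal
# configuration for just this invocation.
#
#   bitbake('core-image-minimal', postconfig='MY_TEST_VAR = "1"')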

def get_bb_env(target=None, postconfig=None):
    if target:
        return bitbake("-e %s" % target, postconfig=postconfig).output
    else:
        return bitbake("-e", postconfig=postconfig).output

def get_bb_vars(variables=None, target=None, postconfig=None):
    """Get values of multiple bitbake variables"""
    bbenv = get_bb_env(target, postconfig=postconfig)

    if variables is not None:
        variables = list(variables)
    var_re = re.compile(r'^(export )?(?P<var>\w+(_.*)?)="(?P<value>.*)"$')
    unset_re = re.compile(r'^unset (?P<var>\w+)$')
    lastline = None
    values = {}
    for line in bbenv.splitlines():
        match = var_re.match(line)
        val = None
        if match:
            val = match.group('value')
        else:
            match = unset_re.match(line)
            if match:
                # Handle [unexport] variables
                if lastline.startswith('#   "'):
                    val = lastline.split('"')[1]
        if val:
            var = match.group('var')
            if variables is None:
                values[var] = val
            else:
                if var in variables:
                    values[var] = val
                    variables.remove(var)
                # Stop after all required variables have been found
                if not variables:
                    break
        lastline = line
    if variables:
        # Fill in missing values
        for var in variables:
            values[var] = None
    return values

def get_bb_var(var, target=None, postconfig=None):
    return get_bb_vars([var], target, postconfig)[var]

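# An illustrative sketch of querying bitbake variables (variable and recipe
# names are just examples): get_bb_vars() parses "bitbake -e" output, so it is
# much cheaper to fetch several variables in one call than one at a time.
#
#   vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'], 'core-image-minimal')
#   machine = vars['MACHINE']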
def get_test_layer():
    layers = get_bb_var("BBLAYERS").split()
    testlayer = None
    for l in layers:
        if '~' in l:
            l = os.path.expanduser(l)
        if "/meta-selftest" in l and os.path.isdir(l):
            testlayer = l
            break
    return testlayer

def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
    os.makedirs(os.path.join(templayerdir, 'conf'))
    with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
        f.write('BBPATH .= ":${LAYERDIR}"\n')
        f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\\n' % recipepathspec)
        f.write('            ${LAYERDIR}/%s/*.bbappend"\n' % recipepathspec)
        f.write('BBFILE_COLLECTIONS += "%s"\n' % templayername)
        f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
        f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
        f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
        f.write('LAYERSERIES_COMPAT_%s = "${LAYERSERIES_COMPAT_core}"\n' % templayername)
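# A sketch of the usual pattern for the temporary layer helper above (the path
# and layer name are illustrative placeholders): create the layer, then
# register it with the build using bitbake-layers.
#
#   create_temp_layer('/tmp/meta-templayer', 'templayer')
#   runCmd('bitbake-layers add-layer /tmp/meta-templayer')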

@contextlib.contextmanager
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
    """
    launch_cmd means the command is run directly; there is no need to set up a rootfs or environment variables.
    """

    import bb.tinfoil
    import bb.build

    # Need a non-'BitBake' logger to capture the runner output
    targetlogger = logging.getLogger('TargetRunner')
    targetlogger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    targetlogger.addHandler(handler)

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        recipedata = tinfoil.parse_recipe(pn)
        recipedata.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        recipedata.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        # Tell QemuTarget() whether it needs to find the rootfs/kernel or not
        if launch_cmd:
            recipedata.setVar("FIND_ROOTFS", '0')
        else:
            recipedata.setVar("FIND_ROOTFS", '1')

        for key, value in overrides.items():
            recipedata.setVar(key, value)

        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    try:
        qemu.deploy()
        try:
            qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes)
        except Exception as e:
            msg = str(e) + '\nFailed to start QEMU - see the logs in %s' % logdir
            if os.path.exists(qemu.qemurunnerlog):
                with open(qemu.qemurunnerlog, 'r') as f:
                    msg = msg + "Qemurunner log output from %s:\n%s" % (qemu.qemurunnerlog, f.read())
            raise Exception(msg)

        yield qemu

    finally:
        targetlogger.removeHandler(handler)
        qemu.stop()
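# runqemu() above is a context manager; a minimal usage sketch, assuming the
# yielded QemuTarget object exposes run_serial() (the image name and command
# are illustrative):
#
#   with runqemu('core-image-minimal') as qemu:
#       status, output = qemu.run_serial('uname -a')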

def updateEnv(env_file):
    """
    Source a file and update environment.
    """

    cmd = ". %s; env -0" % env_file
    result = runCmd(cmd)

    for line in result.output.split("\0"):
        (key, _, value) = line.partition("=")
        os.environ[key] = value