#
# Copyright (c) 2013-2014 Intel Corporation
#
# SPDX-License-Identifier: MIT
#

# DESCRIPTION
# This module is mainly used by scripts/oe-selftest and modules under meta/lib/oeqa/selftest
# It provides a class and methods for running commands on the host in a convenient way for tests.

import os
import sys
import subprocess
import threading
import time
import logging
from oeqa.utils import CommandError
from oeqa.utils import ftools
import re
import contextlib
import errno
# Export test doesn't require bb
try:
    import bb
except ImportError:
    pass

class Command(object):
    def __init__(self, command, bg=False, timeout=None, data=None, output_log=None, **options):

        self.defaultopts = {
            "stdout": subprocess.PIPE,
            "stderr": subprocess.STDOUT,
            "stdin": None,
            "shell": False,
            "bufsize": -1,
        }

        self.cmd = command
        self.bg = bg
        self.timeout = timeout
        self.data = data

        self.options = dict(self.defaultopts)
        if isinstance(self.cmd, str):
            self.options["shell"] = True
        if self.data:
            self.options['stdin'] = subprocess.PIPE
        self.options.update(options)

        self.status = None
        # We collect chunks of output before joining them at the end.
        self._output_chunks = []
        self._error_chunks = []
        self.output = None
        self.error = None
        self.threads = []

        self.output_log = output_log
        self.log = logging.getLogger("utils.commands")

    def run(self):
        self.process = subprocess.Popen(self.cmd, **self.options)

        def readThread(output, stream, logfunc):
            if logfunc:
                for line in stream:
                    output.append(line)
                    logfunc(line.decode("utf-8", errors='replace').rstrip())
            else:
                output.append(stream.read())

        def readStderrThread():
            readThread(self._error_chunks, self.process.stderr, self.output_log.error if self.output_log else None)

        def readStdoutThread():
            readThread(self._output_chunks, self.process.stdout, self.output_log.info if self.output_log else None)

        def writeThread():
            try:
                self.process.stdin.write(self.data)
                self.process.stdin.close()
            except OSError as ex:
                # It's not an error when the command does not consume all
                # of our data. subprocess.communicate() also ignores that.
                if ex.errno != errno.EPIPE:
                    raise

        # We write in a separate thread because then we can read
        # without worrying about deadlocks. The additional thread is
        # expected to terminate by itself and we mark it as a daemon,
        # so even if it should happen not to terminate for whatever
        # reason, the main process will still exit, which will then
        # kill the write thread.
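
        # Start the helper threads: the writer only when there is stdin data,
        # the stderr reader only when stderr is a separate pipe (by default it
        # is merged into stdout via stderr=subprocess.STDOUT), and the stdout
        # reader unconditionally.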
        if self.data:
            thread = threading.Thread(target=writeThread, daemon=True)
            thread.start()
            self.threads.append(thread)
        if self.process.stderr:
            thread = threading.Thread(target=readStderrThread)
            thread.start()
            self.threads.append(thread)
        if self.output_log:
            self.output_log.info('Running: %s' % self.cmd)
        thread = threading.Thread(target=readStdoutThread)
        thread.start()
        self.threads.append(thread)

        self.log.debug("Running command '%s'" % self.cmd)

        if not self.bg:
            if self.timeout is None:
                for thread in self.threads:
                    thread.join()
            else:
                deadline = time.time() + self.timeout
                for thread in self.threads:
                    timeout = deadline - time.time()
                    if timeout < 0:
                        timeout = 0
                    thread.join(timeout)
            self.stop()

    def stop(self):
        for thread in self.threads:
            if thread.is_alive():
                self.process.terminate()
            # let's give it more time to terminate gracefully before killing it
            thread.join(5)
            if thread.is_alive():
                self.process.kill()
                thread.join()

        def finalize_output(data):
            if not data:
                data = ""
            else:
                data = b"".join(data)
                data = data.decode("utf-8", errors='replace').rstrip()
            return data

        self.output = finalize_output(self._output_chunks)
        self._output_chunks = None
        # self.error used to be a byte string earlier, probably unintentionally.
        # Now it is a normal string, just like self.output.
        self.error = finalize_output(self._error_chunks)
        self._error_chunks = None
        # At this point we know that the process has closed stdout/stderr, so
        # it is safe and necessary to wait for the actual process completion.
        self.status = self.process.wait()
        self.process.stdout.close()
        if self.process.stderr:
            self.process.stderr.close()

        self.log.debug("Command '%s' returned %d as exit code." % (self.cmd, self.status))
        # logging the complete output is insane
        # bitbake -e output is really big
        # and makes the log file useless
        if self.status:
            lout = "\n".join(self.output.splitlines()[-20:])
            self.log.debug("Last 20 lines:\n%s" % lout)
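
# Minimal usage sketch for the Command class (illustrative only; tests normally
# go through runCmd() below, which adds native sysroot PATH handling, an
# optional sync and error reporting):
#
#   cmd = Command("ls /tmp", timeout=30)
#   cmd.run()
#   print(cmd.status, cmd.output)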


class Result(object):
    pass


def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True,
           native_sysroot=None, target_sys=None, limit_exc_output=0, output_log=None, **options):
    result = Result()

    if native_sysroot:
        new_env = dict(options.get('env', os.environ))
        paths = new_env["PATH"].split(":")
        paths = [
            os.path.join(native_sysroot, "bin"),
            os.path.join(native_sysroot, "sbin"),
            os.path.join(native_sysroot, "usr", "bin"),
            os.path.join(native_sysroot, "usr", "sbin"),
        ] + paths
        if target_sys:
            paths = [os.path.join(native_sysroot, "usr", "bin", target_sys)] + paths
        new_env["PATH"] = ":".join(paths)
        options['env'] = new_env

    cmd = Command(command, timeout=timeout, output_log=output_log, **options)
    cmd.run()

    # tests can be heavy on IO and if bitbake can't write out its caches, we see timeouts.
    # call sync around the tests to ensure the IO queue doesn't get too large, taking any IO
    # hit here rather than in bitbake shutdown.
    if sync:
        p = os.environ['PATH']
        os.environ['PATH'] = "/usr/bin:/bin:/usr/sbin:/sbin:" + p
        os.system("sync")
        os.environ['PATH'] = p

    result.command = command
    result.status = cmd.status
    result.output = cmd.output
    result.error = cmd.error
    result.pid = cmd.process.pid

    if result.status and not ignore_status:
        exc_output = result.output
        if limit_exc_output > 0:
            split = result.output.splitlines()
            if len(split) > limit_exc_output:
                exc_output = "\n... (last %d lines of output)\n" % limit_exc_output + \
                             '\n'.join(split[-limit_exc_output:])
        if assert_error:
            raise AssertionError("Command '%s' returned non-zero exit status %d:\n%s" % (command, result.status, exc_output))
        else:
            raise CommandError(result.status, command, exc_output)

    return result


def bitbake(command, ignore_status=False, timeout=None, postconfig=None, output_log=None, **options):

    if postconfig:
        postconfig_file = os.path.join(os.environ.get('BUILDDIR'), 'oeqa-post.conf')
        ftools.write_file(postconfig_file, postconfig)
        extra_args = "-R %s" % postconfig_file
    else:
        extra_args = ""

    if isinstance(command, str):
        cmd = "bitbake " + extra_args + " " + command
    else:
        cmd = [ "bitbake" ] + [a for a in (command + extra_args.split(" ")) if a not in [""]]

    try:
        return runCmd(cmd, ignore_status, timeout, output_log=output_log, **options)
    finally:
        if postconfig:
            os.remove(postconfig_file)


def get_bb_env(target=None, postconfig=None):
    if target:
        return bitbake("-e %s" % target, postconfig=postconfig).output
    else:
        return bitbake("-e", postconfig=postconfig).output

def get_bb_vars(variables=None, target=None, postconfig=None):
    """Get values of multiple bitbake variables"""
    bbenv = get_bb_env(target, postconfig=postconfig)

    if variables is not None:
        variables = list(variables)
    var_re = re.compile(r'^(export )?(?P<var>\w+(_.*)?)="(?P<value>.*)"$')
    unset_re = re.compile(r'^unset (?P<var>\w+)$')
    lastline = None
    values = {}
    for line in bbenv.splitlines():
        match = var_re.match(line)
        val = None
        if match:
            val = match.group('value')
        else:
            match = unset_re.match(line)
            if match:
                # Handle [unexport] variables
                if lastline.startswith('# "'):
                    val = lastline.split('"')[1]
        if val:
            var = match.group('var')
            if variables is None:
                values[var] = val
            else:
                if var in variables:
                    values[var] = val
                    variables.remove(var)
                # Stop after all required variables have been found
                if not variables:
                    break
        lastline = line
    if variables:
        # Fill in missing values
        for var in variables:
            values[var] = None
    return values

def get_bb_var(var, target=None, postconfig=None):
    return get_bb_vars([var], target, postconfig)[var]

def get_test_layer(bblayers=None):
    if bblayers is None:
        bblayers = get_bb_var("BBLAYERS")
    layers = bblayers.split()
    testlayer = None
    for l in layers:
        if '~' in l:
            l = os.path.expanduser(l)
        if "/meta-selftest" in l and os.path.isdir(l):
            testlayer = l
            break
    return testlayer
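
# Illustrative sketch of how the bitbake variable helpers above are typically
# used from a selftest (variable and recipe names are only examples):
#
#   machine = get_bb_var('MACHINE')
#   bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], 'core-image-minimal')
#   layer = get_test_layer()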
":${LAYERDIR}"\n') 306 f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec) 307 f.write(' ${LAYERDIR}/%s/*.bbappend"\n' % recipepathspec) 308 f.write('BBFILE_COLLECTIONS += "%s"\n' % templayername) 309 f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername) 310 f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority)) 311 f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername) 312 f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames)) 313 314@contextlib.contextmanager 315def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): 316 """ 317 launch_cmd means directly run the command, don't need set rootfs or env vars. 318 """ 319 320 import bb.tinfoil 321 import bb.build 322 323 # Need a non-'BitBake' logger to capture the runner output 324 targetlogger = logging.getLogger('TargetRunner') 325 targetlogger.setLevel(logging.DEBUG) 326 handler = logging.StreamHandler(sys.stdout) 327 targetlogger.addHandler(handler) 328 329 tinfoil = bb.tinfoil.Tinfoil() 330 tinfoil.prepare(config_only=False, quiet=True) 331 try: 332 tinfoil.logger.setLevel(logging.WARNING) 333 import oeqa.targetcontrol 334 recipedata = tinfoil.parse_recipe(pn) 335 recipedata.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage") 336 recipedata.setVar("TEST_QEMUBOOT_TIMEOUT", "1000") 337 # Tell QemuTarget() whether need find rootfs/kernel or not 338 if launch_cmd: 339 recipedata.setVar("FIND_ROOTFS", '0') 340 else: 341 recipedata.setVar("FIND_ROOTFS", '1') 342 343 for key, value in overrides.items(): 344 recipedata.setVar(key, value) 345 346 logdir = recipedata.getVar("TEST_LOG_DIR") 347 348 qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype) 349 finally: 350 # We need to shut down tinfoil early here in case we actually want 351 # to run tinfoil-using utilities with the running QEMU instance. 352 # Luckily QemuTarget doesn't need it after the constructor. 353 tinfoil.shutdown() 354 355 try: 356 qemu.deploy() 357 try: 358 qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes) 359 except Exception as e: 360 msg = str(e) + '\nFailed to start QEMU - see the logs in %s' % logdir 361 if os.path.exists(qemu.qemurunnerlog): 362 with open(qemu.qemurunnerlog, 'r') as f: 363 msg = msg + "Qemurunner log output from %s:\n%s" % (qemu.qemurunnerlog, f.read()) 364 raise Exception(msg) 365 366 yield qemu 367 368 finally: 369 targetlogger.removeHandler(handler) 370 qemu.stop() 371 372def updateEnv(env_file): 373 """ 374 Source a file and update environment. 375 """ 376 377 cmd = ". %s; env -0" % env_file 378 result = runCmd(cmd) 379 380 for line in result.output.split("\0"): 381 (key, _, value) = line.partition("=") 382 os.environ[key] = value 383