1# 2# Copyright (C) 2003, 2004 Chris Larson 3# Copyright (C) 2003, 2004 Phil Blundell 4# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer 5# Copyright (C) 2005 Holger Hans Peter Freyther 6# Copyright (C) 2005 ROAD GmbH 7# Copyright (C) 2006 - 2007 Richard Purdie 8# 9# SPDX-License-Identifier: GPL-2.0-only 10# 11import enum 12import sys, os, glob, os.path, re, time 13import itertools 14import logging 15import multiprocessing 16import threading 17from io import StringIO, UnsupportedOperation 18from contextlib import closing 19from collections import defaultdict, namedtuple 20import bb, bb.command 21from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build 22import queue 23import signal 24import prserv.serv 25import json 26import pickle 27import codecs 28import hashserv 29 30logger = logging.getLogger("BitBake") 31collectlog = logging.getLogger("BitBake.Collection") 32buildlog = logging.getLogger("BitBake.Build") 33parselog = logging.getLogger("BitBake.Parsing") 34providerlog = logging.getLogger("BitBake.Provider") 35 36class NoSpecificMatch(bb.BBHandledException): 37 """ 38 Exception raised when no or multiple file matches are found 39 """ 40 41class NothingToBuild(Exception): 42 """ 43 Exception raised when there is nothing to build 44 """ 45 46class CollectionError(bb.BBHandledException): 47 """ 48 Exception raised when layer configuration is incorrect 49 """ 50 51 52class State(enum.Enum): 53 INITIAL = 0, 54 PARSING = 1, 55 RUNNING = 2, 56 SHUTDOWN = 3, 57 FORCE_SHUTDOWN = 4, 58 STOPPED = 5, 59 ERROR = 6 60 61 62class SkippedPackage: 63 def __init__(self, info = None, reason = None): 64 self.pn = None 65 self.skipreason = None 66 self.provides = None 67 self.rprovides = None 68 69 if info: 70 self.pn = info.pn 71 self.skipreason = info.skipreason 72 self.provides = info.provides 73 self.rprovides = info.packages + info.rprovides 74 for package in info.packages: 75 self.rprovides += info.rprovides_pkg[package] 76 elif reason: 77 self.skipreason = reason 78 79 80class CookerFeatures(object): 81 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4)) 82 83 def __init__(self): 84 self._features=set() 85 86 def setFeature(self, f): 87 # validate we got a request for a feature we support 88 if f not in CookerFeatures._feature_list: 89 return 90 self._features.add(f) 91 92 def __contains__(self, f): 93 return f in self._features 94 95 def __iter__(self): 96 return self._features.__iter__() 97 98 def __next__(self): 99 return next(self._features) 100 101 102class EventWriter: 103 def __init__(self, cooker, eventfile): 104 self.cooker = cooker 105 self.eventfile = eventfile 106 self.event_queue = [] 107 108 def write_variables(self): 109 with open(self.eventfile, "a") as f: 110 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])})) 111 112 def send(self, event): 113 with open(self.eventfile, "a") as f: 114 try: 115 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8') 116 f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__, 117 "vars": str_event})) 118 except Exception as err: 119 import traceback 120 print(err, traceback.format_exc()) 121 122 123#============================================================================# 124# BBCooker 125#============================================================================# 126class BBCooker: 127 """ 128 Manages one bitbake build run 129 """ 130 131 def __init__(self, featureSet=None, server=None): 132 self.recipecaches = None 133 self.baseconfig_valid = False 134 self.parsecache_valid = False 135 self.eventlog = None 136 # The skiplists, one per multiconfig 137 self.skiplist_by_mc = defaultdict(dict) 138 self.featureset = CookerFeatures() 139 if featureSet: 140 for f in featureSet: 141 self.featureset.setFeature(f) 142 143 self.orig_syspath = sys.path.copy() 144 self.orig_sysmodules = [*sys.modules] 145 146 self.configuration = bb.cookerdata.CookerConfiguration() 147 148 self.process_server = server 149 self.idleCallBackRegister = None 150 self.waitIdle = None 151 if server: 152 self.idleCallBackRegister = server.register_idle_function 153 self.waitIdle = server.wait_for_idle 154 155 bb.debug(1, "BBCooker starting %s" % time.time()) 156 157 self.configwatched = {} 158 self.parsewatched = {} 159 160 # If being called by something like tinfoil, we need to clean cached data 161 # which may now be invalid 162 bb.parse.clear_cache() 163 bb.parse.BBHandler.cached_statements = {} 164 165 self.ui_cmdline = None 166 self.hashserv = None 167 self.hashservaddr = None 168 169 # TOSTOP must not be set or our children will hang when they output 170 try: 171 fd = sys.stdout.fileno() 172 if os.isatty(fd): 173 import termios 174 tcattr = termios.tcgetattr(fd) 175 if tcattr[3] & termios.TOSTOP: 176 buildlog.info("The terminal had the TOSTOP bit set, clearing...") 177 tcattr[3] = tcattr[3] & ~termios.TOSTOP 178 termios.tcsetattr(fd, termios.TCSANOW, tcattr) 179 except UnsupportedOperation: 180 pass 181 182 self.command = bb.command.Command(self, self.process_server) 183 self.state = State.INITIAL 184 185 self.parser = None 186 187 signal.signal(signal.SIGTERM, self.sigterm_exception) 188 # Let SIGHUP exit as SIGTERM 189 signal.signal(signal.SIGHUP, self.sigterm_exception) 190 191 bb.debug(1, "BBCooker startup complete %s" % time.time()) 192 193 def init_configdata(self): 194 if not hasattr(self, "data"): 195 self.initConfigurationData() 196 bb.debug(1, "BBCooker parsed base configuration %s" % time.time()) 197 self.handlePRServ() 198 199 def _baseconfig_set(self, value): 200 if value and not self.baseconfig_valid: 201 bb.server.process.serverlog("Base config valid") 202 elif not value and self.baseconfig_valid: 203 bb.server.process.serverlog("Base config invalidated") 204 self.baseconfig_valid = value 205 206 def _parsecache_set(self, value): 207 if value and not self.parsecache_valid: 208 bb.server.process.serverlog("Parse cache valid") 209 elif not value and self.parsecache_valid: 210 bb.server.process.serverlog("Parse cache invalidated") 211 self.parsecache_valid = value 212 213 def add_filewatch(self, deps, configwatcher=False): 214 if configwatcher: 215 watcher = self.configwatched 216 else: 217 watcher = self.parsewatched 218 219 for i in deps: 220 f = i[0] 221 mtime = i[1] 222 watcher[f] = mtime 223 224 def sigterm_exception(self, signum, stackframe): 225 if signum == signal.SIGTERM: 226 bb.warn("Cooker received SIGTERM, shutting down...") 227 elif signum == signal.SIGHUP: 228 bb.warn("Cooker received SIGHUP, shutting down...") 229 self.state = State.FORCE_SHUTDOWN 230 bb.event._should_exit.set() 231 232 def setFeatures(self, features): 233 # we only accept a new feature set if we're in state initial, so we can reset without problems 234 if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]: 235 raise Exception("Illegal state for feature set change") 236 original_featureset = list(self.featureset) 237 for feature in features: 238 self.featureset.setFeature(feature) 239 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset))) 240 if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"): 241 self.reset() 242 243 def initConfigurationData(self): 244 self.state = State.INITIAL 245 self.caches_array = [] 246 247 sys.path = self.orig_syspath.copy() 248 for mod in [*sys.modules]: 249 if mod not in self.orig_sysmodules: 250 del sys.modules[mod] 251 252 self.configwatched = {} 253 254 # Need to preserve BB_CONSOLELOG over resets 255 consolelog = None 256 if hasattr(self, "data"): 257 consolelog = self.data.getVar("BB_CONSOLELOG") 258 259 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: 260 self.enableDataTracking() 261 262 caches_name_array = ['bb.cache:CoreRecipeInfo'] 263 # We hardcode all known cache types in a single place, here. 264 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: 265 caches_name_array.append("bb.cache_extra:HobRecipeInfo") 266 if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset: 267 caches_name_array.append("bb.cache:SiggenRecipeInfo") 268 269 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! 270 # This is the entry point, no further check needed! 271 for var in caches_name_array: 272 try: 273 module_name, cache_name = var.split(':') 274 module = __import__(module_name, fromlist=(cache_name,)) 275 self.caches_array.append(getattr(module, cache_name)) 276 except ImportError as exc: 277 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc)) 278 raise bb.BBHandledException() 279 280 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False) 281 self.databuilder.parseBaseConfiguration() 282 self.data = self.databuilder.data 283 self.extraconfigdata = {} 284 285 eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG") 286 if not self.configuration.writeeventlog and eventlog: 287 self.setupEventLog(eventlog) 288 289 if consolelog: 290 self.data.setVar("BB_CONSOLELOG", consolelog) 291 292 self.data.setVar('BB_CMDLINE', self.ui_cmdline) 293 294 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: 295 self.disableDataTracking() 296 297 for mc in self.databuilder.mcdata.values(): 298 self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True) 299 300 self._baseconfig_set(True) 301 self._parsecache_set(False) 302 303 def handlePRServ(self): 304 # Setup a PR Server based on the new configuration 305 try: 306 self.prhost = prserv.serv.auto_start(self.data) 307 except prserv.serv.PRServiceConfigError as e: 308 bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log") 309 310 if self.data.getVar("BB_HASHSERVE") == "auto": 311 # Create a new hash server bound to a unix domain socket 312 if not self.hashserv: 313 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" 314 upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None 315 if upstream: 316 try: 317 with hashserv.create_client(upstream) as client: 318 client.ping() 319 except ImportError as e: 320 bb.fatal(""""Unable to use hash equivalence server at '%s' due to missing or incorrect python module: 321%s 322Please install the needed module on the build host, or use an environment containing it (e.g a pip venv or OpenEmbedded's buildtools tarball). 323You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts.""" 324 % (upstream, repr(e))) 325 except ConnectionError as e: 326 bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s" 327 % (upstream, repr(e))) 328 upstream = None 329 330 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") 331 self.hashserv = hashserv.create_server( 332 self.hashservaddr, 333 dbfile, 334 sync=False, 335 upstream=upstream, 336 ) 337 self.hashserv.serve_as_process(log_level=logging.WARNING) 338 for mc in self.databuilder.mcdata: 339 self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) 340 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) 341 342 bb.parse.init_parser(self.data) 343 344 def enableDataTracking(self): 345 self.configuration.tracking = True 346 if hasattr(self, "data"): 347 self.data.enableTracking() 348 349 def disableDataTracking(self): 350 self.configuration.tracking = False 351 if hasattr(self, "data"): 352 self.data.disableTracking() 353 354 def revalidateCaches(self): 355 bb.parse.clear_cache() 356 357 clean = True 358 for f in self.configwatched: 359 if not bb.parse.check_mtime(f, self.configwatched[f]): 360 bb.server.process.serverlog("Found %s changed, invalid cache" % f) 361 self._baseconfig_set(False) 362 self._parsecache_set(False) 363 clean = False 364 break 365 366 if clean: 367 for f in self.parsewatched: 368 if not bb.parse.check_mtime(f, self.parsewatched[f]): 369 bb.server.process.serverlog("Found %s changed, invalid cache" % f) 370 self._parsecache_set(False) 371 clean = False 372 break 373 374 if not clean: 375 bb.parse.BBHandler.cached_statements = {} 376 377 # If writes were made to any of the data stores, we need to recalculate the data 378 # store cache 379 if hasattr(self, "databuilder"): 380 self.databuilder.calc_datastore_hashes() 381 382 def parseConfiguration(self): 383 self.updateCacheSync() 384 385 # Change nice level if we're asked to 386 nice = self.data.getVar("BB_NICE_LEVEL") 387 if nice: 388 curnice = os.nice(0) 389 nice = int(nice) - curnice 390 buildlog.verbose("Renice to %s " % os.nice(nice)) 391 392 if self.recipecaches: 393 del self.recipecaches 394 self.multiconfigs = self.databuilder.mcdata.keys() 395 self.recipecaches = {} 396 for mc in self.multiconfigs: 397 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) 398 399 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) 400 self.collections = {} 401 for mc in self.multiconfigs: 402 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc) 403 404 self._parsecache_set(False) 405 406 def setupEventLog(self, eventlog): 407 if self.eventlog and self.eventlog[0] != eventlog: 408 bb.event.unregister_UIHhandler(self.eventlog[1]) 409 self.eventlog = None 410 if not self.eventlog or self.eventlog[0] != eventlog: 411 # we log all events to a file if so directed 412 # register the log file writer as UI Handler 413 if not os.path.exists(os.path.dirname(eventlog)): 414 bb.utils.mkdirhier(os.path.dirname(eventlog)) 415 writer = EventWriter(self, eventlog) 416 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event']) 417 self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer) 418 419 def updateConfigOpts(self, options, environment, cmdline): 420 self.ui_cmdline = cmdline 421 clean = True 422 for o in options: 423 if o in ['prefile', 'postfile']: 424 # Only these options may require a reparse 425 try: 426 if getattr(self.configuration, o) == options[o]: 427 # Value is the same, no need to mark dirty 428 continue 429 except AttributeError: 430 pass 431 logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o])) 432 print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o])) 433 clean = False 434 if hasattr(self.configuration, o): 435 setattr(self.configuration, o, options[o]) 436 437 if self.configuration.writeeventlog: 438 self.setupEventLog(self.configuration.writeeventlog) 439 440 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel 441 bb.msg.loggerDefaultDomains = self.configuration.debug_domains 442 443 if hasattr(self, "data"): 444 origenv = bb.data.init() 445 for k in environment: 446 origenv.setVar(k, environment[k]) 447 self.data.setVar("BB_ORIGENV", origenv) 448 449 for k in bb.utils.approved_variables(): 450 if k in environment and k not in self.configuration.env: 451 logger.debug("Updating new environment variable %s to %s" % (k, environment[k])) 452 self.configuration.env[k] = environment[k] 453 clean = False 454 if k in self.configuration.env and k not in environment: 455 logger.debug("Updating environment variable %s (deleted)" % (k)) 456 del self.configuration.env[k] 457 clean = False 458 if k not in self.configuration.env and k not in environment: 459 continue 460 if environment[k] != self.configuration.env[k]: 461 logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k])) 462 self.configuration.env[k] = environment[k] 463 clean = False 464 465 # Now update all the variables not in the datastore to match 466 self.configuration.env = environment 467 468 self.revalidateCaches() 469 if not clean: 470 logger.debug("Base environment change, triggering reparse") 471 self.reset() 472 473 def showVersions(self): 474 475 (latest_versions, preferred_versions, required) = self.findProviders() 476 477 logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version") 478 logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================") 479 480 for p in sorted(self.recipecaches[''].pkg_pn): 481 preferred = preferred_versions[p] 482 latest = latest_versions[p] 483 requiredstr = "" 484 preferredstr = "" 485 if required[p]: 486 if preferred[0] is not None: 487 requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2] 488 else: 489 bb.fatal("REQUIRED_VERSION of package %s not available" % p) 490 else: 491 preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2] 492 493 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] 494 495 if preferred == latest: 496 preferredstr = "" 497 498 logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr) 499 500 def showEnvironment(self, buildfile=None, pkgs_to_build=None): 501 """ 502 Show the outer or per-recipe environment 503 """ 504 fn = None 505 envdata = None 506 mc = '' 507 if not pkgs_to_build: 508 pkgs_to_build = [] 509 510 orig_tracking = self.configuration.tracking 511 if not orig_tracking: 512 self.enableDataTracking() 513 self.reset() 514 # reset() resets to the UI requested value so we have to redo this 515 self.enableDataTracking() 516 517 def mc_base(p): 518 if p.startswith('mc:'): 519 s = p.split(':') 520 if len(s) == 2: 521 return s[1] 522 return None 523 524 if buildfile: 525 # Parse the configuration here. We need to do it explicitly here since 526 # this showEnvironment() code path doesn't use the cache 527 self.parseConfiguration() 528 529 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile) 530 fn = self.matchFile(fn, mc) 531 fn = bb.cache.realfn2virtual(fn, cls, mc) 532 elif len(pkgs_to_build) == 1: 533 mc = mc_base(pkgs_to_build[0]) 534 if not mc: 535 ignore = self.data.getVar("ASSUME_PROVIDED") or "" 536 if pkgs_to_build[0] in set(ignore.split()): 537 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) 538 539 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True) 540 541 mc = runlist[0][0] 542 fn = runlist[0][3] 543 544 if fn: 545 try: 546 layername = self.collections[mc].calc_bbfile_priority(fn)[2] 547 envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername) 548 except Exception as e: 549 parselog.exception("Unable to read %s", fn) 550 raise 551 else: 552 if not mc in self.databuilder.mcdata: 553 bb.fatal('No multiconfig named "%s" found' % mc) 554 envdata = self.databuilder.mcdata[mc] 555 data.expandKeys(envdata) 556 parse.ast.runAnonFuncs(envdata) 557 558 # Display history 559 with closing(StringIO()) as env: 560 self.data.inchistory.emit(env) 561 logger.plain(env.getvalue()) 562 563 # emit variables and shell functions 564 with closing(StringIO()) as env: 565 data.emit_env(env, envdata, True) 566 logger.plain(env.getvalue()) 567 568 # emit the metadata which isn't valid shell 569 for e in sorted(envdata.keys()): 570 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False): 571 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False)) 572 573 if not orig_tracking: 574 self.disableDataTracking() 575 self.reset() 576 577 def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False): 578 """ 579 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 580 """ 581 bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data) 582 583 # A task of None means use the default task 584 if task is None: 585 task = self.configuration.cmd 586 if not task.startswith("do_"): 587 task = "do_%s" % task 588 589 targetlist = self.checkPackages(pkgs_to_build, task) 590 fulltargetlist = [] 591 defaulttask_implicit = '' 592 defaulttask_explicit = False 593 wildcard = False 594 595 # Wild card expansion: 596 # Replace string such as "mc:*:bash" 597 # into "mc:A:bash mc:B:bash bash" 598 for k in targetlist: 599 if k.startswith("mc:") and k.count(':') >= 2: 600 if wildcard: 601 bb.fatal('multiconfig conflict') 602 if k.split(":")[1] == "*": 603 wildcard = True 604 for mc in self.multiconfigs: 605 if mc: 606 fulltargetlist.append(k.replace('*', mc)) 607 # implicit default task 608 else: 609 defaulttask_implicit = k.split(":")[2] 610 else: 611 fulltargetlist.append(k) 612 else: 613 defaulttask_explicit = True 614 fulltargetlist.append(k) 615 616 if not defaulttask_explicit and defaulttask_implicit != '': 617 fulltargetlist.append(defaulttask_implicit) 618 619 bb.debug(1,"Target list: %s" % (str(fulltargetlist))) 620 taskdata = {} 621 localdata = {} 622 623 for mc in self.multiconfigs: 624 taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete) 625 localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc]) 626 bb.data.expandKeys(localdata[mc]) 627 628 current = 0 629 runlist = [] 630 for k in fulltargetlist: 631 origk = k 632 mc = "" 633 if k.startswith("mc:") and k.count(':') >= 2: 634 mc = k.split(":")[1] 635 k = ":".join(k.split(":")[2:]) 636 ktask = task 637 if ":do_" in k: 638 k2 = k.split(":do_") 639 k = k2[0] 640 ktask = k2[1] 641 642 if mc not in self.multiconfigs: 643 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc)) 644 645 taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k) 646 current += 1 647 if not ktask.startswith("do_"): 648 ktask = "do_%s" % ktask 649 if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]: 650 # e.g. in ASSUME_PROVIDED 651 continue 652 fn = taskdata[mc].build_targets[k][0] 653 runlist.append([mc, k, ktask, fn]) 654 bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data) 655 656 havemc = False 657 for mc in self.multiconfigs: 658 if taskdata[mc].get_mcdepends(): 659 havemc = True 660 661 # No need to do check providers if there are no mcdeps or not an mc build 662 if havemc or len(self.multiconfigs) > 1: 663 seen = set() 664 new = True 665 # Make sure we can provide the multiconfig dependency 666 while new: 667 mcdeps = set() 668 # Add unresolved first, so we can get multiconfig indirect dependencies on time 669 for mc in self.multiconfigs: 670 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 671 mcdeps |= set(taskdata[mc].get_mcdepends()) 672 new = False 673 for k in mcdeps: 674 if k in seen: 675 continue 676 l = k.split(':') 677 depmc = l[2] 678 if depmc not in self.multiconfigs: 679 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc)) 680 else: 681 logger.debug("Adding providers for multiconfig dependency %s" % l[3]) 682 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3]) 683 seen.add(k) 684 new = True 685 686 for mc in self.multiconfigs: 687 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 688 689 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) 690 return taskdata, runlist 691 692 def prepareTreeData(self, pkgs_to_build, task, halt=False): 693 """ 694 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 695 """ 696 697 # We set halt to False here to prevent unbuildable targets raising 698 # an exception when we're just generating data 699 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True) 700 701 return runlist, taskdata 702 703 ######## WARNING : this function requires cache_extra to be enabled ######## 704 705 def generateTaskDepTreeData(self, pkgs_to_build, task): 706 """ 707 Create a dependency graph of pkgs_to_build including reverse dependency 708 information. 709 """ 710 if not task.startswith("do_"): 711 task = "do_%s" % task 712 713 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True) 714 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 715 rq.rqdata.prepare() 716 return self.buildDependTree(rq, taskdata) 717 718 @staticmethod 719 def add_mc_prefix(mc, pn): 720 if mc: 721 return "mc:%s:%s" % (mc, pn) 722 return pn 723 724 def buildDependTree(self, rq, taskdata): 725 seen_fns = [] 726 depend_tree = {} 727 depend_tree["depends"] = {} 728 depend_tree["tdepends"] = {} 729 depend_tree["pn"] = {} 730 depend_tree["rdepends-pn"] = {} 731 depend_tree["packages"] = {} 732 depend_tree["rdepends-pkg"] = {} 733 depend_tree["rrecs-pkg"] = {} 734 depend_tree['providermap'] = {} 735 depend_tree["layer-priorities"] = self.bbfile_config_priorities 736 737 for mc in taskdata: 738 for name, fn in list(taskdata[mc].get_providermap().items()): 739 pn = self.recipecaches[mc].pkg_fn[fn] 740 pn = self.add_mc_prefix(mc, pn) 741 if name != pn: 742 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn] 743 depend_tree['providermap'][name] = (pn, version) 744 745 for tid in rq.rqdata.runtaskentries: 746 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid) 747 pn = self.recipecaches[mc].pkg_fn[taskfn] 748 pn = self.add_mc_prefix(mc, pn) 749 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn] 750 if pn not in depend_tree["pn"]: 751 depend_tree["pn"][pn] = {} 752 depend_tree["pn"][pn]["filename"] = taskfn 753 depend_tree["pn"][pn]["version"] = version 754 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None) 755 756 # if we have extra caches, list all attributes they bring in 757 extra_info = [] 758 for cache_class in self.caches_array: 759 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'): 760 cachefields = getattr(cache_class, 'cachefields', []) 761 extra_info = extra_info + cachefields 762 763 # for all attributes stored, add them to the dependency tree 764 for ei in extra_info: 765 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn] 766 767 768 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid)) 769 if not dotname in depend_tree["tdepends"]: 770 depend_tree["tdepends"][dotname] = [] 771 for dep in rq.rqdata.runtaskentries[tid].depends: 772 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep) 773 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn] 774 if depmc: 775 depmc = "mc:" + depmc + ":" 776 depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep))) 777 if taskfn not in seen_fns: 778 seen_fns.append(taskfn) 779 packages = [] 780 781 depend_tree["depends"][pn] = [] 782 for dep in taskdata[mc].depids[taskfn]: 783 depend_tree["depends"][pn].append(dep) 784 785 depend_tree["rdepends-pn"][pn] = [] 786 for rdep in taskdata[mc].rdepids[taskfn]: 787 depend_tree["rdepends-pn"][pn].append(rdep) 788 789 rdepends = self.recipecaches[mc].rundeps[taskfn] 790 for package in rdepends: 791 depend_tree["rdepends-pkg"][package] = [] 792 for rdepend in rdepends[package]: 793 depend_tree["rdepends-pkg"][package].append(rdepend) 794 packages.append(package) 795 796 rrecs = self.recipecaches[mc].runrecs[taskfn] 797 for package in rrecs: 798 depend_tree["rrecs-pkg"][package] = [] 799 for rdepend in rrecs[package]: 800 depend_tree["rrecs-pkg"][package].append(rdepend) 801 if not package in packages: 802 packages.append(package) 803 804 for package in packages: 805 if package not in depend_tree["packages"]: 806 depend_tree["packages"][package] = {} 807 depend_tree["packages"][package]["pn"] = pn 808 depend_tree["packages"][package]["filename"] = taskfn 809 depend_tree["packages"][package]["version"] = version 810 811 return depend_tree 812 813 ######## WARNING : this function requires cache_extra to be enabled ######## 814 def generatePkgDepTreeData(self, pkgs_to_build, task): 815 """ 816 Create a dependency tree of pkgs_to_build, returning the data. 817 """ 818 if not task.startswith("do_"): 819 task = "do_%s" % task 820 821 _, taskdata = self.prepareTreeData(pkgs_to_build, task) 822 823 seen_fns = [] 824 depend_tree = {} 825 depend_tree["depends"] = {} 826 depend_tree["pn"] = {} 827 depend_tree["rdepends-pn"] = {} 828 depend_tree["rdepends-pkg"] = {} 829 depend_tree["rrecs-pkg"] = {} 830 831 # if we have extra caches, list all attributes they bring in 832 extra_info = [] 833 for cache_class in self.caches_array: 834 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'): 835 cachefields = getattr(cache_class, 'cachefields', []) 836 extra_info = extra_info + cachefields 837 838 tids = [] 839 for mc in taskdata: 840 for tid in taskdata[mc].taskentries: 841 tids.append(tid) 842 843 for tid in tids: 844 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid) 845 846 pn = self.recipecaches[mc].pkg_fn[taskfn] 847 pn = self.add_mc_prefix(mc, pn) 848 849 if pn not in depend_tree["pn"]: 850 depend_tree["pn"][pn] = {} 851 depend_tree["pn"][pn]["filename"] = taskfn 852 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn] 853 depend_tree["pn"][pn]["version"] = version 854 rdepends = self.recipecaches[mc].rundeps[taskfn] 855 rrecs = self.recipecaches[mc].runrecs[taskfn] 856 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None) 857 858 # for all extra attributes stored, add them to the dependency tree 859 for ei in extra_info: 860 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn] 861 862 if taskfn not in seen_fns: 863 seen_fns.append(taskfn) 864 865 depend_tree["depends"][pn] = [] 866 for dep in taskdata[mc].depids[taskfn]: 867 pn_provider = "" 868 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]: 869 fn_provider = taskdata[mc].build_targets[dep][0] 870 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider] 871 else: 872 pn_provider = dep 873 pn_provider = self.add_mc_prefix(mc, pn_provider) 874 depend_tree["depends"][pn].append(pn_provider) 875 876 depend_tree["rdepends-pn"][pn] = [] 877 for rdep in taskdata[mc].rdepids[taskfn]: 878 pn_rprovider = "" 879 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]: 880 fn_rprovider = taskdata[mc].run_targets[rdep][0] 881 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider] 882 else: 883 pn_rprovider = rdep 884 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider) 885 depend_tree["rdepends-pn"][pn].append(pn_rprovider) 886 887 depend_tree["rdepends-pkg"].update(rdepends) 888 depend_tree["rrecs-pkg"].update(rrecs) 889 890 return depend_tree 891 892 def generateDepTreeEvent(self, pkgs_to_build, task): 893 """ 894 Create a task dependency graph of pkgs_to_build. 895 Generate an event with the result 896 """ 897 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) 898 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data) 899 900 def generateDotGraphFiles(self, pkgs_to_build, task): 901 """ 902 Create a task dependency graph of pkgs_to_build. 903 Save the result to a set of .dot files. 904 """ 905 906 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) 907 908 pns = depgraph["pn"].keys() 909 if pns: 910 with open('pn-buildlist', 'w') as f: 911 f.write("%s\n" % "\n".join(sorted(pns))) 912 logger.info("PN build list saved to 'pn-buildlist'") 913 914 # Remove old format output files to ensure no confusion with stale data 915 try: 916 os.unlink('pn-depends.dot') 917 except FileNotFoundError: 918 pass 919 try: 920 os.unlink('package-depends.dot') 921 except FileNotFoundError: 922 pass 923 try: 924 os.unlink('recipe-depends.dot') 925 except FileNotFoundError: 926 pass 927 928 with open('task-depends.dot', 'w') as f: 929 f.write("digraph depends {\n") 930 for task in sorted(depgraph["tdepends"]): 931 (pn, taskname) = task.rsplit(".", 1) 932 fn = depgraph["pn"][pn]["filename"] 933 version = depgraph["pn"][pn]["version"] 934 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn)) 935 for dep in sorted(depgraph["tdepends"][task]): 936 f.write('"%s" -> "%s"\n' % (task, dep)) 937 f.write("}\n") 938 logger.info("Task dependencies saved to 'task-depends.dot'") 939 940 def show_appends_with_no_recipes(self): 941 appends_without_recipes = {} 942 # Determine which bbappends haven't been applied 943 for mc in self.multiconfigs: 944 # First get list of recipes, including skipped 945 recipefns = list(self.recipecaches[mc].pkg_fn.keys()) 946 recipefns.extend(self.skiplist_by_mc[mc].keys()) 947 948 # Work out list of bbappends that have been applied 949 applied_appends = [] 950 for fn in recipefns: 951 applied_appends.extend(self.collections[mc].get_file_appends(fn)) 952 953 appends_without_recipes[mc] = [] 954 for _, appendfn in self.collections[mc].bbappends: 955 if not appendfn in applied_appends: 956 appends_without_recipes[mc].append(appendfn) 957 958 msgs = [] 959 for mc in sorted(appends_without_recipes.keys()): 960 if appends_without_recipes[mc]: 961 msgs.append('No recipes in %s available for:\n %s' % (mc if mc else 'default', 962 '\n '.join(appends_without_recipes[mc]))) 963 964 if msgs: 965 bb.fatal("\n".join(msgs)) 966 967 def handlePrefProviders(self): 968 969 for mc in self.multiconfigs: 970 localdata = data.createCopy(self.databuilder.mcdata[mc]) 971 bb.data.expandKeys(localdata) 972 973 # Handle PREFERRED_PROVIDERS 974 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split(): 975 try: 976 (providee, provider) = p.split(':') 977 except: 978 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p) 979 continue 980 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider: 981 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee]) 982 self.recipecaches[mc].preferred[providee] = provider 983 984 def findConfigFilePath(self, configfile): 985 """ 986 Find the location on disk of configfile and if it exists and was parsed by BitBake 987 emit the ConfigFilePathFound event with the path to the file. 988 """ 989 path = bb.cookerdata.findConfigFile(configfile, self.data) 990 if not path: 991 return 992 993 # Generate a list of parsed configuration files by searching the files 994 # listed in the __depends and __base_depends variables with a .conf suffix. 995 conffiles = [] 996 dep_files = self.data.getVar('__base_depends', False) or [] 997 dep_files = dep_files + (self.data.getVar('__depends', False) or []) 998 999 for f in dep_files: 1000 if f[0].endswith(".conf"): 1001 conffiles.append(f[0]) 1002 1003 _, conf, conffile = path.rpartition("conf/") 1004 match = os.path.join(conf, conffile) 1005 # Try and find matches for conf/conffilename.conf as we don't always 1006 # have the full path to the file. 1007 for cfg in conffiles: 1008 if cfg.endswith(match): 1009 bb.event.fire(bb.event.ConfigFilePathFound(path), 1010 self.data) 1011 break 1012 1013 def findFilesMatchingInDir(self, filepattern, directory): 1014 """ 1015 Searches for files containing the substring 'filepattern' which are children of 1016 'directory' in each BBPATH. i.e. to find all rootfs package classes available 1017 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes') 1018 or to find all machine configuration files one could call: 1019 findFilesMatchingInDir(self, '.conf', 'conf/machine') 1020 """ 1021 1022 matches = [] 1023 bbpaths = self.data.getVar('BBPATH').split(':') 1024 for path in bbpaths: 1025 dirpath = os.path.join(path, directory) 1026 if os.path.exists(dirpath): 1027 for root, dirs, files in os.walk(dirpath): 1028 for f in files: 1029 if filepattern in f: 1030 matches.append(f) 1031 1032 if matches: 1033 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) 1034 1035 def testCookerCommandEvent(self, filepattern): 1036 # Dummy command used by OEQA selftest to test tinfoil without IO 1037 matches = ["A", "B"] 1038 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) 1039 1040 def findProviders(self, mc=''): 1041 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1042 1043 def findBestProvider(self, pn, mc=''): 1044 if pn in self.recipecaches[mc].providers: 1045 filenames = self.recipecaches[mc].providers[pn] 1046 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc]) 1047 if eligible is not None: 1048 filename = eligible[0] 1049 else: 1050 filename = None 1051 return None, None, None, filename 1052 elif pn in self.recipecaches[mc].pkg_pn: 1053 (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1054 if required and preferred_file is None: 1055 return None, None, None, None 1056 return (latest, latest_f, preferred_ver, preferred_file) 1057 else: 1058 return None, None, None, None 1059 1060 def findConfigFiles(self, varname): 1061 """ 1062 Find config files which are appropriate values for varname. 1063 i.e. MACHINE, DISTRO 1064 """ 1065 possible = [] 1066 var = varname.lower() 1067 1068 data = self.data 1069 # iterate configs 1070 bbpaths = data.getVar('BBPATH').split(':') 1071 for path in bbpaths: 1072 confpath = os.path.join(path, "conf", var) 1073 if os.path.exists(confpath): 1074 for root, dirs, files in os.walk(confpath): 1075 # get all child files, these are appropriate values 1076 for f in files: 1077 val, sep, end = f.rpartition('.') 1078 if end == 'conf': 1079 possible.append(val) 1080 1081 if possible: 1082 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data) 1083 1084 def findInheritsClass(self, klass): 1085 """ 1086 Find all recipes which inherit the specified class 1087 """ 1088 pkg_list = [] 1089 1090 for pfn in self.recipecaches[''].pkg_fn: 1091 inherits = self.recipecaches[''].inherits.get(pfn, None) 1092 if inherits and klass in inherits: 1093 pkg_list.append(self.recipecaches[''].pkg_fn[pfn]) 1094 1095 return pkg_list 1096 1097 def generateTargetsTree(self, klass=None, pkgs=None): 1098 """ 1099 Generate a dependency tree of buildable targets 1100 Generate an event with the result 1101 """ 1102 # if the caller hasn't specified a pkgs list default to universe 1103 if not pkgs: 1104 pkgs = ['universe'] 1105 # if inherited_class passed ensure all recipes which inherit the 1106 # specified class are included in pkgs 1107 if klass: 1108 extra_pkgs = self.findInheritsClass(klass) 1109 pkgs = pkgs + extra_pkgs 1110 1111 # generate a dependency tree for all our packages 1112 tree = self.generatePkgDepTreeData(pkgs, 'build') 1113 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data) 1114 1115 def interactiveMode( self ): 1116 """Drop off into a shell""" 1117 try: 1118 from bb import shell 1119 except ImportError: 1120 parselog.exception("Interactive mode not available") 1121 raise bb.BBHandledException() 1122 else: 1123 shell.start( self ) 1124 1125 1126 def handleCollections(self, collections): 1127 """Handle collections""" 1128 errors = False 1129 self.bbfile_config_priorities = [] 1130 if collections: 1131 collection_priorities = {} 1132 collection_depends = {} 1133 collection_list = collections.split() 1134 min_prio = 0 1135 for c in collection_list: 1136 bb.debug(1,'Processing %s in collection list' % (c)) 1137 1138 # Get collection priority if defined explicitly 1139 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c) 1140 if priority: 1141 try: 1142 prio = int(priority) 1143 except ValueError: 1144 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority) 1145 errors = True 1146 if min_prio == 0 or prio < min_prio: 1147 min_prio = prio 1148 collection_priorities[c] = prio 1149 else: 1150 collection_priorities[c] = None 1151 1152 # Check dependencies and store information for priority calculation 1153 deps = self.data.getVar("LAYERDEPENDS_%s" % c) 1154 if deps: 1155 try: 1156 depDict = bb.utils.explode_dep_versions2(deps) 1157 except bb.utils.VersionStringException as vse: 1158 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse))) 1159 for dep, oplist in list(depDict.items()): 1160 if dep in collection_list: 1161 for opstr in oplist: 1162 layerver = self.data.getVar("LAYERVERSION_%s" % dep) 1163 (op, depver) = opstr.split() 1164 if layerver: 1165 try: 1166 res = bb.utils.vercmp_string_op(layerver, depver, op) 1167 except bb.utils.VersionStringException as vse: 1168 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse))) 1169 if not res: 1170 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver) 1171 errors = True 1172 else: 1173 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep) 1174 errors = True 1175 else: 1176 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep) 1177 errors = True 1178 collection_depends[c] = list(depDict.keys()) 1179 else: 1180 collection_depends[c] = [] 1181 1182 # Check recommends and store information for priority calculation 1183 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c) 1184 if recs: 1185 try: 1186 recDict = bb.utils.explode_dep_versions2(recs) 1187 except bb.utils.VersionStringException as vse: 1188 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) 1189 for rec, oplist in list(recDict.items()): 1190 if rec in collection_list: 1191 if oplist: 1192 opstr = oplist[0] 1193 layerver = self.data.getVar("LAYERVERSION_%s" % rec) 1194 if layerver: 1195 (op, recver) = opstr.split() 1196 try: 1197 res = bb.utils.vercmp_string_op(layerver, recver, op) 1198 except bb.utils.VersionStringException as vse: 1199 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) 1200 if not res: 1201 parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver) 1202 continue 1203 else: 1204 parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec) 1205 continue 1206 parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec) 1207 collection_depends[c].append(rec) 1208 else: 1209 parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec) 1210 1211 # Recursively work out collection priorities based on dependencies 1212 def calc_layer_priority(collection): 1213 if not collection_priorities[collection]: 1214 max_depprio = min_prio 1215 for dep in collection_depends[collection]: 1216 calc_layer_priority(dep) 1217 depprio = collection_priorities[dep] 1218 if depprio > max_depprio: 1219 max_depprio = depprio 1220 max_depprio += 1 1221 parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio) 1222 collection_priorities[collection] = max_depprio 1223 1224 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities 1225 for c in collection_list: 1226 calc_layer_priority(c) 1227 regex = self.data.getVar("BBFILE_PATTERN_%s" % c) 1228 if regex is None: 1229 parselog.error("BBFILE_PATTERN_%s not defined" % c) 1230 errors = True 1231 continue 1232 elif regex == "": 1233 parselog.debug("BBFILE_PATTERN_%s is empty" % c) 1234 cre = re.compile('^NULL$') 1235 errors = False 1236 else: 1237 try: 1238 cre = re.compile(regex) 1239 except re.error: 1240 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex) 1241 errors = True 1242 continue 1243 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c])) 1244 if errors: 1245 # We've already printed the actual error(s) 1246 raise CollectionError("Errors during parsing layer configuration") 1247 1248 def buildSetVars(self): 1249 """ 1250 Setup any variables needed before starting a build 1251 """ 1252 t = time.gmtime() 1253 for mc in self.databuilder.mcdata: 1254 ds = self.databuilder.mcdata[mc] 1255 if not ds.getVar("BUILDNAME", False): 1256 ds.setVar("BUILDNAME", "${DATE}${TIME}") 1257 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t)) 1258 ds.setVar("DATE", time.strftime('%Y%m%d', t)) 1259 ds.setVar("TIME", time.strftime('%H%M%S', t)) 1260 1261 def reset_mtime_caches(self): 1262 """ 1263 Reset mtime caches - this is particularly important when memory resident as something 1264 which is cached is not unlikely to have changed since the last invocation (e.g. a 1265 file associated with a recipe might have been modified by the user). 1266 """ 1267 build.reset_cache() 1268 bb.fetch._checksum_cache.mtime_cache.clear() 1269 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None) 1270 if siggen_cache: 1271 bb.parse.siggen.checksum_cache.mtime_cache.clear() 1272 1273 def matchFiles(self, bf, mc=''): 1274 """ 1275 Find the .bb files which match the expression in 'buildfile'. 1276 """ 1277 if bf.startswith("/") or bf.startswith("../"): 1278 bf = os.path.abspath(bf) 1279 1280 collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)} 1281 filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1282 try: 1283 os.stat(bf) 1284 bf = os.path.abspath(bf) 1285 return [bf] 1286 except OSError: 1287 regexp = re.compile(bf) 1288 matches = [] 1289 for f in filelist: 1290 if regexp.search(f) and os.path.isfile(f): 1291 matches.append(f) 1292 return matches 1293 1294 def matchFile(self, buildfile, mc=''): 1295 """ 1296 Find the .bb file which matches the expression in 'buildfile'. 1297 Raise an error if multiple files 1298 """ 1299 matches = self.matchFiles(buildfile, mc) 1300 if len(matches) != 1: 1301 if matches: 1302 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches)) 1303 if matches: 1304 for f in matches: 1305 msg += "\n %s" % f 1306 parselog.error(msg) 1307 else: 1308 parselog.error("Unable to find any recipe file matching '%s'" % buildfile) 1309 raise NoSpecificMatch 1310 return matches[0] 1311 1312 def buildFile(self, buildfile, task): 1313 """ 1314 Build the file matching regexp buildfile 1315 """ 1316 bb.event.fire(bb.event.BuildInit(), self.data) 1317 1318 # Too many people use -b because they think it's how you normally 1319 # specify a target to be built, so show a warning 1320 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.") 1321 1322 self.buildFileInternal(buildfile, task) 1323 1324 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False): 1325 """ 1326 Build the file matching regexp buildfile 1327 """ 1328 1329 # Parse the configuration here. We need to do it explicitly here since 1330 # buildFile() doesn't use the cache 1331 self.parseConfiguration() 1332 1333 # If we are told to do the None task then query the default task 1334 if task is None: 1335 task = self.configuration.cmd 1336 if not task.startswith("do_"): 1337 task = "do_%s" % task 1338 1339 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile) 1340 fn = self.matchFile(fn, mc) 1341 1342 self.buildSetVars() 1343 self.reset_mtime_caches() 1344 1345 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array) 1346 1347 layername = self.collections[mc].calc_bbfile_priority(fn)[2] 1348 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername) 1349 infos = dict(infos) 1350 1351 fn = bb.cache.realfn2virtual(fn, cls, mc) 1352 try: 1353 info_array = infos[fn] 1354 except KeyError: 1355 bb.fatal("%s does not exist" % fn) 1356 1357 if info_array[0].skipped: 1358 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason)) 1359 1360 self.recipecaches[mc].add_from_recipeinfo(fn, info_array) 1361 1362 # Tweak some variables 1363 item = info_array[0].pn 1364 self.recipecaches[mc].ignored_dependencies = set() 1365 self.recipecaches[mc].bbfile_priority[fn] = 1 1366 self.configuration.limited_deps = True 1367 1368 # Remove external dependencies 1369 self.recipecaches[mc].task_deps[fn]['depends'] = {} 1370 self.recipecaches[mc].deps[fn] = [] 1371 self.recipecaches[mc].rundeps[fn] = defaultdict(list) 1372 self.recipecaches[mc].runrecs[fn] = defaultdict(list) 1373 1374 bb.parse.siggen.setup_datacache(self.recipecaches) 1375 1376 # Invalidate task for target if force mode active 1377 if self.configuration.force: 1378 logger.verbose("Invalidate task %s, %s", task, fn) 1379 bb.parse.siggen.invalidate_task(task, fn) 1380 1381 # Setup taskdata structure 1382 taskdata = {} 1383 taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt) 1384 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item) 1385 1386 if quietlog: 1387 rqloglevel = bb.runqueue.logger.getEffectiveLevel() 1388 bb.runqueue.logger.setLevel(logging.WARNING) 1389 1390 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME") 1391 if fireevents: 1392 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc]) 1393 if self.eventlog: 1394 self.eventlog[2].write_variables() 1395 bb.event.enable_heartbeat() 1396 1397 # Execute the runqueue 1398 runlist = [[mc, item, task, fn]] 1399 1400 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1401 1402 def buildFileIdle(server, rq, halt): 1403 1404 msg = None 1405 interrupted = 0 1406 if halt or self.state == State.FORCE_SHUTDOWN: 1407 rq.finish_runqueue(True) 1408 msg = "Forced shutdown" 1409 interrupted = 2 1410 elif self.state == State.SHUTDOWN: 1411 rq.finish_runqueue(False) 1412 msg = "Stopped build" 1413 interrupted = 1 1414 failures = 0 1415 try: 1416 retval = rq.execute_runqueue() 1417 except runqueue.TaskFailure as exc: 1418 failures += len(exc.args) 1419 retval = False 1420 except SystemExit as exc: 1421 if quietlog: 1422 bb.runqueue.logger.setLevel(rqloglevel) 1423 return bb.server.process.idleFinish(str(exc)) 1424 1425 if not retval: 1426 if fireevents: 1427 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc]) 1428 bb.event.disable_heartbeat() 1429 # We trashed self.recipecaches above 1430 self._parsecache_set(False) 1431 self.configuration.limited_deps = False 1432 bb.parse.siggen.reset(self.data) 1433 if quietlog: 1434 bb.runqueue.logger.setLevel(rqloglevel) 1435 return bb.server.process.idleFinish(msg) 1436 if retval is True: 1437 return True 1438 return retval 1439 1440 self.idleCallBackRegister(buildFileIdle, rq) 1441 1442 def getTaskSignatures(self, target, tasks): 1443 sig = [] 1444 getAllTaskSignatures = False 1445 1446 if not tasks: 1447 tasks = ["do_build"] 1448 getAllTaskSignatures = True 1449 1450 for task in tasks: 1451 taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt) 1452 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1453 rq.rqdata.prepare() 1454 1455 for l in runlist: 1456 mc, pn, taskname, fn = l 1457 1458 taskdep = rq.rqdata.dataCaches[mc].task_deps[fn] 1459 for t in taskdep['tasks']: 1460 if t in taskdep['nostamp'] or "setscene" in t: 1461 continue 1462 tid = bb.runqueue.build_tid(mc, fn, t) 1463 1464 if t in task or getAllTaskSignatures: 1465 try: 1466 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) 1467 except KeyError: 1468 sig.append(self.getTaskSignatures(target, [t])[0]) 1469 1470 return sig 1471 1472 def buildTargets(self, targets, task): 1473 """ 1474 Attempt to build the targets specified 1475 """ 1476 1477 def buildTargetsIdle(server, rq, halt): 1478 msg = None 1479 interrupted = 0 1480 if halt or self.state == State.FORCE_SHUTDOWN: 1481 bb.event._should_exit.set() 1482 rq.finish_runqueue(True) 1483 msg = "Forced shutdown" 1484 interrupted = 2 1485 elif self.state == State.SHUTDOWN: 1486 rq.finish_runqueue(False) 1487 msg = "Stopped build" 1488 interrupted = 1 1489 failures = 0 1490 try: 1491 retval = rq.execute_runqueue() 1492 except runqueue.TaskFailure as exc: 1493 failures += len(exc.args) 1494 retval = False 1495 except SystemExit as exc: 1496 return bb.server.process.idleFinish(str(exc)) 1497 1498 if not retval: 1499 try: 1500 for mc in self.multiconfigs: 1501 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc]) 1502 finally: 1503 bb.event.disable_heartbeat() 1504 return bb.server.process.idleFinish(msg) 1505 1506 if retval is True: 1507 return True 1508 return retval 1509 1510 self.reset_mtime_caches() 1511 self.buildSetVars() 1512 1513 # If we are told to do the None task then query the default task 1514 if task is None: 1515 task = self.configuration.cmd 1516 1517 if not task.startswith("do_"): 1518 task = "do_%s" % task 1519 1520 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets] 1521 1522 bb.event.fire(bb.event.BuildInit(packages), self.data) 1523 1524 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt) 1525 1526 buildname = self.data.getVar("BUILDNAME", False) 1527 1528 # make targets to always look as <target>:do_<task> 1529 ntargets = [] 1530 for target in runlist: 1531 if target[0]: 1532 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2])) 1533 ntargets.append("%s:%s" % (target[1], target[2])) 1534 1535 for mc in self.multiconfigs: 1536 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc]) 1537 if self.eventlog: 1538 self.eventlog[2].write_variables() 1539 bb.event.enable_heartbeat() 1540 1541 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1542 if 'universe' in targets: 1543 rq.rqdata.warn_multi_bb = True 1544 1545 self.idleCallBackRegister(buildTargetsIdle, rq) 1546 1547 1548 def getAllKeysWithFlags(self, flaglist): 1549 def dummy_autorev(d): 1550 return 1551 1552 dump = {} 1553 # Horrible but for now we need to avoid any sideeffects of autorev being called 1554 saved = bb.fetch2.get_autorev 1555 bb.fetch2.get_autorev = dummy_autorev 1556 for k in self.data.keys(): 1557 try: 1558 expand = True 1559 flags = self.data.getVarFlags(k) 1560 if flags and "func" in flags and "python" in flags: 1561 expand = False 1562 v = self.data.getVar(k, expand) 1563 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart): 1564 dump[k] = { 1565 'v' : str(v) , 1566 'history' : self.data.varhistory.variable(k), 1567 } 1568 for d in flaglist: 1569 if flags and d in flags: 1570 dump[k][d] = flags[d] 1571 else: 1572 dump[k][d] = None 1573 except Exception as e: 1574 print(e) 1575 bb.fetch2.get_autorev = saved 1576 return dump 1577 1578 1579 def updateCacheSync(self): 1580 if self.state == State.RUNNING: 1581 return 1582 1583 if not self.baseconfig_valid: 1584 logger.debug("Reloading base configuration data") 1585 self.initConfigurationData() 1586 self.handlePRServ() 1587 1588 # This is called for all async commands when self.state != running 1589 def updateCache(self): 1590 if self.state == State.RUNNING: 1591 return 1592 1593 if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR): 1594 if hasattr(self.parser, 'shutdown'): 1595 self.parser.shutdown(clean=False) 1596 self.parser.final_cleanup() 1597 raise bb.BBHandledException() 1598 1599 if self.state != State.PARSING: 1600 self.updateCacheSync() 1601 1602 if self.state != State.PARSING and not self.parsecache_valid: 1603 bb.server.process.serverlog("Parsing started") 1604 self.parsewatched = {} 1605 1606 bb.parse.siggen.reset(self.data) 1607 self.parseConfiguration () 1608 if CookerFeatures.SEND_SANITYEVENTS in self.featureset: 1609 for mc in self.multiconfigs: 1610 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc]) 1611 1612 for mc in self.multiconfigs: 1613 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or "" 1614 self.recipecaches[mc].ignored_dependencies = set(ignore.split()) 1615 1616 for dep in self.configuration.extra_assume_provided: 1617 self.recipecaches[mc].ignored_dependencies.add(dep) 1618 1619 mcfilelist = {} 1620 total_masked = 0 1621 searchdirs = set() 1622 for mc in self.multiconfigs: 1623 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1624 1625 mcfilelist[mc] = filelist 1626 total_masked += masked 1627 searchdirs |= set(search) 1628 1629 # Add mtimes for directories searched for bb/bbappend files 1630 for dirent in searchdirs: 1631 self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))]) 1632 1633 self.parser = CookerParser(self, mcfilelist, total_masked) 1634 self._parsecache_set(True) 1635 1636 self.state = State.PARSING 1637 1638 if not self.parser.parse_next(): 1639 bb.server.process.serverlog("Parsing completed") 1640 collectlog.debug("parsing complete") 1641 if self.parser.error: 1642 raise bb.BBHandledException() 1643 self.show_appends_with_no_recipes() 1644 self.handlePrefProviders() 1645 for mc in self.multiconfigs: 1646 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data) 1647 self.state = State.RUNNING 1648 1649 # Send an event listing all stamps reachable after parsing 1650 # which the metadata may use to clean up stale data 1651 for mc in self.multiconfigs: 1652 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp) 1653 bb.event.fire(event, self.databuilder.mcdata[mc]) 1654 return None 1655 1656 return True 1657 1658 def checkPackages(self, pkgs_to_build, task=None): 1659 1660 # Return a copy, don't modify the original 1661 pkgs_to_build = pkgs_to_build[:] 1662 1663 if not pkgs_to_build: 1664 raise NothingToBuild 1665 1666 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() 1667 for pkg in pkgs_to_build.copy(): 1668 if pkg in ignore: 1669 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg) 1670 if pkg.startswith("multiconfig:"): 1671 pkgs_to_build.remove(pkg) 1672 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:")) 1673 1674 if 'world' in pkgs_to_build: 1675 pkgs_to_build.remove('world') 1676 for mc in self.multiconfigs: 1677 bb.providers.buildWorldTargetList(self.recipecaches[mc], task) 1678 for t in self.recipecaches[mc].world_target: 1679 if mc: 1680 t = "mc:" + mc + ":" + t 1681 pkgs_to_build.append(t) 1682 1683 if 'universe' in pkgs_to_build: 1684 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.") 1685 parselog.debug("collating packages for \"universe\"") 1686 pkgs_to_build.remove('universe') 1687 for mc in self.multiconfigs: 1688 for t in self.recipecaches[mc].universe_target: 1689 if task: 1690 foundtask = False 1691 for provider_fn in self.recipecaches[mc].providers[t]: 1692 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']: 1693 foundtask = True 1694 break 1695 if not foundtask: 1696 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task)) 1697 continue 1698 if mc: 1699 t = "mc:" + mc + ":" + t 1700 pkgs_to_build.append(t) 1701 1702 return pkgs_to_build 1703 1704 def pre_serve(self): 1705 return 1706 1707 def post_serve(self): 1708 self.shutdown(force=True) 1709 prserv.serv.auto_shutdown() 1710 if hasattr(bb.parse, "siggen"): 1711 bb.parse.siggen.exit() 1712 if self.hashserv: 1713 self.hashserv.process.terminate() 1714 self.hashserv.process.join() 1715 if hasattr(self, "data"): 1716 bb.event.fire(CookerExit(), self.data) 1717 1718 def shutdown(self, force=False): 1719 if force: 1720 self.state = State.FORCE_SHUTDOWN 1721 bb.event._should_exit.set() 1722 else: 1723 self.state = State.SHUTDOWN 1724 1725 if self.parser: 1726 self.parser.shutdown(clean=False) 1727 self.parser.final_cleanup() 1728 1729 def finishcommand(self): 1730 if hasattr(self.parser, 'shutdown'): 1731 self.parser.shutdown(clean=False) 1732 self.parser.final_cleanup() 1733 self.state = State.INITIAL 1734 bb.event._should_exit.clear() 1735 1736 def reset(self): 1737 if hasattr(bb.parse, "siggen"): 1738 bb.parse.siggen.exit() 1739 self.finishcommand() 1740 self.initConfigurationData() 1741 self.handlePRServ() 1742 1743 def clientComplete(self): 1744 """Called when the client is done using the server""" 1745 self.finishcommand() 1746 self.extraconfigdata = {} 1747 self.command.reset() 1748 if hasattr(self, "data"): 1749 self.databuilder.reset() 1750 self.data = self.databuilder.data 1751 # In theory tinfoil could have modified the base data before parsing, 1752 # ideally need to track if anything did modify the datastore 1753 self._parsecache_set(False) 1754 1755class CookerExit(bb.event.Event): 1756 """ 1757 Notify clients of the Cooker shutdown 1758 """ 1759 1760 def __init__(self): 1761 bb.event.Event.__init__(self) 1762 1763 1764class CookerCollectFiles(object): 1765 def __init__(self, priorities, mc=''): 1766 self.mc = mc 1767 self.bbappends = [] 1768 # Priorities is a list of tuples, with the second element as the pattern. 1769 # We need to sort the list with the longest pattern first, and so on to 1770 # the shortest. This allows nested layers to be properly evaluated. 1771 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) 1772 1773 def calc_bbfile_priority(self, filename): 1774 for layername, _, regex, pri in self.bbfile_config_priorities: 1775 if regex.match(filename): 1776 return pri, regex, layername 1777 return 0, None, None 1778 1779 def get_bbfiles(self): 1780 """Get list of default .bb files by reading out the current directory""" 1781 path = os.getcwd() 1782 contents = os.listdir(path) 1783 bbfiles = [] 1784 for f in contents: 1785 if f.endswith(".bb"): 1786 bbfiles.append(os.path.abspath(os.path.join(path, f))) 1787 return bbfiles 1788 1789 def find_bbfiles(self, path): 1790 """Find all the .bb and .bbappend files in a directory""" 1791 found = [] 1792 for dir, dirs, files in os.walk(path): 1793 for ignored in ('SCCS', 'CVS', '.svn'): 1794 if ignored in dirs: 1795 dirs.remove(ignored) 1796 found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))] 1797 1798 return found 1799 1800 def collect_bbfiles(self, config, eventdata): 1801 """Collect all available .bb build files""" 1802 masked = 0 1803 1804 collectlog.debug("collecting .bb files") 1805 1806 files = (config.getVar( "BBFILES") or "").split() 1807 1808 # Sort files by priority 1809 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) 1810 config.setVar("BBFILES_PRIORITIZED", " ".join(files)) 1811 1812 if not files: 1813 files = self.get_bbfiles() 1814 1815 if not files: 1816 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") 1817 bb.event.fire(CookerExit(), eventdata) 1818 1819 # We need to track where we look so that we can know when the cache is invalid. There 1820 # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir() 1821 # calls while we run glob(). 1822 origlistdir = os.listdir 1823 if hasattr(os, 'scandir'): 1824 origscandir = os.scandir 1825 searchdirs = [] 1826 1827 def ourlistdir(d): 1828 searchdirs.append(d) 1829 return origlistdir(d) 1830 1831 def ourscandir(d): 1832 searchdirs.append(d) 1833 return origscandir(d) 1834 1835 os.listdir = ourlistdir 1836 if hasattr(os, 'scandir'): 1837 os.scandir = ourscandir 1838 try: 1839 # Can't use set here as order is important 1840 newfiles = [] 1841 for f in files: 1842 if os.path.isdir(f): 1843 dirfiles = self.find_bbfiles(f) 1844 for g in dirfiles: 1845 if g not in newfiles: 1846 newfiles.append(g) 1847 else: 1848 globbed = glob.glob(f) 1849 if not globbed and os.path.exists(f): 1850 globbed = [f] 1851 # glob gives files in order on disk. Sort to be deterministic. 1852 for g in sorted(globbed): 1853 if g not in newfiles: 1854 newfiles.append(g) 1855 finally: 1856 os.listdir = origlistdir 1857 if hasattr(os, 'scandir'): 1858 os.scandir = origscandir 1859 1860 bbmask = config.getVar('BBMASK') 1861 1862 if bbmask: 1863 # First validate the individual regular expressions and ignore any 1864 # that do not compile 1865 bbmasks = [] 1866 for mask in bbmask.split(): 1867 # When constructing an older style single regex, it's possible for BBMASK 1868 # to end up beginning with '|', which matches and masks _everything_. 1869 if mask.startswith("|"): 1870 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask) 1871 mask = mask[1:] 1872 try: 1873 re.compile(mask) 1874 bbmasks.append(mask) 1875 except re.error: 1876 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) 1877 1878 # Then validate the combined regular expressions. This should never 1879 # fail, but better safe than sorry... 1880 bbmask = "|".join(bbmasks) 1881 try: 1882 bbmask_compiled = re.compile(bbmask) 1883 except re.error: 1884 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask) 1885 bbmask = None 1886 1887 bbfiles = [] 1888 bbappend = [] 1889 for f in newfiles: 1890 if bbmask and bbmask_compiled.search(f): 1891 collectlog.debug("skipping masked file %s", f) 1892 masked += 1 1893 continue 1894 if f.endswith('.bb'): 1895 bbfiles.append(f) 1896 elif f.endswith('.bbappend'): 1897 bbappend.append(f) 1898 else: 1899 collectlog.debug("skipping %s: unknown file extension", f) 1900 1901 # Build a list of .bbappend files for each .bb file 1902 for f in bbappend: 1903 base = os.path.basename(f).replace('.bbappend', '.bb') 1904 self.bbappends.append((base, f)) 1905 1906 # Find overlayed recipes 1907 # bbfiles will be in priority order which makes this easy 1908 bbfile_seen = dict() 1909 self.overlayed = defaultdict(list) 1910 for f in reversed(bbfiles): 1911 base = os.path.basename(f) 1912 if base not in bbfile_seen: 1913 bbfile_seen[base] = f 1914 else: 1915 topfile = bbfile_seen[base] 1916 self.overlayed[topfile].append(f) 1917 1918 return (bbfiles, masked, searchdirs) 1919 1920 def get_file_appends(self, fn): 1921 """ 1922 Returns a list of .bbappend files to apply to fn 1923 """ 1924 filelist = [] 1925 f = os.path.basename(fn) 1926 for b in self.bbappends: 1927 (bbappend, filename) = b 1928 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])): 1929 filelist.append(filename) 1930 return tuple(filelist) 1931 1932 def collection_priorities(self, pkgfns, fns, d): 1933 # Return the priorities of the entries in pkgfns 1934 # Also check that all the regexes in self.bbfile_config_priorities are used 1935 # (but to do that we need to ensure skipped recipes aren't counted, nor 1936 # collections in BBFILE_PATTERN_IGNORE_EMPTY) 1937 1938 priorities = {} 1939 seen = set() 1940 matched = set() 1941 1942 matched_regex = set() 1943 unmatched_regex = set() 1944 for _, _, regex, _ in self.bbfile_config_priorities: 1945 unmatched_regex.add(regex) 1946 1947 # Calculate priorities for each file 1948 for p in pkgfns: 1949 realfn, cls, mc = bb.cache.virtualfn2realfn(p) 1950 priorities[p], regex, _ = self.calc_bbfile_priority(realfn) 1951 if regex in unmatched_regex: 1952 matched_regex.add(regex) 1953 unmatched_regex.remove(regex) 1954 seen.add(realfn) 1955 if regex: 1956 matched.add(realfn) 1957 1958 if unmatched_regex: 1959 # Account for bbappend files 1960 for b in self.bbappends: 1961 (bbfile, append) = b 1962 seen.add(append) 1963 1964 # Account for skipped recipes 1965 seen.update(fns) 1966 1967 seen.difference_update(matched) 1968 1969 def already_matched(fn): 1970 for regex in matched_regex: 1971 if regex.match(fn): 1972 return True 1973 return False 1974 1975 for unmatch in unmatched_regex.copy(): 1976 for fn in seen: 1977 if unmatch.match(fn): 1978 # If the bbappend or file was already matched by another regex, skip it 1979 # e.g. for a layer within a layer, the outer regex could match, the inner 1980 # regex may match nothing and we should warn about that 1981 if already_matched(fn): 1982 continue 1983 unmatched_regex.remove(unmatch) 1984 break 1985 1986 for collection, pattern, regex, _ in self.bbfile_config_priorities: 1987 if regex in unmatched_regex: 1988 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1': 1989 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default', 1990 collection, pattern)) 1991 1992 return priorities 1993 1994class ParsingFailure(Exception): 1995 def __init__(self, realexception, recipe): 1996 self.realexception = realexception 1997 self.recipe = recipe 1998 Exception.__init__(self, realexception, recipe) 1999 2000class Parser(multiprocessing.Process): 2001 def __init__(self, jobs, results, quit, profile): 2002 self.jobs = jobs 2003 self.results = results 2004 self.quit = quit 2005 multiprocessing.Process.__init__(self) 2006 self.context = bb.utils.get_context().copy() 2007 self.handlers = bb.event.get_class_handlers().copy() 2008 self.profile = profile 2009 self.queue_signals = False 2010 self.signal_received = [] 2011 self.signal_threadlock = threading.Lock() 2012 2013 def catch_sig(self, signum, frame): 2014 if self.queue_signals: 2015 self.signal_received.append(signum) 2016 else: 2017 self.handle_sig(signum, frame) 2018 2019 def handle_sig(self, signum, frame): 2020 if signum == signal.SIGTERM: 2021 signal.signal(signal.SIGTERM, signal.SIG_DFL) 2022 os.kill(os.getpid(), signal.SIGTERM) 2023 elif signum == signal.SIGINT: 2024 signal.default_int_handler(signum, frame) 2025 2026 def run(self): 2027 2028 if not self.profile: 2029 self.realrun() 2030 return 2031 2032 try: 2033 import cProfile as profile 2034 except: 2035 import profile 2036 prof = profile.Profile() 2037 try: 2038 profile.Profile.runcall(prof, self.realrun) 2039 finally: 2040 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name 2041 prof.dump_stats(logfile) 2042 2043 def realrun(self): 2044 # Signal handling here is hard. We must not terminate any process or thread holding the write 2045 # lock for the event stream as it will not be released, ever, and things will hang. 2046 # Python handles signals in the main thread/process but they can be raised from any thread and 2047 # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section 2048 # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any 2049 # new thread should also do so) and we defer handling but we handle with the local thread lock 2050 # held (a threading lock, not a multiprocessing one) so that no other thread in the process 2051 # can be in the critical section. 2052 signal.signal(signal.SIGTERM, self.catch_sig) 2053 signal.signal(signal.SIGHUP, signal.SIG_DFL) 2054 signal.signal(signal.SIGINT, self.catch_sig) 2055 bb.utils.set_process_name(multiprocessing.current_process().name) 2056 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1) 2057 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1) 2058 2059 pending = [] 2060 havejobs = True 2061 try: 2062 while havejobs or pending: 2063 if self.quit.is_set(): 2064 break 2065 2066 job = None 2067 try: 2068 job = self.jobs.pop() 2069 except IndexError: 2070 havejobs = False 2071 if job: 2072 result = self.parse(*job) 2073 # Clear the siggen cache after parsing to control memory usage, its huge 2074 bb.parse.siggen.postparsing_clean_cache() 2075 pending.append(result) 2076 2077 if pending: 2078 try: 2079 result = pending.pop() 2080 self.results.put(result, timeout=0.05) 2081 except queue.Full: 2082 pending.append(result) 2083 finally: 2084 self.results.close() 2085 self.results.join_thread() 2086 2087 def parse(self, mc, cache, filename, appends, layername): 2088 try: 2089 origfilter = bb.event.LogHandler.filter 2090 # Record the filename we're parsing into any events generated 2091 def parse_filter(self, record): 2092 record.taskpid = bb.event.worker_pid 2093 record.fn = filename 2094 return True 2095 2096 # Reset our environment and handlers to the original settings 2097 bb.utils.set_context(self.context.copy()) 2098 bb.event.set_class_handlers(self.handlers.copy()) 2099 bb.event.LogHandler.filter = parse_filter 2100 2101 return True, mc, cache.parse(filename, appends, layername) 2102 except Exception as exc: 2103 tb = sys.exc_info()[2] 2104 exc.recipe = filename 2105 return True, None, exc 2106 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown 2107 # and for example a worker thread doesn't just exit on its own in response to 2108 # a SystemExit event for example. 2109 except BaseException as exc: 2110 return True, None, ParsingFailure(exc, filename) 2111 finally: 2112 bb.event.LogHandler.filter = origfilter 2113 2114class CookerParser(object): 2115 def __init__(self, cooker, mcfilelist, masked): 2116 self.mcfilelist = mcfilelist 2117 self.cooker = cooker 2118 self.cfgdata = cooker.data 2119 self.cfghash = cooker.databuilder.data_hash 2120 self.cfgbuilder = cooker.databuilder 2121 2122 # Accounting statistics 2123 self.parsed = 0 2124 self.cached = 0 2125 self.error = 0 2126 self.masked = masked 2127 2128 self.skipped = 0 2129 self.virtuals = 0 2130 2131 self.current = 0 2132 self.process_names = [] 2133 2134 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array) 2135 self.fromcache = set() 2136 self.willparse = set() 2137 for mc in self.cooker.multiconfigs: 2138 for filename in self.mcfilelist[mc]: 2139 appends = self.cooker.collections[mc].get_file_appends(filename) 2140 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2] 2141 if not self.bb_caches[mc].cacheValid(filename, appends): 2142 self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername)) 2143 else: 2144 self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername)) 2145 2146 self.total = len(self.fromcache) + len(self.willparse) 2147 self.toparse = len(self.willparse) 2148 self.progress_chunk = int(max(self.toparse / 100, 1)) 2149 2150 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or 2151 multiprocessing.cpu_count()), self.toparse) 2152 2153 bb.cache.SiggenRecipeInfo.reset() 2154 self.start() 2155 self.haveshutdown = False 2156 self.syncthread = None 2157 2158 def start(self): 2159 self.results = self.load_cached() 2160 self.processes = [] 2161 if self.toparse: 2162 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) 2163 2164 self.parser_quit = multiprocessing.Event() 2165 self.result_queue = multiprocessing.Queue() 2166 2167 def chunkify(lst,n): 2168 return [lst[i::n] for i in range(n)] 2169 self.jobs = chunkify(list(self.willparse), self.num_processes) 2170 2171 for i in range(0, self.num_processes): 2172 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile) 2173 parser.start() 2174 self.process_names.append(parser.name) 2175 self.processes.append(parser) 2176 2177 self.results = itertools.chain(self.results, self.parse_generator()) 2178 2179 def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"): 2180 if not self.toparse: 2181 return 2182 if self.haveshutdown: 2183 return 2184 self.haveshutdown = True 2185 2186 if clean: 2187 event = bb.event.ParseCompleted(self.cached, self.parsed, 2188 self.skipped, self.masked, 2189 self.virtuals, self.error, 2190 self.total) 2191 2192 bb.event.fire(event, self.cfgdata) 2193 else: 2194 bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata) 2195 bb.error("Parsing halted due to errors, see error messages above") 2196 2197 # Cleanup the queue before call process.join(), otherwise there might be 2198 # deadlocks. 2199 while True: 2200 try: 2201 self.result_queue.get(timeout=0.25) 2202 except queue.Empty: 2203 break 2204 2205 def sync_caches(): 2206 for c in self.bb_caches.values(): 2207 bb.cache.SiggenRecipeInfo.reset() 2208 c.sync() 2209 2210 self.syncthread = threading.Thread(target=sync_caches, name="SyncThread") 2211 self.syncthread.start() 2212 2213 self.parser_quit.set() 2214 2215 for process in self.processes: 2216 process.join(0.5) 2217 2218 for process in self.processes: 2219 if process.exitcode is None: 2220 os.kill(process.pid, signal.SIGINT) 2221 2222 for process in self.processes: 2223 process.join(0.5) 2224 2225 for process in self.processes: 2226 if process.exitcode is None: 2227 process.terminate() 2228 2229 for process in self.processes: 2230 process.join() 2231 # clean up zombies 2232 process.close() 2233 2234 bb.codeparser.parser_cache_save() 2235 bb.codeparser.parser_cache_savemerge() 2236 bb.cache.SiggenRecipeInfo.reset() 2237 bb.fetch.fetcher_parse_done() 2238 if self.cooker.configuration.profile: 2239 profiles = [] 2240 for i in self.process_names: 2241 logfile = "profile-parse-%s.log" % i 2242 if os.path.exists(logfile) and os.path.getsize(logfile): 2243 profiles.append(logfile) 2244 2245 if profiles: 2246 pout = "profile-parse.log.processed" 2247 bb.utils.process_profilelog(profiles, pout = pout) 2248 print("Processed parsing statistics saved to %s" % (pout)) 2249 2250 def final_cleanup(self): 2251 if self.syncthread: 2252 self.syncthread.join() 2253 2254 def load_cached(self): 2255 for mc, cache, filename, appends, layername in self.fromcache: 2256 infos = cache.loadCached(filename, appends) 2257 yield False, mc, infos 2258 2259 def parse_generator(self): 2260 empty = False 2261 while self.processes or not empty: 2262 for process in self.processes.copy(): 2263 if not process.is_alive(): 2264 process.join() 2265 self.processes.remove(process) 2266 2267 if self.parsed >= self.toparse: 2268 break 2269 2270 try: 2271 result = self.result_queue.get(timeout=0.25) 2272 except queue.Empty: 2273 empty = True 2274 yield None, None, None 2275 else: 2276 empty = False 2277 yield result 2278 2279 if not (self.parsed >= self.toparse): 2280 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None) 2281 2282 2283 def parse_next(self): 2284 result = [] 2285 parsed = None 2286 try: 2287 parsed, mc, result = next(self.results) 2288 if isinstance(result, BaseException): 2289 # Turn exceptions back into exceptions 2290 raise result 2291 if parsed is None: 2292 # Timeout, loop back through the main loop 2293 return True 2294 2295 except StopIteration: 2296 self.shutdown() 2297 return False 2298 except bb.BBHandledException as exc: 2299 self.error += 1 2300 logger.debug('Failed to parse recipe: %s' % exc.recipe) 2301 self.shutdown(clean=False) 2302 return False 2303 except ParsingFailure as exc: 2304 self.error += 1 2305 2306 exc_desc = str(exc) 2307 if isinstance(exc, SystemExit) and not isinstance(exc.code, str): 2308 exc_desc = 'Exited with "%d"' % exc.code 2309 2310 logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc)) 2311 self.shutdown(clean=False) 2312 return False 2313 except bb.parse.ParseError as exc: 2314 self.error += 1 2315 logger.error(str(exc)) 2316 self.shutdown(clean=False, eventmsg=str(exc)) 2317 return False 2318 except bb.data_smart.ExpansionError as exc: 2319 def skip_frames(f, fn_prefix): 2320 while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix): 2321 f = f.tb_next 2322 return f 2323 2324 self.error += 1 2325 bbdir = os.path.dirname(__file__) + os.sep 2326 etype, value, tb = sys.exc_info() 2327 2328 # Remove any frames where the code comes from bitbake. This 2329 # prevents deep (and pretty useless) backtraces for expansion error 2330 tb = skip_frames(tb, bbdir) 2331 cur = tb 2332 while cur: 2333 cur.tb_next = skip_frames(cur.tb_next, bbdir) 2334 cur = cur.tb_next 2335 2336 logger.error('ExpansionError during parsing %s', value.recipe, 2337 exc_info=(etype, value, tb)) 2338 self.shutdown(clean=False) 2339 return False 2340 except Exception as exc: 2341 self.error += 1 2342 _, value, _ = sys.exc_info() 2343 if hasattr(value, "recipe"): 2344 logger.error('Unable to parse %s' % value.recipe, 2345 exc_info=sys.exc_info()) 2346 else: 2347 # Most likely, an exception occurred during raising an exception 2348 import traceback 2349 logger.error('Exception during parse: %s' % traceback.format_exc()) 2350 self.shutdown(clean=False) 2351 return False 2352 2353 self.current += 1 2354 self.virtuals += len(result) 2355 if parsed: 2356 self.parsed += 1 2357 if self.parsed % self.progress_chunk == 0: 2358 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse), 2359 self.cfgdata) 2360 else: 2361 self.cached += 1 2362 2363 for virtualfn, info_array in result: 2364 if info_array[0].skipped: 2365 self.skipped += 1 2366 self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0]) 2367 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], 2368 parsed=parsed, watcher = self.cooker.add_filewatch) 2369 return True 2370 2371 def reparse(self, filename): 2372 bb.cache.SiggenRecipeInfo.reset() 2373 to_reparse = set() 2374 for mc in self.cooker.multiconfigs: 2375 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2] 2376 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername)) 2377 2378 for mc, filename, appends, layername in to_reparse: 2379 infos = self.bb_caches[mc].parse(filename, appends, layername) 2380 for vfn, info_array in infos: 2381 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array) 2382