# xref: /openbmc/openbmc/poky/bitbake/lib/bb/cooker.py (revision 8fc2a182)
#
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import pyinotify
import json
import pickle
import codecs
import hashserv

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no files, or multiple files, match a given recipe specification
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)

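# Editor's note, an illustrative sketch (not part of the original module):
# state.get_name() maps a status code back to its attribute name by scanning
# the class namespace, e.g.:
#
#     >>> state.get_name(state.running)
#     'running'
#     >>> state.get_name(99)
#     Traceback (most recent call last):
#         ...
#     ValueError: Invalid status code: 99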

class SkippedPackage:
    def __init__(self, info=None, reason=None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        # a set is not itself an iterator, so wrap it in iter() before calling next()
        return next(iter(self._features))

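# Editor's note, an illustrative sketch (not part of the original module):
# feature flags are plain integers validated against _feature_list, so
# unknown values are silently ignored rather than raising:
#
#     >>> features = CookerFeatures()
#     >>> features.setFeature(CookerFeatures.HOB_EXTRA_CACHES)
#     >>> features.setFeature(42)   # not a known feature -> ignored
#     >>> CookerFeatures.HOB_EXTRA_CACHES in features
#     True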

class EventWriter:
    def __init__(self, cooker, eventfile):
        self.file_inited = None
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_event(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

    def send(self, event):
        if self.file_inited:
            # we have the file, just write the event
            self.write_event(event)
        else:
            # init on bb.event.BuildStarted
            name = "%s.%s" % (event.__module__, event.__class__.__name__)
            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
                with open(self.eventfile, "w") as f:
                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                self.file_inited = True

                # write pending events
                for evt in self.event_queue:
                    self.write_event(evt)

                # also write the current event
                self.write_event(event)
            else:
                # queue all events until the file is inited
                self.event_queue.append(event)

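# Editor's note, an illustrative sketch (not part of the original module):
# each line EventWriter emits is a JSON object whose "vars" field holds a
# base64-encoded pickle of the event, so a log line (here the hypothetical
# variable `line`) can be decoded back with the standard library:
#
#     import json, pickle, codecs
#     entry = json.loads(line)
#     event = pickle.loads(codecs.decode(entry["vars"].encode("utf-8"), "base64"))
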
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, idleCallBackRegister=None):
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            sys.stdout.flush()
            self.handlePRServ()

    def process_inotify_updates(self):
        for n in [self.confignotifier, self.notifier]:
            if n.check_events(timeout=0):
                # read notified events and enqueue them
                n.read_events()
                n.process_events()

    def config_notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            self.baseconfig_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname not in self.configwatcher.bbwatchedfiles:
            return
        if event.pathname not in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.baseconfig_valid = False

    def notifications(self, event):
        if event.maskname == "IN_Q_OVERFLOW":
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self.parsecache_valid = False
            bb.parse.clear_cache()
            return
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if event.pathname not in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None, dirs=False):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try to add watches for files that don't exist yet, but which would
                # influence the parser if they did. The parent directory of such a file
                # may not exist either, in which case we need to watch the nearest
                # parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise

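    # Editor's note, an illustrative sketch (not part of the original module):
    # given a hypothetical dependency on a not-yet-existing file
    # /layer/conf/auto.conf whose conf/ directory also does not exist,
    # add_filewatch() walks up the path until a watch succeeds:
    #
    #     add_watch('/layer/conf')  -> ENOENT, remember '/layer/conf'
    #     add_watch('/layer')       -> ok; '/layer/conf' is recorded in
    #                                  bbwatchedfiles so its creation is noticed
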
    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # We only accept a new feature set while in a state where a reset is safe
        if self.state not in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty.
        # This is the entry point; no further check is needed.
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False

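    # Editor's note, an illustrative sketch (not part of the original module):
    # cache classes are specified as "module:Class" strings and resolved
    # dynamically, mirroring the loop above:
    #
    #     module_name, cache_name = 'bb.cache:CoreRecipeInfo'.split(':')
    #     module = __import__(module_name, fromlist=(cache_name,))
    #     cls = getattr(module, cache_name)   # -> bb.cache.CoreRecipeInfo
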
    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
                self.hashserv.process.start()
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def parseConfiguration(self):
        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

        self.parsecache_valid = False

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        if not clean:
            logger.debug(1, "Base environment change, triggering reparse")
            self.reset()

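    # Editor's note, an illustrative sketch (not part of the original module):
    # the loop above is a three-way diff between the client's approved
    # environment variables and the stored configuration, e.g. with
    # hypothetical values:
    #
    #     self.configuration.env = {'MACHINE': 'qemux86'}
    #     environment            = {'MACHINE': 'qemuarm', 'DL_DIR': '/dl'}
    #     # 'DL_DIR' added and 'MACHINE' changed -> clean = False -> reset()
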
    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        (latest_versions, preferred_versions) = self.findProviders()

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if mc not in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wildcard expansion:
        # Replace a string such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:"):
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1, "Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:"):
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can pick up indirect multiconfig dependencies in time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for mc in self.multiconfigs:
                    for k in mcdeps:
                        if k in seen:
                            continue
                        l = k.split(':')
                        depmc = l[2]
                        if depmc not in self.multiconfigs:
                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k, depmc))
                        else:
                            logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                            seen.add(k)
                            new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

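    # Editor's note, an illustrative sketch (not part of the original module):
    # with two extra multiconfigs "A" and "B" plus the default (''), wildcard
    # expansion turns one target into per-multiconfig targets and the implicit
    # default target:
    #
    #     targetlist     = ['mc:*:bash']
    #     fulltargetlist -> ['mc:A:bash', 'mc:B:bash', 'bash']
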
    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

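    # Editor's note, an illustrative sketch (not part of the original module):
    #
    #     >>> BBCooker.add_mc_prefix('musl', 'bash')
    #     'mc:musl:bash'
    #     >>> BBCooker.add_mc_prefix('', 'bash')
    #     'bash'
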
    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if dotname not in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if package not in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

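    # Editor's note, an illustrative sketch (not part of the original module):
    # the returned depend_tree is a plain dict of dicts and lists; with
    # hypothetical recipes it looks roughly like:
    #
    #     depend_tree['pn']['bash']      = {'filename': '/...bash_5.0.bb',
    #                                       'version': '0:5.0-r0', ...}
    #     depend_tree['tdepends']['bash.do_configure'] = ['ncurses.do_populate_sysroot']
    #     depend_tree['depends']['bash'] = ['ncurses', 'virtual/libc']
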
    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if appendfn not in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                        '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except ValueError:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try to find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. e.g. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            filename = eligible[0]
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        e.g. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list, default to universe
        if not pkgs:
            pkgs = ['universe']
        # if klass is passed, ensure all recipes which inherit the specified
        # class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode(self):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start(self)


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                        # fall back to a dummy priority so the checks below don't hit
                        # an unbound local; the errors flag aborts parsing at the end
                        prio = 0
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
                    cre = re.compile('^NULL$')
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")

1257    def buildSetVars(self):
1258        """
1259        Set up any variables needed before starting a build
1260        """
1261        t = time.gmtime()
1262        for mc in self.databuilder.mcdata:
1263            ds = self.databuilder.mcdata[mc]
1264            if not ds.getVar("BUILDNAME", False):
1265                ds.setVar("BUILDNAME", "${DATE}${TIME}")
1266            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1267            ds.setVar("DATE", time.strftime('%Y%m%d', t))
1268            ds.setVar("TIME", time.strftime('%H%M%S', t))
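            # e.g. a build started 2021-06-01 12:34:56 UTC gets DATE="20210601" and
            # TIME="123456", so the default BUILDNAME expands to "20210601123456".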
1269
1270    def reset_mtime_caches(self):
1271        """
1272        Reset mtime caches - this is particularly important when memory resident as something
1273        which is cached may well have changed since the last invocation (e.g. a
1274        file associated with a recipe might have been modified by the user).
1275        """
1276        build.reset_cache()
1277        bb.fetch._checksum_cache.mtime_cache.clear()
1278        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1279        if siggen_cache:
1280            bb.parse.siggen.checksum_cache.mtime_cache.clear()
1281
1282    def matchFiles(self, bf, mc=''):
1283        """
1284        Find the .bb files which match the expression in 'buildfile'.
1285        """
1286        if bf.startswith("/") or bf.startswith("../"):
1287            bf = os.path.abspath(bf)
1288
1289        self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1290        filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1291        try:
1292            os.stat(bf)
1293            bf = os.path.abspath(bf)
1294            return [bf]
1295        except OSError:
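            # 'bf' does not exist as given, so treat it as a regular expression and
            # match it against every collected recipe path (illustrative: a buildfile
            # of "busybox" matches any existing .bb path containing that string).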
1296            regexp = re.compile(bf)
1297            matches = []
1298            for f in filelist:
1299                if regexp.search(f) and os.path.isfile(f):
1300                    matches.append(f)
1301            return matches
1302
1303    def matchFile(self, buildfile, mc=''):
1304        """
1305        Find the .bb file which matches the expression in 'buildfile'.
1306        Raise an error if multiple files match.
1307        """
1308        matches = self.matchFiles(buildfile, mc)
1309        if len(matches) != 1:
1310            if matches:
1311                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1312                for f in matches:
1313                    msg += "\n    %s" % f
1315                parselog.error(msg)
1316            else:
1317                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1318            raise NoSpecificMatch
1319        return matches[0]
1320
1321    def buildFile(self, buildfile, task):
1322        """
1323        Build the file matching regexp buildfile
1324        """
1325        bb.event.fire(bb.event.BuildInit(), self.data)
1326
1327        # Too many people use -b because they think it's how you normally
1328        # specify a target to be built, so show a warning
1329        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1330
1331        self.buildFileInternal(buildfile, task)
1332
1333    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1334        """
1335        Build the file matching regexp buildfile
1336        """
1337
1338        # Parse the configuration here. We need to do it explicitly here since
1339        # buildFile() doesn't use the cache
1340        self.parseConfiguration()
1341
1342        # If we are told to do the None task then query the default task
1343        if task is None:
1344            task = self.configuration.cmd
1345        if not task.startswith("do_"):
1346            task = "do_%s" % task
1347
1348        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
1349        fn = self.matchFile(fn, mc)
1350
1351        self.buildSetVars()
1352        self.reset_mtime_caches()
1353
1354        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
1355
1356        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
1357        infos = dict(infos)
1358
1359        fn = bb.cache.realfn2virtual(fn, cls, mc)
1360        try:
1361            info_array = infos[fn]
1362        except KeyError:
1363            bb.fatal("%s does not exist" % fn)
1364
1365        if info_array[0].skipped:
1366            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1367
1368        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
1369
1370        # Tweak some variables
1371        item = info_array[0].pn
1372        self.recipecaches[mc].ignored_dependencies = set()
1373        self.recipecaches[mc].bbfile_priority[fn] = 1
1374        self.configuration.limited_deps = True
1375
1376        # Remove external dependencies
1377        self.recipecaches[mc].task_deps[fn]['depends'] = {}
1378        self.recipecaches[mc].deps[fn] = []
1379        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1380        self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1381
1382        # Invalidate task for target if force mode active
1383        if self.configuration.force:
1384            logger.verbose("Invalidate task %s, %s", task, fn)
1385            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
1386
1387        # Setup taskdata structure
1388        taskdata = {}
1389        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
1390        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1391
1392        if quietlog:
1393            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1394            bb.runqueue.logger.setLevel(logging.WARNING)
1395
1396        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1397        if fireevents:
1398            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1399
1400        # Execute the runqueue
1401        runlist = [[mc, item, task, fn]]
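        # Illustrative shape: [['', 'busybox', 'do_compile', '/path/to/busybox_1.35.0.bb']]
        # for a non-multiconfig build; mc is the multiconfig name, '' by default.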
1402
1403        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1404
1405        def buildFileIdle(server, rq, abort):
1406
1407            msg = None
1408            interrupted = 0
1409            if abort or self.state == state.forceshutdown:
1410                rq.finish_runqueue(True)
1411                msg = "Forced shutdown"
1412                interrupted = 2
1413            elif self.state == state.shutdown:
1414                rq.finish_runqueue(False)
1415                msg = "Stopped build"
1416                interrupted = 1
1417            failures = 0
1418            try:
1419                retval = rq.execute_runqueue()
1420            except runqueue.TaskFailure as exc:
1421                failures += len(exc.args)
1422                retval = False
1423            except SystemExit as exc:
1424                self.command.finishAsyncCommand(str(exc))
1425                if quietlog:
1426                    bb.runqueue.logger.setLevel(rqloglevel)
1427                return False
1428
1429            if not retval:
1430                if fireevents:
1431                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1432                self.command.finishAsyncCommand(msg)
1433                # We trashed self.recipecaches above
1434                self.parsecache_valid = False
1435                self.configuration.limited_deps = False
1436                bb.parse.siggen.reset(self.data)
1437                if quietlog:
1438                    bb.runqueue.logger.setLevel(rqloglevel)
1439                return False
1440            if retval is True:
1441                return True
1442            return retval
1443
1444        self.idleCallBackRegister(buildFileIdle, rq)
1445
1446    def buildTargets(self, targets, task):
1447        """
1448        Attempt to build the targets specified
1449        """
1450
1451        def buildTargetsIdle(server, rq, abort):
1452            msg = None
1453            interrupted = 0
1454            if abort or self.state == state.forceshutdown:
1455                rq.finish_runqueue(True)
1456                msg = "Forced shutdown"
1457                interrupted = 2
1458            elif self.state == state.shutdown:
1459                rq.finish_runqueue(False)
1460                msg = "Stopped build"
1461                interrupted = 1
1462            failures = 0
1463            try:
1464                retval = rq.execute_runqueue()
1465            except runqueue.TaskFailure as exc:
1466                failures += len(exc.args)
1467                retval = False
1468            except SystemExit as exc:
1469                self.command.finishAsyncCommand(str(exc))
1470                return False
1471
1472            if not retval:
1473                try:
1474                    for mc in self.multiconfigs:
1475                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1476                finally:
1477                    self.command.finishAsyncCommand(msg)
1478                return False
1479            if retval is True:
1480                return True
1481            return retval
1482
1483        self.reset_mtime_caches()
1484        self.buildSetVars()
1485
1486        # If we are told to do the None task then query the default task
1487        if task is None:
1488            task = self.configuration.cmd
1489
1490        if not task.startswith("do_"):
1491            task = "do_%s" % task
1492
1493        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
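        # e.g. with task "do_build", targets ['busybox', 'quilt:do_patch'] become
        # ['busybox:do_build', 'quilt:do_patch'] (illustrative recipe names).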
1494
1495        bb.event.fire(bb.event.BuildInit(packages), self.data)
1496
1497        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
1498
1499        buildname = self.data.getVar("BUILDNAME", False)
1500
1501        # Make targets always look like <target>:do_<task>
1502        ntargets = []
1503        for target in runlist:
1504            if target[0]:
1505                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
1506            ntargets.append("%s:%s" % (target[1], target[2]))
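        # Note that a multiconfig entry contributes both forms here, e.g. a runlist
        # entry ['mc1', 'busybox', 'do_build', fn] (illustrative) yields
        # 'mc:mc1:busybox:do_build' as well as the bare 'busybox:do_build'.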
1507
1508        for mc in self.multiconfigs:
1509            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
1510
1511        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1512        if 'universe' in targets:
1513            rq.rqdata.warn_multi_bb = True
1514
1515        self.idleCallBackRegister(buildTargetsIdle, rq)
1516
1517
1518    def getAllKeysWithFlags(self, flaglist):
1519        dump = {}
1520        for k in self.data.keys():
1521            try:
1522                expand = True
1523                flags = self.data.getVarFlags(k)
1524                if flags and "func" in flags and "python" in flags:
1525                    expand = False
1526                v = self.data.getVar(k, expand)
1527                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1528                    dump[k] = {
1529                        'v': str(v),
1530                        'history': self.data.varhistory.variable(k),
1531                    }
1532                    for d in flaglist:
1533                        if flags and d in flags:
1534                            dump[k][d] = flags[d]
1535                        else:
1536                            dump[k][d] = None
1537            except Exception as e:
1538                print(e)
1539        return dump
1540
1541
1542    def updateCacheSync(self):
1543        if self.state == state.running:
1544            return
1545
1546        # reload files for which we got notifications
1547        for p in self.inotify_modified_files:
1548            bb.parse.update_cache(p)
1549            if p in bb.parse.BBHandler.cached_statements:
1550                del bb.parse.BBHandler.cached_statements[p]
1551        self.inotify_modified_files = []
1552
1553        if not self.baseconfig_valid:
1554            logger.debug(1, "Reloading base configuration data")
1555            self.initConfigurationData()
1556            self.handlePRServ()
1557
1558    # This is called for all async commands when self.state != running
1559    def updateCache(self):
1560        if self.state == state.running:
1561            return
1562
1563        if self.state in (state.shutdown, state.forceshutdown, state.error):
1564            if hasattr(self.parser, 'shutdown'):
1565                self.parser.shutdown(clean=False, force=True)
1566                self.parser.final_cleanup()
1567            raise bb.BBHandledException()
1568
1569        if self.state != state.parsing:
1570            self.updateCacheSync()
1571
1572        if self.state != state.parsing and not self.parsecache_valid:
1573            bb.parse.siggen.reset(self.data)
1574            self.parseConfiguration()
1575            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1576                for mc in self.multiconfigs:
1577                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
1578
1579            for mc in self.multiconfigs:
1580                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
1581                self.recipecaches[mc].ignored_dependencies = set(ignore.split())
1582
1583                for dep in self.configuration.extra_assume_provided:
1584                    self.recipecaches[mc].ignored_dependencies.add(dep)
1585
1586            self.collections = {}
1587
1588            mcfilelist = {}
1589            total_masked = 0
1590            searchdirs = set()
1591            for mc in self.multiconfigs:
1592                self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1593                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1594
1595                mcfilelist[mc] = filelist
1596                total_masked += masked
1597                searchdirs |= set(search)
1598
1599            # Add inotify watches for directories searched for bb/bbappend files
1600            for dirent in searchdirs:
1601                self.add_filewatch([[dirent]], dirs=True)
1602
1603            self.parser = CookerParser(self, mcfilelist, total_masked)
1604            self.parsecache_valid = True
1605
1606        self.state = state.parsing
1607
1608        if not self.parser.parse_next():
1609            collectlog.debug(1, "parsing complete")
1610            if self.parser.error:
1611                raise bb.BBHandledException()
1612            self.show_appends_with_no_recipes()
1613            self.handlePrefProviders()
1614            for mc in self.multiconfigs:
1615                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1616            self.state = state.running
1617
1618            # Send an event listing all stamps reachable after parsing
1619            # which the metadata may use to clean up stale data
1620            for mc in self.multiconfigs:
1621                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1622                bb.event.fire(event, self.databuilder.mcdata[mc])
1623            return None
1624
1625        return True
1626
1627    def checkPackages(self, pkgs_to_build, task=None):
1628
1629        # Return a copy, don't modify the original
1630        pkgs_to_build = pkgs_to_build[:]
1631
1632        if len(pkgs_to_build) == 0:
1633            raise NothingToBuild
1634
1635        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
1636        for pkg in pkgs_to_build.copy():
1637            if pkg in ignore:
1638                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1639            if pkg.startswith("multiconfig:"):
1640                pkgs_to_build.remove(pkg)
1641                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
1642
1643        if 'world' in pkgs_to_build:
1644            pkgs_to_build.remove('world')
1645            for mc in self.multiconfigs:
1646                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1647                for t in self.recipecaches[mc].world_target:
1648                    if mc:
1649                        t = "mc:" + mc + ":" + t
1650                    pkgs_to_build.append(t)
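        # e.g. with multiconfigs '' and 'arm' (illustrative), 'world' expands to
        # each buildable recipe plus a matching 'mc:arm:<recipe>' target.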
1651
1652        if 'universe' in pkgs_to_build:
1653            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1654            parselog.debug(1, "collating packages for \"universe\"")
1655            pkgs_to_build.remove('universe')
1656            for mc in self.multiconfigs:
1657                for t in self.recipecaches[mc].universe_target:
1658                    if task:
1659                        foundtask = False
1660                        for provider_fn in self.recipecaches[mc].providers[t]:
1661                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1662                                foundtask = True
1663                                break
1664                        if not foundtask:
1665                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1666                            continue
1667                    if mc:
1668                        t = "mc:" + mc + ":" + t
1669                    pkgs_to_build.append(t)
1670
1671        return pkgs_to_build
1672
1673    def pre_serve(self):
1674        return
1675
1676    def post_serve(self):
1677        self.shutdown(force=True)
1678        prserv.serv.auto_shutdown()
1679        if self.hashserv:
1680            self.hashserv.process.terminate()
1681            self.hashserv.process.join()
1682        if hasattr(self, "data"):
1683            bb.event.fire(CookerExit(), self.data)
1684
1685    def shutdown(self, force = False):
1686        if force:
1687            self.state = state.forceshutdown
1688        else:
1689            self.state = state.shutdown
1690
1691        if self.parser:
1692            self.parser.shutdown(clean=not force, force=force)
1693            self.parser.final_cleanup()
1694
1695    def finishcommand(self):
1696        self.state = state.initial
1697
1698    def reset(self):
1699        self.initConfigurationData()
1700        self.handlePRServ()
1701
1702    def clientComplete(self):
1703        """Called when the client is done using the server"""
1704        self.finishcommand()
1705        self.extraconfigdata = {}
1706        self.command.reset()
1707        if hasattr(self, "data"):
1708            self.databuilder.reset()
1709            self.data = self.databuilder.data
1710        self.parsecache_valid = False
1711        self.baseconfig_valid = False
1712
1713
1714class CookerExit(bb.event.Event):
1715    """
1716    Notify clients of the Cooker shutdown
1717    """
1718
1719    def __init__(self):
1720        bb.event.Event.__init__(self)
1721
1722
1723class CookerCollectFiles(object):
1724    def __init__(self, priorities, mc=''):
1725        self.mc = mc
1726        self.bbappends = []
1727        # Priorities is a list of tuples, with the second element as the pattern.
1728        # We need to sort the list with the longest pattern first so that
1729        # nested layers are evaluated before the layers that contain them.
1730        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
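        # e.g. "^/srv/meta-outer/meta-inner/" sorts ahead of "^/srv/meta-outer/"
        # (illustrative patterns), so recipes in the nested layer match their own
        # collection rather than the enclosing one.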
1731
1732    def calc_bbfile_priority(self, filename):
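        # The first matching collection wins, e.g. (illustrative) with
        # BBFILE_PATTERN_core = "^/srv/poky/meta/" at priority 5, a recipe under
        # /srv/poky/meta/ returns (5, <regex>); unmatched files return (0, None).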
1733        for _, _, regex, pri in self.bbfile_config_priorities:
1734            if regex.match(filename):
1735                return pri, regex
1736        return 0, None
1737
1738    def get_bbfiles(self):
1739        """Get list of default .bb files by reading out the current directory"""
1740        path = os.getcwd()
1741        contents = os.listdir(path)
1742        bbfiles = []
1743        for f in contents:
1744            if f.endswith(".bb"):
1745                bbfiles.append(os.path.abspath(os.path.join(path, f)))
1746        return bbfiles
1747
1748    def find_bbfiles(self, path):
1749        """Find all the .bb and .bbappend files in a directory"""
1750        found = []
1751        for dirpath, dirs, files in os.walk(path):
1752            for ignored in ('SCCS', 'CVS', '.svn'):
1753                if ignored in dirs:
1754                    dirs.remove(ignored)
1755            found += [os.path.join(dirpath, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1756
1757        return found
1758
1759    def collect_bbfiles(self, config, eventdata):
1760        """Collect all available .bb build files"""
1761        masked = 0
1762
1763        collectlog.debug(1, "collecting .bb files")
1764
1765        files = (config.getVar("BBFILES") or "").split()
1766
1767        # Sort files by priority
1768        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0])
1769        config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1770
1771        if not files:
1772            files = self.get_bbfiles()
1773
1774        if not files:
1775            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1776            bb.event.fire(CookerExit(), eventdata)
1777
1778        # We need to track where we look so that we can add inotify watches. There
1779        # is no nice way to do this, this is horrid. We intercept the os.listdir()
1780        # (or os.scandir() for python 3.6+) calls while we run glob().
1781        origlistdir = os.listdir
1782        if hasattr(os, 'scandir'):
1783            origscandir = os.scandir
1784        searchdirs = []
1785
1786        def ourlistdir(d):
1787            searchdirs.append(d)
1788            return origlistdir(d)
1789
1790        def ourscandir(d):
1791            searchdirs.append(d)
1792            return origscandir(d)
1793
1794        os.listdir = ourlistdir
1795        if hasattr(os, 'scandir'):
1796            os.scandir = ourscandir
1797        try:
1798            # Can't use set here as order is important
1799            newfiles = []
1800            for f in files:
1801                if os.path.isdir(f):
1802                    dirfiles = self.find_bbfiles(f)
1803                    for g in dirfiles:
1804                        if g not in newfiles:
1805                            newfiles.append(g)
1806                else:
1807                    globbed = glob.glob(f)
1808                    if not globbed and os.path.exists(f):
1809                        globbed = [f]
1810                    # glob gives files in order on disk. Sort to be deterministic.
1811                    for g in sorted(globbed):
1812                        if g not in newfiles:
1813                            newfiles.append(g)
1814        finally:
1815            os.listdir = origlistdir
1816            if hasattr(os, 'scandir'):
1817                os.scandir = origscandir
1818
1819        bbmask = config.getVar('BBMASK')
1820
1821        if bbmask:
1822            # First validate the individual regular expressions and ignore any
1823            # that do not compile
1824            bbmasks = []
1825            for mask in bbmask.split():
1826                # When constructing an older style single regex, it's possible for BBMASK
1827                # to end up beginning with '|', which matches and masks _everything_.
1828                if mask.startswith("|"):
1829                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1830                    mask = mask[1:]
1831                try:
1832                    re.compile(mask)
1833                    bbmasks.append(mask)
1834                except sre_constants.error:
1835                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1836
1837            # Then validate the combined regular expressions. This should never
1838            # fail, but better safe than sorry...
1839            bbmask = "|".join(bbmasks)
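            # e.g. BBMASK = "meta-foo/ meta-bar/recipes-x/" (illustrative) is joined
            # into "meta-foo/|meta-bar/recipes-x/" and matched against each file path.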
1840            try:
1841                bbmask_compiled = re.compile(bbmask)
1842            except sre_constants.error:
1843                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1844                bbmask = None
1845
1846        bbfiles = []
1847        bbappend = []
1848        for f in newfiles:
1849            if bbmask and bbmask_compiled.search(f):
1850                collectlog.debug(1, "skipping masked file %s", f)
1851                masked += 1
1852                continue
1853            if f.endswith('.bb'):
1854                bbfiles.append(f)
1855            elif f.endswith('.bbappend'):
1856                bbappend.append(f)
1857            else:
1858                collectlog.debug(1, "skipping %s: unknown file extension", f)
1859
1860        # Build a list of .bbappend files for each .bb file
1861        for f in bbappend:
1862            base = os.path.basename(f).replace('.bbappend', '.bb')
1863            self.bbappends.append((base, f))
1864
1865        # Find overlayed recipes
1866        # bbfiles will be in priority order which makes this easy
1867        bbfile_seen = dict()
1868        self.overlayed = defaultdict(list)
1869        for f in reversed(bbfiles):
1870            base = os.path.basename(f)
1871            if base not in bbfile_seen:
1872                bbfile_seen[base] = f
1873            else:
1874                topfile = bbfile_seen[base]
1875                self.overlayed[topfile].append(f)
1876
1877        return (bbfiles, masked, searchdirs)
1878
1879    def get_file_appends(self, fn):
1880        """
1881        Returns a list of .bbappend files to apply to fn
1882        """
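        # A '%' in the append name truncates the match, e.g. busybox_1.%.bbappend
        # applies to busybox_1.35.0.bb but not to busybox_2.0.bb.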
1883        filelist = []
1884        f = os.path.basename(fn)
1885        for b in self.bbappends:
1886            (bbappend, filename) = b
1887            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1888                filelist.append(filename)
1889        return tuple(filelist)
1890
1891    def collection_priorities(self, pkgfns, fns, d):
1892        # Return the priorities of the entries in pkgfns
1893        # Also check that all the regexes in self.bbfile_config_priorities are used
1894        # (but to do that we need to ensure skipped recipes aren't counted, nor
1895        # collections in BBFILE_PATTERN_IGNORE_EMPTY)
1896
1897        priorities = {}
1898        seen = set()
1899        matched = set()
1900
1901        matched_regex = set()
1902        unmatched_regex = set()
1903        for _, _, regex, _ in self.bbfile_config_priorities:
1904            unmatched_regex.add(regex)
1905
1906        # Calculate priorities for each file
1907        for p in pkgfns:
1908            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1909            priorities[p], regex = self.calc_bbfile_priority(realfn)
1910            if regex in unmatched_regex:
1911                matched_regex.add(regex)
1912                unmatched_regex.remove(regex)
1913            seen.add(realfn)
1914            if regex:
1915                matched.add(realfn)
1916
1917        if unmatched_regex:
1918            # Account for bbappend files
1919            for b in self.bbappends:
1920                (bbfile, append) = b
1921                seen.add(append)
1922
1923            # Account for skipped recipes
1924            seen.update(fns)
1925
1926            seen.difference_update(matched)
1927
1928            def already_matched(fn):
1929                for regex in matched_regex:
1930                    if regex.match(fn):
1931                        return True
1932                return False
1933
1934            for unmatch in unmatched_regex.copy():
1935                for fn in seen:
1936                    if unmatch.match(fn):
1937                        # If the bbappend or file was already matched by another regex, skip it
1938                        # e.g. for a layer within a layer, the outer regex could match, the inner
1939                        # regex may match nothing and we should warn about that
1940                        if already_matched(fn):
1941                            continue
1942                        unmatched_regex.remove(unmatch)
1943                        break
1944
1945        for collection, pattern, regex, _ in self.bbfile_config_priorities:
1946            if regex in unmatched_regex:
1947                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
1948                    collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1949                                                                                             collection, pattern))
1950
1951        return priorities
1952
1953class ParsingFailure(Exception):
1954    def __init__(self, realexception, recipe):
1955        self.realexception = realexception
1956        self.recipe = recipe
1957        Exception.__init__(self, realexception, recipe)
1958
1959class Parser(multiprocessing.Process):
1960    def __init__(self, jobs, results, quit, init, profile):
1961        self.jobs = jobs
1962        self.results = results
1963        self.quit = quit
1964        self.init = init
1965        multiprocessing.Process.__init__(self)
1966        self.context = bb.utils.get_context().copy()
1967        self.handlers = bb.event.get_class_handlers().copy()
1968        self.profile = profile
1969
1970    def run(self):
1971
1972        if not self.profile:
1973            self.realrun()
1974            return
1975
1976        try:
1977            import cProfile as profile
1978        except ImportError:
1979            import profile
1980        prof = profile.Profile()
1981        try:
1982            profile.Profile.runcall(prof, self.realrun)
1983        finally:
1984            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1985            prof.dump_stats(logfile)
1986
1987    def realrun(self):
1988        if self.init:
1989            self.init()
1990
1991        pending = []
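        # Results that could not be queued within the timeout are parked in
        # 'pending' and retried on the next loop iteration before any new job.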
1992        while True:
1993            try:
1994                self.quit.get_nowait()
1995            except queue.Empty:
1996                pass
1997            else:
1998                self.results.close()
1999                self.results.join_thread()
2000                break
2001
2002            if pending:
2003                result = pending.pop()
2004            else:
2005                try:
2006                    job = self.jobs.pop()
2007                except IndexError:
2008                    self.results.close()
2009                    self.results.join_thread()
2010                    break
2011                result = self.parse(*job)
2012                # Clear the siggen cache after parsing to control memory usage; it's huge
2013                bb.parse.siggen.postparsing_clean_cache()
2014            try:
2015                self.results.put(result, timeout=0.25)
2016            except queue.Full:
2017                pending.append(result)
2018
2019    def parse(self, mc, cache, filename, appends):
2020        try:
2021            origfilter = bb.event.LogHandler.filter
2022            # Record the filename we're parsing into any events generated
2023            def parse_filter(self, record):
2024                record.taskpid = bb.event.worker_pid
2025                record.fn = filename
2026                return True
2027
2028            # Reset our environment and handlers to the original settings
2029            bb.utils.set_context(self.context.copy())
2030            bb.event.set_class_handlers(self.handlers.copy())
2031            bb.event.LogHandler.filter = parse_filter
2032
2033            return True, mc, cache.parse(filename, appends)
2034        except Exception as exc:
2035            tb = sys.exc_info()[2]
2036            exc.recipe = filename
2037            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2038            return True, exc
2039        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
2040        # and, for example, a worker thread doesn't just exit on its own in response
2041        # to a SystemExit event.
2042        except BaseException as exc:
2043            return True, ParsingFailure(exc, filename)
2044        finally:
2045            bb.event.LogHandler.filter = origfilter
2046
2047class CookerParser(object):
2048    def __init__(self, cooker, mcfilelist, masked):
2049        self.mcfilelist = mcfilelist
2050        self.cooker = cooker
2051        self.cfgdata = cooker.data
2052        self.cfghash = cooker.data_hash
2053        self.cfgbuilder = cooker.databuilder
2054
2055        # Accounting statistics
2056        self.parsed = 0
2057        self.cached = 0
2058        self.error = 0
2059        self.masked = masked
2060
2061        self.skipped = 0
2062        self.virtuals = 0
2063
2064        self.current = 0
2065        self.process_names = []
2066
2067        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2068        self.fromcache = set()
2069        self.willparse = set()
2070        for mc in self.cooker.multiconfigs:
2071            for filename in self.mcfilelist[mc]:
2072                appends = self.cooker.collections[mc].get_file_appends(filename)
2073                if not self.bb_caches[mc].cacheValid(filename, appends):
2074                    self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2075                else:
2076                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2077
2078        self.total = len(self.fromcache) + len(self.willparse)
2079        self.toparse = len(self.willparse)
2080        self.progress_chunk = int(max(self.toparse / 100, 1))
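        # i.e. fire a ParseProgress event roughly once per 1% of files parsed.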
2081
2082        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2083                                 multiprocessing.cpu_count()), self.toparse)
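        # e.g. BB_NUMBER_PARSE_THREADS = "4" caps parsing at four worker processes,
        # and we never start more processes than there are files to parse.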
2084
2085        self.start()
2086        self.haveshutdown = False
2087        self.syncthread = None
2088
2089    def start(self):
2090        self.results = self.load_cached()
2091        self.processes = []
2092        if self.toparse:
2093            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2094            def init():
2095                signal.signal(signal.SIGTERM, signal.SIG_DFL)
2096                signal.signal(signal.SIGHUP, signal.SIG_DFL)
2097                signal.signal(signal.SIGINT, signal.SIG_IGN)
2098                bb.utils.set_process_name(multiprocessing.current_process().name)
2099                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2100                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2101
2102            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2103            self.result_queue = multiprocessing.Queue()
2104
2105            def chunkify(lst, n):
2106                return [lst[i::n] for i in range(n)]
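            # Round-robin split, e.g. chunkify([1, 2, 3, 4, 5], 2) -> [[1, 3, 5], [2, 4]],
            # so parse jobs are spread roughly evenly across the worker processes.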
2107            self.jobs = chunkify(list(self.willparse), self.num_processes)
2108
2109            for i in range(0, self.num_processes):
2110                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
2111                parser.start()
2112                self.process_names.append(parser.name)
2113                self.processes.append(parser)
2114
2115            self.results = itertools.chain(self.results, self.parse_generator())
2116
2117    def shutdown(self, clean=True, force=False):
2118        if not self.toparse:
2119            return
2120        if self.haveshutdown:
2121            return
2122        self.haveshutdown = True
2123
2124        if clean:
2125            event = bb.event.ParseCompleted(self.cached, self.parsed,
2126                                            self.skipped, self.masked,
2127                                            self.virtuals, self.error,
2128                                            self.total)
2129
2130            bb.event.fire(event, self.cfgdata)
2131
2132        for process in self.processes:
2133            self.parser_quit.put(None)
2134
2135        # Clean up the queue before calling process.join(), otherwise there might be
2136        # deadlocks.
2137        while True:
2138            try:
2139                self.result_queue.get(timeout=0.25)
2140            except queue.Empty:
2141                break
2142
2143        for process in self.processes:
2144            if force:
2145                process.join(.1)
2146                process.terminate()
2147            else:
2148                process.join()
2149
2150        self.parser_quit.close()
2151        # Allow data left in the cancel queue to be discarded
2152        self.parser_quit.cancel_join_thread()
2153
2154        def sync_caches():
2155            for c in self.bb_caches.values():
2156                c.sync()
2157
2158        sync = threading.Thread(target=sync_caches, name="SyncThread")
2159        self.syncthread = sync
2160        sync.start()
2161        bb.codeparser.parser_cache_savemerge()
2162        bb.fetch.fetcher_parse_done()
2163        if self.cooker.configuration.profile:
2164            profiles = []
2165            for i in self.process_names:
2166                logfile = "profile-parse-%s.log" % i
2167                if os.path.exists(logfile):
2168                    profiles.append(logfile)
2169
2170            pout = "profile-parse.log.processed"
2171            bb.utils.process_profilelog(profiles, pout = pout)
2172            print("Processed parsing statistics saved to %s" % (pout))
2173
2174    def final_cleanup(self):
2175        if self.syncthread:
2176            self.syncthread.join()
2177
2178    def load_cached(self):
2179        for mc, cache, filename, appends in self.fromcache:
2180            cached, infos = cache.load(filename, appends)
2181            yield not cached, mc, infos
2182
2183    def parse_generator(self):
2184        while True:
2185            if self.parsed >= self.toparse:
2186                break
2187
2188            try:
2189                result = self.result_queue.get(timeout=0.25)
2190            except queue.Empty:
2191                pass
2192            else:
2193                value = result[1]
2194                if isinstance(value, BaseException):
2195                    raise value
2196                else:
2197                    yield result
2198
2199    def parse_next(self):
2200        result = []
2201        parsed = None
2202        try:
2203            parsed, mc, result = next(self.results)
2204        except StopIteration:
2205            self.shutdown()
2206            return False
2207        except bb.BBHandledException as exc:
2208            self.error += 1
2209            logger.error('Failed to parse recipe: %s' % exc.recipe)
2210            self.shutdown(clean=False)
2211            return False
2212        except ParsingFailure as exc:
2213            self.error += 1
2214            logger.error('Unable to parse %s: %s' %
2215                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2216            self.shutdown(clean=False)
2217            return False
2218        except bb.parse.ParseError as exc:
2219            self.error += 1
2220            logger.error(str(exc))
2221            self.shutdown(clean=False)
2222            return False
2223        except bb.data_smart.ExpansionError as exc:
2224            self.error += 1
2225            bbdir = os.path.dirname(__file__) + os.sep
2226            etype, value, _ = sys.exc_info()
2227            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2228            logger.error('ExpansionError during parsing %s', value.recipe,
2229                         exc_info=(etype, value, tb))
2230            self.shutdown(clean=False)
2231            return False
2232        except Exception as exc:
2233            self.error += 1
2234            etype, value, tb = sys.exc_info()
2235            if hasattr(value, "recipe"):
2236                logger.error('Unable to parse %s' % value.recipe,
2237                            exc_info=(etype, value, exc.traceback))
2238            else:
2239                # Most likely, an exception occurred during raising an exception
2240                import traceback
2241                logger.error('Exception during parse: %s' % traceback.format_exc())
2242            self.shutdown(clean=False)
2243            return False
2244
2245        self.current += 1
2246        self.virtuals += len(result)
2247        if parsed:
2248            self.parsed += 1
2249            if self.parsed % self.progress_chunk == 0:
2250                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2251                              self.cfgdata)
2252        else:
2253            self.cached += 1
2254
2255        for virtualfn, info_array in result:
2256            if info_array[0].skipped:
2257                self.skipped += 1
2258                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
2259            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2260                                        parsed=parsed, watcher = self.cooker.add_filewatch)
2261        return True
2262
2263    def reparse(self, filename):
2264        to_reparse = set()
2265        for mc in self.cooker.multiconfigs:
2266            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2267
2268        for mc, filename, appends in to_reparse:
2269            infos = self.bb_caches[mc].parse(filename, appends)
2270            for vfn, info_array in infos:
2271                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
2272