xref: /openbmc/openbmc/poky/bitbake/lib/bb/cooker.py (revision d159c7fb)
1#
2# Copyright (C) 2003, 2004  Chris Larson
3# Copyright (C) 2003, 2004  Phil Blundell
4# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
5# Copyright (C) 2005        Holger Hans Peter Freyther
6# Copyright (C) 2005        ROAD GmbH
7# Copyright (C) 2006 - 2007 Richard Purdie
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import sys, os, glob, os.path, re, time
13import itertools
14import logging
15import multiprocessing
16import sre_constants
17import threading
18from io import StringIO, UnsupportedOperation
19from contextlib import closing
20from collections import defaultdict, namedtuple
21import bb, bb.exceptions, bb.command
22from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
23import queue
24import signal
25import prserv.serv
26import pyinotify
27import json
28import pickle
29import codecs
30import hashserv
31
# Module-level loggers, one per cooker subsystem so output can be
# filtered by domain; all are children of the root "BitBake" logger.
logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
37
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    while resolving a recipe file specification.
    """
42
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    (the requested targets resolve to no buildable work).
    """
47
class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    (e.g. inconsistent BBFILE_COLLECTIONS related settings).
    """
52
class state:
    """
    Enumeration of the cooker server's lifecycle states.

    The numeric values are compared directly elsewhere, so the order and
    assignment of the constants must not change.
    """
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name for a numeric state code.

        Raises ValueError if code does not match any known state.
        """
        for name in dir(cls):
            value = getattr(cls, name)
            # isinstance() instead of an exact-type comparison; this also
            # filters out methods and dunder attributes, which are not ints.
            if isinstance(value, type(cls.initial)) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)
63
64
class SkippedPackage:
    """
    Snapshot of why a recipe was skipped at parse time, together with the
    provides/rprovides information it would have contributed.
    """

    def __init__(self, info=None, reason=None):
        # Defaults for the "no information available" case.
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            # Package names count as runtime provides, followed by the
            # recipe-level rprovides and each package's own rprovides.
            rprovides = list(info.packages) + list(info.rprovides)
            for pkg in info.packages:
                rprovides += info.rprovides_pkg[pkg]
            self.rprovides = rprovides
        elif reason:
            self.skipreason = reason
81
82
class CookerFeatures(object):
    """
    Set of optional cooker features requested by a UI.

    Feature identifiers are small integers; only identifiers present in
    _feature_list are accepted, anything else is silently ignored.
    """
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        """Enable feature f if it is one we know about, otherwise ignore it."""
        # validate we got a request for a feature we support
        if f in CookerFeatures._feature_list:
            self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return iter(self._features)

    def __next__(self):
        # NOTE(review): sets are not iterators, so calling next() directly on
        # an instance would fail; normal iteration goes through __iter__.
        return next(self._features)
103
104
105class EventWriter:
106    def __init__(self, cooker, eventfile):
107        self.file_inited = None
108        self.cooker = cooker
109        self.eventfile = eventfile
110        self.event_queue = []
111
112    def write_event(self, event):
113        with open(self.eventfile, "a") as f:
114            try:
115                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
116                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
117                                             "vars": str_event}))
118            except Exception as err:
119                import traceback
120                print(err, traceback.format_exc())
121
122    def send(self, event):
123        if self.file_inited:
124            # we have the file, just write the event
125            self.write_event(event)
126        else:
127            # init on bb.event.BuildStarted
128            name = "%s.%s" % (event.__module__, event.__class__.__name__)
129            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
130                with open(self.eventfile, "w") as f:
131                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
132
133                self.file_inited = True
134
135                # write pending events
136                for evt in self.event_queue:
137                    self.write_event(evt)
138
139                # also write the current event
140                self.write_event(event)
141            else:
142                # queue all events until the file is inited
143                self.event_queue.append(event)
144
145#============================================================================#
146# BBCooker
147#============================================================================#
148class BBCooker:
149    """
150    Manages one bitbake build run
151    """
152
    def __init__(self, featureSet=None, idleCallBackRegister=None):
        """
        Initialise the cooker.

        featureSet: optional iterable of CookerFeatures identifiers to enable.
        idleCallBackRegister: callable used to register periodic idle
            callbacks with the server main loop.
        """
        self.recipecaches = None
        self.eventlog = None
        # Recipes skipped during parsing (see SkippedPackage); passed to TaskData
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        # Timestamped debug output (with explicit flushes) so server startup
        # timing can be examined from the logs.
        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        # One watcher/notifier pair for base configuration files ...
        self.configwatcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
        sys.stdout.flush()

        self.configwatcher.bbseen = set()
        self.configwatcher.bbwatchedfiles = set()
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
        sys.stdout.flush()
        # Event mask covering anything that changes a watched file or directory
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        # ... and a second pair for recipe/parse inputs
        self.watcher = pyinotify.WatchManager()
        bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
        sys.stdout.flush()
        self.watcher.bbseen = set()
        self.watcher.bbwatchedfiles = set()
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)

        bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
        sys.stdout.flush()

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, abort):
            # Poll both notifiers from the idle loop; 1.0 is the re-poll delay
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            # stdout is not backed by a real file descriptor
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()
232
233    def init_configdata(self):
234        if not hasattr(self, "data"):
235            self.initConfigurationData()
236            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
237            sys.stdout.flush()
238            self.handlePRServ()
239
240    def process_inotify_updates(self):
241        for n in [self.confignotifier, self.notifier]:
242            if n.check_events(timeout=0):
243                # read notified events and enqeue them
244                n.read_events()
245                n.process_events()
246
247    def config_notifications(self, event):
248        if event.maskname == "IN_Q_OVERFLOW":
249            bb.warn("inotify event queue overflowed, invalidating caches.")
250            self.parsecache_valid = False
251            self.baseconfig_valid = False
252            bb.parse.clear_cache()
253            return
254        if not event.pathname in self.configwatcher.bbwatchedfiles:
255            return
256        if not event.pathname in self.inotify_modified_files:
257            self.inotify_modified_files.append(event.pathname)
258        self.baseconfig_valid = False
259
260    def notifications(self, event):
261        if event.maskname == "IN_Q_OVERFLOW":
262            bb.warn("inotify event queue overflowed, invalidating caches.")
263            self.parsecache_valid = False
264            bb.parse.clear_cache()
265            return
266        if event.pathname.endswith("bitbake-cookerdaemon.log") \
267                or event.pathname.endswith("bitbake.lock"):
268            return
269        if not event.pathname in self.inotify_modified_files:
270            self.inotify_modified_files.append(event.pathname)
271        self.parsecache_valid = False
272
    def add_filewatch(self, deps, watcher=None, dirs=False):
        """
        Add inotify watches for a list of file dependencies.

        deps: iterable of tuples whose first element is a file path (as
            stored in the __depends/__base_depends variables).
        watcher: WatchManager to add watches to (defaults to self.watcher).
        dirs: if True the paths in deps are directories to watch directly;
            otherwise each file's containing directory is watched.
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            # bbseen tracks directories already watched, so each is added once
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory missing: walk up to the nearest existing parent.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
309
310    def sigterm_exception(self, signum, stackframe):
311        if signum == signal.SIGTERM:
312            bb.warn("Cooker received SIGTERM, shutting down...")
313        elif signum == signal.SIGHUP:
314            bb.warn("Cooker received SIGHUP, shutting down...")
315        self.state = state.forceshutdown
316
317    def setFeatures(self, features):
318        # we only accept a new feature set if we're in state initial, so we can reset without problems
319        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
320            raise Exception("Illegal state for feature set change")
321        original_featureset = list(self.featureset)
322        for feature in features:
323            self.featureset.setFeature(feature)
324        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
325        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
326            self.reset()
327
    def initConfigurationData(self):
        """
        (Re)parse the base configuration and reset dependent state.

        Builds the list of RecipeInfo cache classes, constructs a fresh
        CookerDataBuilder/datastore, restores BB_CONSOLELOG, and re-adds
        configuration file watches. On exit the base config is marked
        valid and the parse cache invalid.
        """

        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Entries are "module:ClassName" strings; import dynamically.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        # Tracking was only needed while parsing the base configuration
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Watch the files each multiconfig's base configuration depends on,
        # so on-disk edits invalidate the base config.
        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False
379
    def handlePRServ(self):
        """
        Start the PR service and, if BB_HASHSERVE is "auto", a local hash
        equivalence server, then point all datastores at their addresses.
        """
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
                self.hashserv.serve_as_process()
            # Replace the "auto" placeholder with the real address everywhere
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)
401
402    def enableDataTracking(self):
403        self.configuration.tracking = True
404        if hasattr(self, "data"):
405            self.data.enableTracking()
406
407    def disableDataTracking(self):
408        self.configuration.tracking = False
409        if hasattr(self, "data"):
410            self.data.disableTracking()
411
412    def parseConfiguration(self):
413        self.updateCacheSync()
414
415        # Change nice level if we're asked to
416        nice = self.data.getVar("BB_NICE_LEVEL")
417        if nice:
418            curnice = os.nice(0)
419            nice = int(nice) - curnice
420            buildlog.verbose("Renice to %s " % os.nice(nice))
421
422        if self.recipecaches:
423            del self.recipecaches
424        self.multiconfigs = self.databuilder.mcdata.keys()
425        self.recipecaches = {}
426        for mc in self.multiconfigs:
427            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
428
429        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
430
431        self.parsecache_valid = False
432
433    def updateConfigOpts(self, options, environment, cmdline):
434        self.ui_cmdline = cmdline
435        clean = True
436        for o in options:
437            if o in ['prefile', 'postfile']:
438                # Only these options may require a reparse
439                try:
440                    if getattr(self.configuration, o) == options[o]:
441                        # Value is the same, no need to mark dirty
442                        continue
443                except AttributeError:
444                    pass
445                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
446                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
447                clean = False
448            if hasattr(self.configuration, o):
449                setattr(self.configuration, o, options[o])
450
451        if self.configuration.writeeventlog:
452            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
453                bb.event.unregister_UIHhandler(self.eventlog[1])
454            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
455                # we log all events to a file if so directed
456                # register the log file writer as UI Handler
457                writer = EventWriter(self, self.configuration.writeeventlog)
458                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
459                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
460
461        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
462        bb.msg.loggerDefaultDomains = self.configuration.debug_domains
463
464        if hasattr(self, "data"):
465            origenv = bb.data.init()
466            for k in environment:
467                origenv.setVar(k, environment[k])
468            self.data.setVar("BB_ORIGENV", origenv)
469
470        for k in bb.utils.approved_variables():
471            if k in environment and k not in self.configuration.env:
472                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
473                self.configuration.env[k] = environment[k]
474                clean = False
475            if k in self.configuration.env and k not in environment:
476                logger.debug("Updating environment variable %s (deleted)" % (k))
477                del self.configuration.env[k]
478                clean = False
479            if k not in self.configuration.env and k not in environment:
480                continue
481            if environment[k] != self.configuration.env[k]:
482                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
483                self.configuration.env[k] = environment[k]
484                clean = False
485
486        # Now update all the variables not in the datastore to match
487        self.configuration.env = environment
488
489        if not clean:
490            logger.debug("Base environment change, triggering reparse")
491            self.reset()
492
493    def runCommands(self, server, data, abort):
494        """
495        Run any queued asynchronous command
496        This is done by the idle handler so it runs in true context rather than
497        tied to any UI.
498        """
499
500        return self.command.runAsyncCommand()
501
    def showVersions(self):
        """
        Print a table of all known recipes in the default multiconfig with
        their latest, preferred and required versions.
        """

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    # Version tuples are rendered as pe:pv-pr
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            # Don't show a preferred version when it is already the latest
            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
528
529    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
530        """
531        Show the outer or per-recipe environment
532        """
533        fn = None
534        envdata = None
535        mc = ''
536        if not pkgs_to_build:
537            pkgs_to_build = []
538
539        orig_tracking = self.configuration.tracking
540        if not orig_tracking:
541            self.enableDataTracking()
542            self.reset()
543
544        def mc_base(p):
545            if p.startswith('mc:'):
546                s = p.split(':')
547                if len(s) == 2:
548                    return s[1]
549            return None
550
551        if buildfile:
552            # Parse the configuration here. We need to do it explicitly here since
553            # this showEnvironment() code path doesn't use the cache
554            self.parseConfiguration()
555
556            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
557            fn = self.matchFile(fn, mc)
558            fn = bb.cache.realfn2virtual(fn, cls, mc)
559        elif len(pkgs_to_build) == 1:
560            mc = mc_base(pkgs_to_build[0])
561            if not mc:
562                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
563                if pkgs_to_build[0] in set(ignore.split()):
564                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
565
566                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
567
568                mc = runlist[0][0]
569                fn = runlist[0][3]
570
571        if fn:
572            try:
573                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
574                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
575            except Exception as e:
576                parselog.exception("Unable to read %s", fn)
577                raise
578        else:
579            if not mc in self.databuilder.mcdata:
580                bb.fatal('Not multiconfig named "%s" found' % mc)
581            envdata = self.databuilder.mcdata[mc]
582            data.expandKeys(envdata)
583            parse.ast.runAnonFuncs(envdata)
584
585        # Display history
586        with closing(StringIO()) as env:
587            self.data.inchistory.emit(env)
588            logger.plain(env.getvalue())
589
590        # emit variables and shell functions
591        with closing(StringIO()) as env:
592            data.emit_env(env, envdata, True)
593            logger.plain(env.getvalue())
594
595        # emit the metadata which isnt valid shell
596        for e in sorted(envdata.keys()):
597            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
598                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
599
600        if not orig_tracking:
601            self.disableDataTracking()
602            self.reset()
603
604    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
605        """
606        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
607        """
608        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
609
610        # A task of None means use the default task
611        if task is None:
612            task = self.configuration.cmd
613        if not task.startswith("do_"):
614            task = "do_%s" % task
615
616        targetlist = self.checkPackages(pkgs_to_build, task)
617        fulltargetlist = []
618        defaulttask_implicit = ''
619        defaulttask_explicit = False
620        wildcard = False
621
622        # Wild card expansion:
623        # Replace string such as "mc:*:bash"
624        # into "mc:A:bash mc:B:bash bash"
625        for k in targetlist:
626            if k.startswith("mc:") and k.count(':') >= 2:
627                if wildcard:
628                    bb.fatal('multiconfig conflict')
629                if k.split(":")[1] == "*":
630                    wildcard = True
631                    for mc in self.multiconfigs:
632                        if mc:
633                            fulltargetlist.append(k.replace('*', mc))
634                        # implicit default task
635                        else:
636                            defaulttask_implicit = k.split(":")[2]
637                else:
638                    fulltargetlist.append(k)
639            else:
640                defaulttask_explicit = True
641                fulltargetlist.append(k)
642
643        if not defaulttask_explicit and defaulttask_implicit != '':
644            fulltargetlist.append(defaulttask_implicit)
645
646        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
647        taskdata = {}
648        localdata = {}
649
650        for mc in self.multiconfigs:
651            taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
652            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
653            bb.data.expandKeys(localdata[mc])
654
655        current = 0
656        runlist = []
657        for k in fulltargetlist:
658            origk = k
659            mc = ""
660            if k.startswith("mc:") and k.count(':') >= 2:
661                mc = k.split(":")[1]
662                k = ":".join(k.split(":")[2:])
663            ktask = task
664            if ":do_" in k:
665                k2 = k.split(":do_")
666                k = k2[0]
667                ktask = k2[1]
668
669            if mc not in self.multiconfigs:
670                 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))
671
672            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
673            current += 1
674            if not ktask.startswith("do_"):
675                ktask = "do_%s" % ktask
676            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
677                # e.g. in ASSUME_PROVIDED
678                continue
679            fn = taskdata[mc].build_targets[k][0]
680            runlist.append([mc, k, ktask, fn])
681            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
682
683        havemc = False
684        for mc in self.multiconfigs:
685            if taskdata[mc].get_mcdepends():
686                havemc = True
687
688        # No need to do check providers if there are no mcdeps or not an mc build
689        if havemc or len(self.multiconfigs) > 1:
690            seen = set()
691            new = True
692            # Make sure we can provide the multiconfig dependency
693            while new:
694                mcdeps = set()
695                # Add unresolved first, so we can get multiconfig indirect dependencies on time
696                for mc in self.multiconfigs:
697                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
698                    mcdeps |= set(taskdata[mc].get_mcdepends())
699                new = False
700                for mc in self.multiconfigs:
701                    for k in mcdeps:
702                        if k in seen:
703                            continue
704                        l = k.split(':')
705                        depmc = l[2]
706                        if depmc not in self.multiconfigs:
707                            bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
708                        else:
709                            logger.debug("Adding providers for multiconfig dependency %s" % l[3])
710                            taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
711                            seen.add(k)
712                            new = True
713
714        for mc in self.multiconfigs:
715            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
716
717        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
718        return taskdata, runlist
719
720    def prepareTreeData(self, pkgs_to_build, task):
721        """
722        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
723        """
724
725        # We set abort to False here to prevent unbuildable targets raising
726        # an exception when we're just generating data
727        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
728
729        return runlist, taskdata
730
731    ######## WARNING : this function requires cache_extra to be enabled ########
732
733    def generateTaskDepTreeData(self, pkgs_to_build, task):
734        """
735        Create a dependency graph of pkgs_to_build including reverse dependency
736        information.
737        """
738        if not task.startswith("do_"):
739            task = "do_%s" % task
740
741        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
742        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
743        rq.rqdata.prepare()
744        return self.buildDependTree(rq, taskdata)
745
746    @staticmethod
747    def add_mc_prefix(mc, pn):
748        if mc:
749            return "mc:%s:%s" % (mc, pn)
750        return pn
751
    def buildDependTree(self, rq, taskdata):
        """
        Flatten a prepared runqueue into a dependency-tree dictionary.

        Arguments:
            rq -- a bb.runqueue.RunQueue whose rqdata has been prepare()d
            taskdata -- dict mapping multiconfig name -> bb.taskdata.TaskData

        Returns a dict with keys: depends, tdepends, pn, rdepends-pn,
        packages, rdepends-pkg, rrecs-pkg, providermap, layer-priorities.
        """
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        # Record which PN (and version) satisfies each provider name that
        # differs from the PN itself.
        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version  = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            # Per-PN metadata is only recorded the first time the PN is seen.
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            # Task-level dependency edges, keyed as "<pn>.<taskname>".
            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
            # Per-recipe-file data only needs recording once per recipe file.
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
838
839    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.

        Like buildDependTree(), but recipe-level only: dependencies are mapped
        to the PN of the selected provider and no runqueue is built.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        # Only the per-multiconfig taskdata is needed here; the runlist is dropped.
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version  = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                # NOTE(review): rdepends/rrecs are only (re)bound when a new PN
                # is first seen; if a later, unseen taskfn maps to an
                # already-seen PN, the .update() calls below reuse values from
                # an earlier iteration -- looks like latent staleness, confirm.
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    # Map each build-time dependency to the PN of its resolved
                    # provider; fall back to the raw dependency name.
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    # Same mapping, for runtime dependencies.
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
917
918    def generateDepTreeEvent(self, pkgs_to_build, task):
919        """
920        Create a task dependency graph of pkgs_to_build.
921        Generate an event with the result
922        """
923        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
924        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
925
926    def generateDotGraphFiles(self, pkgs_to_build, task):
927        """
928        Create a task dependency graph of pkgs_to_build.
929        Save the result to a set of .dot files.
930        """
931
932        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
933
934        with open('pn-buildlist', 'w') as f:
935            for pn in depgraph["pn"]:
936                f.write(pn + "\n")
937        logger.info("PN build list saved to 'pn-buildlist'")
938
939        # Remove old format output files to ensure no confusion with stale data
940        try:
941            os.unlink('pn-depends.dot')
942        except FileNotFoundError:
943            pass
944        try:
945            os.unlink('package-depends.dot')
946        except FileNotFoundError:
947            pass
948        try:
949            os.unlink('recipe-depends.dot')
950        except FileNotFoundError:
951            pass
952
953        with open('task-depends.dot', 'w') as f:
954            f.write("digraph depends {\n")
955            for task in sorted(depgraph["tdepends"]):
956                (pn, taskname) = task.rsplit(".", 1)
957                fn = depgraph["pn"][pn]["filename"]
958                version = depgraph["pn"][pn]["version"]
959                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
960                for dep in sorted(depgraph["tdepends"][task]):
961                    f.write('"%s" -> "%s"\n' % (task, dep))
962            f.write("}\n")
963        logger.info("Task dependencies saved to 'task-depends.dot'")
964
    def show_appends_with_no_recipes(self):
        """
        Warn (or abort, depending on BB_DANGLINGAPPENDS_WARNONLY) about
        .bbappend files that were not applied to any recipe.
        """
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            # Any append not in the applied list is dangling for this config.
            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                        '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            # NOTE(review): 'mc' here is whichever multiconfig the loop above
            # ended on, so the warn-only flag is read from that config only
            # -- confirm this is intended.
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)
997
998    def handlePrefProviders(self):
999
1000        for mc in self.multiconfigs:
1001            localdata = data.createCopy(self.databuilder.mcdata[mc])
1002            bb.data.expandKeys(localdata)
1003
1004            # Handle PREFERRED_PROVIDERS
1005            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
1006                try:
1007                    (providee, provider) = p.split(':')
1008                except:
1009                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
1010                    continue
1011                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
1012                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
1013                self.recipecaches[mc].preferred[providee] = provider
1014
1015    def findConfigFilePath(self, configfile):
1016        """
1017        Find the location on disk of configfile and if it exists and was parsed by BitBake
1018        emit the ConfigFilePathFound event with the path to the file.
1019        """
1020        path = bb.cookerdata.findConfigFile(configfile, self.data)
1021        if not path:
1022            return
1023
1024        # Generate a list of parsed configuration files by searching the files
1025        # listed in the __depends and __base_depends variables with a .conf suffix.
1026        conffiles = []
1027        dep_files = self.data.getVar('__base_depends', False) or []
1028        dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1029
1030        for f in dep_files:
1031            if f[0].endswith(".conf"):
1032                conffiles.append(f[0])
1033
1034        _, conf, conffile = path.rpartition("conf/")
1035        match = os.path.join(conf, conffile)
1036        # Try and find matches for conf/conffilename.conf as we don't always
1037        # have the full path to the file.
1038        for cfg in conffiles:
1039            if cfg.endswith(match):
1040                bb.event.fire(bb.event.ConfigFilePathFound(path),
1041                              self.data)
1042                break
1043
1044    def findFilesMatchingInDir(self, filepattern, directory):
1045        """
1046        Searches for files containing the substring 'filepattern' which are children of
1047        'directory' in each BBPATH. i.e. to find all rootfs package classes available
1048        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1049        or to find all machine configuration files one could call:
1050        findFilesMatchingInDir(self, '.conf', 'conf/machine')
1051        """
1052
1053        matches = []
1054        bbpaths = self.data.getVar('BBPATH').split(':')
1055        for path in bbpaths:
1056            dirpath = os.path.join(path, directory)
1057            if os.path.exists(dirpath):
1058                for root, dirs, files in os.walk(dirpath):
1059                    for f in files:
1060                        if filepattern in f:
1061                            matches.append(f)
1062
1063        if matches:
1064            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1065
1066    def findProviders(self, mc=''):
1067        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1068
    def findBestProvider(self, pn, mc=''):
        """
        Return (latest, latest_f, preferred_ver, preferred_file) for 'pn'.

        When 'pn' names a provider rather than a recipe, only the final
        filename slot is populated and the other slots are None.
        """
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            if eligible is not None:
                # NOTE(review): an empty (non-None) eligible list would raise
                # IndexError here -- confirm filterProviders never returns [].
                filename = eligible[0]
            else:
                filename = None
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
            if required and preferred_file is None:
                # A specific version was required but nothing satisfies it.
                return None, None, None, None
            return (latest, latest_f, preferred_ver, preferred_file)
        else:
            return None, None, None, None
1085
1086    def findConfigFiles(self, varname):
1087        """
1088        Find config files which are appropriate values for varname.
1089        i.e. MACHINE, DISTRO
1090        """
1091        possible = []
1092        var = varname.lower()
1093
1094        data = self.data
1095        # iterate configs
1096        bbpaths = data.getVar('BBPATH').split(':')
1097        for path in bbpaths:
1098            confpath = os.path.join(path, "conf", var)
1099            if os.path.exists(confpath):
1100                for root, dirs, files in os.walk(confpath):
1101                    # get all child files, these are appropriate values
1102                    for f in files:
1103                        val, sep, end = f.rpartition('.')
1104                        if end == 'conf':
1105                            possible.append(val)
1106
1107        if possible:
1108            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1109
1110    def findInheritsClass(self, klass):
1111        """
1112        Find all recipes which inherit the specified class
1113        """
1114        pkg_list = []
1115
1116        for pfn in self.recipecaches[''].pkg_fn:
1117            inherits = self.recipecaches[''].inherits.get(pfn, None)
1118            if inherits and klass in inherits:
1119                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
1120
1121        return pkg_list
1122
1123    def generateTargetsTree(self, klass=None, pkgs=None):
1124        """
1125        Generate a dependency tree of buildable targets
1126        Generate an event with the result
1127        """
1128        # if the caller hasn't specified a pkgs list default to universe
1129        if not pkgs:
1130            pkgs = ['universe']
1131        # if inherited_class passed ensure all recipes which inherit the
1132        # specified class are included in pkgs
1133        if klass:
1134            extra_pkgs = self.findInheritsClass(klass)
1135            pkgs = pkgs + extra_pkgs
1136
1137        # generate a dependency tree for all our packages
1138        tree = self.generatePkgDepTreeData(pkgs, 'build')
1139        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1140
1141    def interactiveMode( self ):
1142        """Drop off into a shell"""
1143        try:
1144            from bb import shell
1145        except ImportError:
1146            parselog.exception("Interactive mode not available")
1147            raise bb.BBHandledException()
1148        else:
1149            shell.start( self )
1150
1151
1152    def handleCollections(self, collections):
1153        """Handle collections"""
1154        errors = False
1155        self.bbfile_config_priorities = []
1156        if collections:
1157            collection_priorities = {}
1158            collection_depends = {}
1159            collection_list = collections.split()
1160            min_prio = 0
1161            for c in collection_list:
1162                bb.debug(1,'Processing %s in collection list' % (c))
1163
1164                # Get collection priority if defined explicitly
1165                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
1166                if priority:
1167                    try:
1168                        prio = int(priority)
1169                    except ValueError:
1170                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1171                        errors = True
1172                    if min_prio == 0 or prio < min_prio:
1173                        min_prio = prio
1174                    collection_priorities[c] = prio
1175                else:
1176                    collection_priorities[c] = None
1177
1178                # Check dependencies and store information for priority calculation
1179                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
1180                if deps:
1181                    try:
1182                        depDict = bb.utils.explode_dep_versions2(deps)
1183                    except bb.utils.VersionStringException as vse:
1184                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1185                    for dep, oplist in list(depDict.items()):
1186                        if dep in collection_list:
1187                            for opstr in oplist:
1188                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
1189                                (op, depver) = opstr.split()
1190                                if layerver:
1191                                    try:
1192                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
1193                                    except bb.utils.VersionStringException as vse:
1194                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1195                                    if not res:
1196                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1197                                        errors = True
1198                                else:
1199                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1200                                    errors = True
1201                        else:
1202                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1203                            errors = True
1204                    collection_depends[c] = list(depDict.keys())
1205                else:
1206                    collection_depends[c] = []
1207
1208                # Check recommends and store information for priority calculation
1209                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
1210                if recs:
1211                    try:
1212                        recDict = bb.utils.explode_dep_versions2(recs)
1213                    except bb.utils.VersionStringException as vse:
1214                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1215                    for rec, oplist in list(recDict.items()):
1216                        if rec in collection_list:
1217                            if oplist:
1218                                opstr = oplist[0]
1219                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
1220                                if layerver:
1221                                    (op, recver) = opstr.split()
1222                                    try:
1223                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
1224                                    except bb.utils.VersionStringException as vse:
1225                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1226                                    if not res:
1227                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1228                                        continue
1229                                else:
1230                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1231                                    continue
1232                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1233                            collection_depends[c].append(rec)
1234                        else:
1235                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1236
1237            # Recursively work out collection priorities based on dependencies
1238            def calc_layer_priority(collection):
1239                if not collection_priorities[collection]:
1240                    max_depprio = min_prio
1241                    for dep in collection_depends[collection]:
1242                        calc_layer_priority(dep)
1243                        depprio = collection_priorities[dep]
1244                        if depprio > max_depprio:
1245                            max_depprio = depprio
1246                    max_depprio += 1
1247                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1248                    collection_priorities[collection] = max_depprio
1249
1250            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1251            for c in collection_list:
1252                calc_layer_priority(c)
1253                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
1254                if regex is None:
1255                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
1256                    errors = True
1257                    continue
1258                elif regex == "":
1259                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
1260                    cre = re.compile('^NULL$')
1261                    errors = False
1262                else:
1263                    try:
1264                        cre = re.compile(regex)
1265                    except re.error:
1266                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1267                        errors = True
1268                        continue
1269                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
1270        if errors:
1271            # We've already printed the actual error(s)
1272            raise CollectionError("Errors during parsing layer configuration")
1273
1274    def buildSetVars(self):
1275        """
1276        Setup any variables needed before starting a build
1277        """
1278        t = time.gmtime()
1279        for mc in self.databuilder.mcdata:
1280            ds = self.databuilder.mcdata[mc]
1281            if not ds.getVar("BUILDNAME", False):
1282                ds.setVar("BUILDNAME", "${DATE}${TIME}")
1283            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1284            ds.setVar("DATE", time.strftime('%Y%m%d', t))
1285            ds.setVar("TIME", time.strftime('%H%M%S', t))
1286
1287    def reset_mtime_caches(self):
1288        """
1289        Reset mtime caches - this is particularly important when memory resident as something
1290        which is cached is not unlikely to have changed since the last invocation (e.g. a
1291        file associated with a recipe might have been modified by the user).
1292        """
1293        build.reset_cache()
1294        bb.fetch._checksum_cache.mtime_cache.clear()
1295        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1296        if siggen_cache:
1297            bb.parse.siggen.checksum_cache.mtime_cache.clear()
1298
1299    def matchFiles(self, bf, mc=''):
1300        """
1301        Find the .bb files which match the expression in 'buildfile'.
1302        """
1303        if bf.startswith("/") or bf.startswith("../"):
1304            bf = os.path.abspath(bf)
1305
1306        self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1307        filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1308        try:
1309            os.stat(bf)
1310            bf = os.path.abspath(bf)
1311            return [bf]
1312        except OSError:
1313            regexp = re.compile(bf)
1314            matches = []
1315            for f in filelist:
1316                if regexp.search(f) and os.path.isfile(f):
1317                    matches.append(f)
1318            return matches
1319
1320    def matchFile(self, buildfile, mc=''):
1321        """
1322        Find the .bb file which matches the expression in 'buildfile'.
1323        Raise an error if multiple files
1324        """
1325        matches = self.matchFiles(buildfile, mc)
1326        if len(matches) != 1:
1327            if matches:
1328                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1329                if matches:
1330                    for f in matches:
1331                        msg += "\n    %s" % f
1332                parselog.error(msg)
1333            else:
1334                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1335            raise NoSpecificMatch
1336        return matches[0]
1337
    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile.

        Arguments:
            buildfile -- path or regular expression identifying a single .bb file
            task -- task to run (the "do_" prefix may be omitted; it is
                    normalised later by buildFileInternal)
        """
        bb.event.fire(bb.event.BuildInit(), self.data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        self.buildFileInternal(buildfile, task)
1349
1350    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1351        """
1352        Build the file matching regexp buildfile
1353        """
1354
1355        # Parse the configuration here. We need to do it explicitly here since
1356        # buildFile() doesn't use the cache
1357        self.parseConfiguration()
1358
1359        # If we are told to do the None task then query the default task
1360        if task is None:
1361            task = self.configuration.cmd
1362        if not task.startswith("do_"):
1363            task = "do_%s" % task
1364
1365        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
1366        fn = self.matchFile(fn, mc)
1367
1368        self.buildSetVars()
1369        self.reset_mtime_caches()
1370
1371        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
1372
1373        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
1374        infos = dict(infos)
1375
1376        fn = bb.cache.realfn2virtual(fn, cls, mc)
1377        try:
1378            info_array = infos[fn]
1379        except KeyError:
1380            bb.fatal("%s does not exist" % fn)
1381
1382        if info_array[0].skipped:
1383            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1384
1385        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
1386
1387        # Tweak some variables
1388        item = info_array[0].pn
1389        self.recipecaches[mc].ignored_dependencies = set()
1390        self.recipecaches[mc].bbfile_priority[fn] = 1
1391        self.configuration.limited_deps = True
1392
1393        # Remove external dependencies
1394        self.recipecaches[mc].task_deps[fn]['depends'] = {}
1395        self.recipecaches[mc].deps[fn] = []
1396        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1397        self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1398
1399        # Invalidate task for target if force mode active
1400        if self.configuration.force:
1401            logger.verbose("Invalidate task %s, %s", task, fn)
1402            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
1403
1404        # Setup taskdata structure
1405        taskdata = {}
1406        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
1407        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1408
1409        if quietlog:
1410            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1411            bb.runqueue.logger.setLevel(logging.WARNING)
1412
1413        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1414        if fireevents:
1415            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1416
1417        # Execute the runqueue
1418        runlist = [[mc, item, task, fn]]
1419
1420        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1421
1422        def buildFileIdle(server, rq, abort):
1423
1424            msg = None
1425            interrupted = 0
1426            if abort or self.state == state.forceshutdown:
1427                rq.finish_runqueue(True)
1428                msg = "Forced shutdown"
1429                interrupted = 2
1430            elif self.state == state.shutdown:
1431                rq.finish_runqueue(False)
1432                msg = "Stopped build"
1433                interrupted = 1
1434            failures = 0
1435            try:
1436                retval = rq.execute_runqueue()
1437            except runqueue.TaskFailure as exc:
1438                failures += len(exc.args)
1439                retval = False
1440            except SystemExit as exc:
1441                self.command.finishAsyncCommand(str(exc))
1442                if quietlog:
1443                    bb.runqueue.logger.setLevel(rqloglevel)
1444                return False
1445
1446            if not retval:
1447                if fireevents:
1448                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1449                self.command.finishAsyncCommand(msg)
1450                # We trashed self.recipecaches above
1451                self.parsecache_valid = False
1452                self.configuration.limited_deps = False
1453                bb.parse.siggen.reset(self.data)
1454                if quietlog:
1455                    bb.runqueue.logger.setLevel(rqloglevel)
1456                return False
1457            if retval is True:
1458                return True
1459            return retval
1460
1461        self.idleCallBackRegister(buildFileIdle, rq)
1462
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        def buildTargetsIdle(server, rq, abort):
            # Idle handler driven by the server main loop: steps the runqueue
            # once per call and returns False when the build is finished
            # (whether successfully or not), True/retval to be called again.
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                # Build finished - fire BuildCompleted for every multiconfig,
                # but always complete the async command even if firing fails.
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)

        # NOTE: buildname is referenced by the buildTargetsIdle closure above;
        # it is bound here before the idle handler ever runs.
        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.idleCallBackRegister(buildTargetsIdle, rq)
1533
1534
1535    def getAllKeysWithFlags(self, flaglist):
1536        dump = {}
1537        for k in self.data.keys():
1538            try:
1539                expand = True
1540                flags = self.data.getVarFlags(k)
1541                if flags and "func" in flags and "python" in flags:
1542                    expand = False
1543                v = self.data.getVar(k, expand)
1544                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1545                    dump[k] = {
1546    'v' : str(v) ,
1547    'history' : self.data.varhistory.variable(k),
1548                    }
1549                    for d in flaglist:
1550                        if flags and d in flags:
1551                            dump[k][d] = flags[d]
1552                        else:
1553                            dump[k][d] = None
1554            except Exception as e:
1555                print(e)
1556        return dump
1557
1558
1559    def updateCacheSync(self):
1560        if self.state == state.running:
1561            return
1562
1563        # reload files for which we got notifications
1564        for p in self.inotify_modified_files:
1565            bb.parse.update_cache(p)
1566            if p in bb.parse.BBHandler.cached_statements:
1567                del bb.parse.BBHandler.cached_statements[p]
1568        self.inotify_modified_files = []
1569
1570        if not self.baseconfig_valid:
1571            logger.debug("Reloading base configuration data")
1572            self.initConfigurationData()
1573            self.handlePRServ()
1574
    # This is called for all async commands when self.state != running
    def updateCache(self):
        # Bring the recipe parse cache up to date. Called repeatedly from the
        # idle loop: returns True while parsing is still in progress, None
        # once parsing has completed, and raises BBHandledException on
        # shutdown or parse error.
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
                self.parser.final_cleanup()
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        # First call with an invalid parse cache: (re)collect the recipe file
        # lists and create the CookerParser which drives the parse.
        if self.state != state.parsing and not self.parsecache_valid:
            bb.parse.siggen.reset(self.data)
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

            for mc in self.multiconfigs:
                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                self.recipecaches[mc].ignored_dependencies = set(ignore.split())

                for dep in self.configuration.extra_assume_provided:
                    self.recipecaches[mc].ignored_dependencies.add(dep)

            self.collections = {}

            mcfilelist = {}
            total_masked = 0
            searchdirs = set()
            for mc in self.multiconfigs:
                self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])

                mcfilelist[mc] = filelist
                total_masked += masked
                searchdirs |= set(search)

            # Add inotify watches for directories searched for bb/bbappend files
            for dirent in searchdirs:
                self.add_filewatch([[dirent]], dirs=True)

            self.parser = CookerParser(self, mcfilelist, total_masked)
            self.parsecache_valid = True

        self.state = state.parsing

        # Step the parser; parse_next() returns falsy once everything has
        # been parsed (or an error occurred).
        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            for mc in self.multiconfigs:
                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            for mc in self.multiconfigs:
                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
                bb.event.fire(event, self.databuilder.mcdata[mc])
            return None

        return True
1643
    def checkPackages(self, pkgs_to_build, task=None):
        """
        Sanity-check and expand the requested build targets: warns about
        targets in ASSUME_PROVIDED, rewrites the legacy "multiconfig:"
        prefix to "mc:", and expands the 'world' and 'universe'
        pseudo-targets into concrete target lists.
        Raises NothingToBuild when no targets were supplied.
        """

        # Return a copy, don't modify the original
        pkgs_to_build = pkgs_to_build[:]

        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
        # Iterate over a copy since the list is mutated inside the loop
        for pkg in pkgs_to_build.copy():
            if pkg in ignore:
                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
            if pkg.startswith("multiconfig:"):
                pkgs_to_build.remove(pkg)
                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))

        if 'world' in pkgs_to_build:
            pkgs_to_build.remove('world')
            for mc in self.multiconfigs:
                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
                for t in self.recipecaches[mc].world_target:
                    if mc:
                        # Prefix targets from non-default multiconfigs
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        if 'universe' in pkgs_to_build:
            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for mc in self.multiconfigs:
                for t in self.recipecaches[mc].universe_target:
                    if task:
                        # Only include targets where some provider has the
                        # requested task
                        foundtask = False
                        for provider_fn in self.recipecaches[mc].providers[t]:
                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
                                foundtask = True
                                break
                        if not foundtask:
                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
                            continue
                    if mc:
                        t = "mc:" + mc + ":" + t
                    pkgs_to_build.append(t)

        return pkgs_to_build
1689
1690    def pre_serve(self):
1691        return
1692
    def post_serve(self):
        """
        Tear down after the server stops serving: force a cooker shutdown,
        stop the PR server and the hash equivalence server, then notify
        clients via a CookerExit event.
        """
        self.shutdown(force=True)
        prserv.serv.auto_shutdown()
        if self.hashserv:
            self.hashserv.process.terminate()
            self.hashserv.process.join()
        # Only fire the exit event if configuration data was ever initialised
        if hasattr(self, "data"):
            bb.event.fire(CookerExit(), self.data)
1701
1702    def shutdown(self, force = False):
1703        if force:
1704            self.state = state.forceshutdown
1705        else:
1706            self.state = state.shutdown
1707
1708        if self.parser:
1709            self.parser.shutdown(clean=not force, force=force)
1710            self.parser.final_cleanup()
1711
    def finishcommand(self):
        # An async command has completed; return the cooker to its idle state.
        self.state = state.initial
1714
    def reset(self):
        # Rebuild the base configuration from scratch and restart the PR
        # service with the fresh settings.
        self.initConfigurationData()
        self.handlePRServ()
1718
1719    def clientComplete(self):
1720        """Called when the client is done using the server"""
1721        self.finishcommand()
1722        self.extraconfigdata = {}
1723        self.command.reset()
1724        if hasattr(self, "data"):
1725           self.databuilder.reset()
1726           self.data = self.databuilder.data
1727        self.parsecache_valid = False
1728        self.baseconfig_valid = False
1729
1730
class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        super().__init__()
1738
1739
class CookerCollectFiles(object):
    """
    Discovers the .bb and .bbappend files making up a parse run (one
    instance per multiconfig), applying BBFILES, BBMASK and the layer
    priority configuration.
    """
    def __init__(self, priorities, mc=''):
        self.mc = mc
        self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
        # We need to sort the list with the longest pattern first, and so on to
        # the shortest.  This allows nested layers to be properly evaluated.
        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)

    def calc_bbfile_priority(self, filename):
        """
        Return (priority, regex) for the first configured layer pattern
        matching filename, or (0, None) when no pattern claims the file.
        """
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                return pri, regex
        return 0, None

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dirpath, dirs, files in os.walk(path):
            # Prune version control metadata directories in place so
            # os.walk() never descends into them
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # Fix: str.endswith() requires a str or a *tuple* of suffixes;
            # passing a list raises TypeError
            found += [os.path.join(dirpath, f) for f in files if f.endswith(('.bb', '.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """
        Collect all available .bb build files.
        Returns (bbfiles, masked, searchdirs): the prioritized recipe list,
        the number of files masked by BBMASK, and the directories searched
        (for setting up inotify watches).
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar("BBFILES") or "").split()

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
        config.setVar("BBFILES_PRIORITIZED", " ".join(files))

        # Fall back to the current directory when BBFILES is empty
        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this, this is horrid. We intercept the os.listdir()
        # (or os.scandir() for python 3.6+) calls while we run glob().
        origlistdir = os.listdir
        if hasattr(os, 'scandir'):
            origscandir = os.scandir
        searchdirs = []

        def ourlistdir(d):
            searchdirs.append(d)
            return origlistdir(d)

        def ourscandir(d):
            searchdirs.append(d)
            return origscandir(d)

        os.listdir = ourlistdir
        if hasattr(os, 'scandir'):
            os.scandir = ourscandir
        try:
            # Can't use set here as order is important
            newfiles = []
            for f in files:
                if os.path.isdir(f):
                    dirfiles = self.find_bbfiles(f)
                    for g in dirfiles:
                        if g not in newfiles:
                            newfiles.append(g)
                else:
                    globbed = glob.glob(f)
                    if not globbed and os.path.exists(f):
                        globbed = [f]
                    # glob gives files in order on disk. Sort to be deterministic.
                    for g in sorted(globbed):
                        if g not in newfiles:
                            newfiles.append(g)
        finally:
            # Always restore the real os.listdir/os.scandir
            os.listdir = origlistdir
            if hasattr(os, 'scandir'):
                os.scandir = origscandir

        bbmask = config.getVar('BBMASK')

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                # When constructing an older style single regex, it's possible for BBMASK
                # to end up beginning with '|', which matches and masks _everything_.
                if mask.startswith("|"):
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
                    mask = mask[1:]
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except sre_constants.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        # Split the found files into recipes and appends, dropping masked ones
        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked, searchdirs)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        (a '%' in the append name acts as a wildcard suffix)
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return tuple(filelist)

    def collection_priorities(self, pkgfns, fns, d):
        # Return the priorities of the entries in pkgfns
        # Also check that all the regexes in self.bbfile_config_priorities are used
        # (but to do that we need to ensure skipped recipes aren't counted, nor
        # collections in BBFILE_PATTERN_IGNORE_EMPTY)

        priorities = {}
        seen = set()
        matched = set()

        matched_regex = set()
        unmatched_regex = set()
        for _, _, regex, _ in self.bbfile_config_priorities:
            unmatched_regex.add(regex)

        # Calculate priorities for each file
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p], regex = self.calc_bbfile_priority(realfn)
            if regex in unmatched_regex:
                matched_regex.add(regex)
                unmatched_regex.remove(regex)
            seen.add(realfn)
            if regex:
                matched.add(realfn)

        if unmatched_regex:
            # Account for bbappend files
            for b in self.bbappends:
                (bbfile, append) = b
                seen.add(append)

            # Account for skipped recipes
            seen.update(fns)

            seen.difference_update(matched)

            def already_matched(fn):
                for regex in matched_regex:
                    if regex.match(fn):
                        return True
                return False

            for unmatch in unmatched_regex.copy():
                for fn in seen:
                    if unmatch.match(fn):
                        # If the bbappend or file was already matched by another regex, skip it
                        # e.g. for a layer within a layer, the outer regex could match, the inner
                        # regex may match nothing and we should warn about that
                        if already_matched(fn):
                            continue
                        unmatched_regex.remove(unmatch)
                        break

        # Warn about collections whose pattern matched nothing at all
        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched_regex:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                    collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
                                                                                             collection, pattern))

        return priorities
1969
class ParsingFailure(Exception):
    """
    Wraps an exception raised while parsing a recipe, recording which
    recipe triggered it so the error can be reported against that file.
    """
    def __init__(self, realexception, recipe):
        # Pass both values to Exception so they end up in args and the
        # exception survives pickling across the multiprocessing queue.
        super().__init__(realexception, recipe)
        self.realexception = realexception
        self.recipe = recipe
1975
class Parser(multiprocessing.Process):
    # Worker process which parses its pre-assigned list of recipe files and
    # streams the results back to the server over a multiprocessing queue.
    def __init__(self, jobs, results, quit, init, profile):
        # jobs: list of (mc, cache, filename, appends) tuples to parse
        # results: queue to put parse results on
        # quit: queue used to signal this worker to stop
        # init: callable run once inside the child process (or None)
        # profile: whether to wrap the run in a profiler
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot the parent's context and event handlers so each parse can
        # start from a pristine copy (parsing mutates both)
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        # Process entry point; optionally wraps the real work in a profiler
        # and dumps per-process stats on exit.

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        # Main worker loop: parse jobs one at a time and push the results,
        # retrying results that couldn't be queued (queue full) before
        # taking the next job.
        if self.init:
            self.init()

        pending = []
        while True:
            # A message on the quit queue means stop now
            try:
                self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                self.results.close()
                self.results.join_thread()
                break

            if pending:
                # Retry a result that previously failed to queue
                result = pending.pop()
            else:
                try:
                    job = self.jobs.pop()
                except IndexError:
                    # No jobs left - we are done
                    self.results.close()
                    self.results.join_thread()
                    break
                result = self.parse(*job)
                # Clear the siggen cache after parsing to control memory usage, its huge
                bb.parse.siggen.postparsing_clean_cache()
            try:
                self.results.put(result, timeout=0.25)
            except queue.Full:
                pending.append(result)

    def parse(self, mc, cache, filename, appends):
        # Parse a single recipe file, returning (True, mc, infos) on success
        # or (True, exception) on failure; exceptions are returned rather
        # than raised so they travel back over the result queue.
        try:
            origfilter = bb.event.LogHandler.filter
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, mc, cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
        finally:
            bb.event.LogHandler.filter = origfilter
2063
2064class CookerParser(object):
    def __init__(self, cooker, mcfilelist, masked):
        # mcfilelist maps multiconfig name -> list of recipe files to parse;
        # masked is the count of files excluded by BBMASK (statistics only).
        self.mcfilelist = mcfilelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0

        self.current = 0
        self.process_names = []

        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        self.fromcache = set()
        self.willparse = set()
        # Partition the work up front: files with a valid cache entry are
        # loaded from cache, the rest go to the parser worker processes.
        for mc in self.cooker.multiconfigs:
            for filename in self.mcfilelist[mc]:
                appends = self.cooker.collections[mc].get_file_appends(filename)
                if not self.bb_caches[mc].cacheValid(filename, appends):
                    self.willparse.add((mc, self.bb_caches[mc], filename, appends))
                else:
                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends))

        self.total = len(self.fromcache) + len(self.willparse)
        self.toparse = len(self.willparse)
        self.progress_chunk = int(max(self.toparse / 100, 1))

        # Cap the worker count at the number of files actually needing a parse
        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                 multiprocessing.cpu_count()), self.toparse)

        self.start()
        self.haveshutdown = False
        self.syncthread = None
2105
    def start(self):
        # Begin parsing: self.results becomes a generator chaining cached
        # results with live results from the worker processes (if needed).
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            def init():
                # Runs inside each worker process: restore sane signal
                # handling and register cache-save hooks for process exit.
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
                signal.signal(signal.SIGHUP, signal.SIG_DFL)
                signal.signal(signal.SIGINT, signal.SIG_IGN)
                bb.utils.set_process_name(multiprocessing.current_process().name)
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()

            # Distribute the jobs round-robin across the worker processes
            def chunkify(lst,n):
                return [lst[i::n] for i in range(n)]
            self.jobs = chunkify(list(self.willparse), self.num_processes)

            for i in range(0, self.num_processes):
                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            self.results = itertools.chain(self.results, self.parse_generator())
2133
    def shutdown(self, clean=True, force=False):
        # Stop the parser workers and flush caches. clean=True fires the
        # ParseCompleted statistics event; force=True terminates workers
        # that don't exit promptly.
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)

        # One quit token per worker process
        for process in self.processes:
            self.parser_quit.put(None)

        # Cleanup the queue before call process.join(), otherwise there might be
        # deadlocks.
        while True:
            try:
               self.result_queue.get(timeout=0.25)
            except queue.Empty:
                break

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()

        self.parser_quit.close()
        # Allow data left in the cancel queue to be discarded
        self.parser_quit.cancel_join_thread()

        # Write the parse caches back to disk in the background; joined
        # later in final_cleanup()
        def sync_caches():
            for c in self.bb_caches.values():
                c.sync()

        sync = threading.Thread(target=sync_caches, name="SyncThread")
        self.syncthread = sync
        sync.start()
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            # Merge the per-worker profile dumps into one processed report
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))
2190
2191    def final_cleanup(self):
2192        if self.syncthread:
2193            self.syncthread.join()
2194
2195    def load_cached(self):
2196        for mc, cache, filename, appends in self.fromcache:
2197            cached, infos = cache.load(filename, appends)
2198            yield not cached, mc, infos
2199
2200    def parse_generator(self):
2201        while True:
2202            if self.parsed >= self.toparse:
2203                break
2204
2205            try:
2206                result = self.result_queue.get(timeout=0.25)
2207            except queue.Empty:
2208                pass
2209            else:
2210                value = result[1]
2211                if isinstance(value, BaseException):
2212                    raise value
2213                else:
2214                    yield result
2215
    def parse_next(self):
        """Consume one result from self.results and record it in the caches.

        Returns True while parsing should continue; returns False when the
        result stream is exhausted (clean shutdown) or a parse error occurred
        (unclean, forced shutdown with self.error incremented).  The except
        clauses are ordered most-specific first; that ordering is load-bearing.
        """
        result = []
        parsed = None
        try:
            parsed, mc, result = next(self.results)
        except StopIteration:
            # All recipes processed: normal end of parsing.
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            # The error was already presented to the user; just name the recipe.
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False, force=True)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False, force=True)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False, force=True)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            # Drop bitbake-internal frames from the front of the traceback so
            # the report starts at the recipe's own frames.
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False, force=True)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                # NOTE(review): assumes any exception carrying .recipe also
                # carries .traceback -- confirm; otherwise this line itself
                # raises AttributeError.
                logger.error('Unable to parse %s' % value.recipe,
                            exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False, force=True)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            # Fire progress events only every progress_chunk recipes to avoid
            # flooding the UI.
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Record each virtual provider from the result, tracking skipped ones.
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True
2279
2280    def reparse(self, filename):
2281        to_reparse = set()
2282        for mc in self.cooker.multiconfigs:
2283            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2284
2285        for mc, filename, appends in to_reparse:
2286            infos = self.bb_caches[mc].parse(filename, appends)
2287            for vfn, info_array in infos:
2288                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
2289