xref: /openbmc/openbmc/poky/bitbake/lib/bb/cooker.py (revision 78b72798)
1#
2# Copyright (C) 2003, 2004  Chris Larson
3# Copyright (C) 2003, 2004  Phil Blundell
4# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
5# Copyright (C) 2005        Holger Hans Peter Freyther
6# Copyright (C) 2005        ROAD GmbH
7# Copyright (C) 2006 - 2007 Richard Purdie
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import sys, os, glob, os.path, re, time
13import itertools
14import logging
15import multiprocessing
16import threading
17from io import StringIO, UnsupportedOperation
18from contextlib import closing
19from collections import defaultdict, namedtuple
20import bb, bb.exceptions, bb.command
21from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
22import queue
23import signal
24import prserv.serv
25import pyinotify
26import json
27import pickle
28import codecs
29import hashserv
30
# Module-level loggers, one per cooker subsystem, so verbosity can be
# tuned per-domain by bb.msg without affecting the others.
logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
36
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when a file lookup produces no match or several matches
    """
41
class NothingToBuild(Exception):
    """
    Exception raised when no buildable targets could be determined
    """
46
class CollectionError(bb.BBHandledException):
    """
    Exception raised for errors in the layer (collection) configuration
    """
51
class state:
    """Enumeration of cooker lifecycle states.

    The numeric codes are stable and used across the UI/server boundary.
    """
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name for a numeric state code.

        Raises ValueError if code does not correspond to a known state.
        """
        for name in dir(cls):
            value = getattr(cls, name)
            # isinstance() is the idiomatic type check; state codes are plain
            # ints and no other int attributes exist on this class.
            if isinstance(value, type(cls.initial)) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)
62
63
class SkippedPackage:
    """Lightweight record describing a recipe that was skipped during parsing."""

    def __init__(self, info = None, reason = None):
        # Defaults for the "no information" case.
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            # Copy the relevant fields from the recipe info object and
            # flatten per-package runtime provides into one list.
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            rprovides = info.packages + info.rprovides
            for pkg in info.packages:
                rprovides = rprovides + info.rprovides_pkg[pkg]
            self.rprovides = rprovides
        elif reason:
            self.skipreason = reason
80
81
class CookerFeatures(object):
    """Set of optional cooker features a UI may request."""

    # The known feature codes; anything outside this list is ignored.
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # Unknown feature requests are silently dropped rather than raising.
        if f in CookerFeatures._feature_list:
            self._features.add(f)

    def __contains__(self, f):
        return self._features.__contains__(f)

    def __iter__(self):
        return iter(self._features)

    def __next__(self):
        return next(self._features)
102
103
104class EventWriter:
105    def __init__(self, cooker, eventfile):
106        self.file_inited = None
107        self.cooker = cooker
108        self.eventfile = eventfile
109        self.event_queue = []
110
111    def write_event(self, event):
112        with open(self.eventfile, "a") as f:
113            try:
114                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
115                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
116                                             "vars": str_event}))
117            except Exception as err:
118                import traceback
119                print(err, traceback.format_exc())
120
121    def send(self, event):
122        if self.file_inited:
123            # we have the file, just write the event
124            self.write_event(event)
125        else:
126            # init on bb.event.BuildStarted
127            name = "%s.%s" % (event.__module__, event.__class__.__name__)
128            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
129                with open(self.eventfile, "w") as f:
130                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
131
132                self.file_inited = True
133
134                # write pending events
135                for evt in self.event_queue:
136                    self.write_event(evt)
137
138                # also write the current event
139                self.write_event(event)
140            else:
141                # queue all events until the file is inited
142                self.event_queue.append(event)
143
144#============================================================================#
145# BBCooker
146#============================================================================#
147class BBCooker:
148    """
149    Manages one bitbake build run
150    """
151
    def __init__(self, featureSet=None, idleCallBackRegister=None):
        """Create a cooker.

        featureSet: optional iterable of CookerFeatures constants to enable.
        idleCallBackRegister: callable used to register idle-loop callbacks;
            used below to poll inotify once a second.
            NOTE(review): a None value would fail at the registration call
            further down — confirm callers always pass it.
        """
        self.recipecaches = None
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        # Snapshot of the interpreter state; restored by
        # initConfigurationData() so metadata-driven sys.path/sys.modules
        # changes do not leak across resets.
        self.orig_syspath = sys.path.copy()
        self.orig_sysmodules = [*sys.modules]

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.idleCallBackRegister = idleCallBackRegister

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        # Two watcher/notifier pairs: one for configuration files, one for
        # recipe/parse files; both created lazily by the setup*Watcher methods.
        self.configwatcher = None
        self.confignotifier = None

        # Events that can invalidate parsed metadata.
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO

        self.watcher = None
        self.notifier = None

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        # Paths reported by inotify since the last cache validation.
        self.inotify_modified_files = []

        def _process_inotify_updates(server, cooker, halt):
            # Idle callback: drain inotify and ask to be called again in 1s.
            cooker.process_inotify_updates()
            return 1.0

        self.idleCallBackRegister(_process_inotify_updates, self)

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            # stdout has no fileno (e.g. replaced by a StringIO); nothing to fix.
            pass

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()
222
223    def init_configdata(self):
224        if not hasattr(self, "data"):
225            self.initConfigurationData()
226            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
227            sys.stdout.flush()
228            self.handlePRServ()
229
230    def setupConfigWatcher(self):
231        if self.configwatcher:
232            self.configwatcher.close()
233            self.confignotifier = None
234            self.configwatcher = None
235        self.configwatcher = pyinotify.WatchManager()
236        self.configwatcher.bbseen = set()
237        self.configwatcher.bbwatchedfiles = set()
238        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
239
240    def setupParserWatcher(self):
241        if self.watcher:
242            self.watcher.close()
243            self.notifier = None
244            self.watcher = None
245        self.watcher = pyinotify.WatchManager()
246        self.watcher.bbseen = set()
247        self.watcher.bbwatchedfiles = set()
248        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
249
250    def process_inotify_updates(self):
251        for n in [self.confignotifier, self.notifier]:
252            if n and n.check_events(timeout=0):
253                # read notified events and enqeue them
254                n.read_events()
255                n.process_events()
256
257    def config_notifications(self, event):
258        if event.maskname == "IN_Q_OVERFLOW":
259            bb.warn("inotify event queue overflowed, invalidating caches.")
260            self.parsecache_valid = False
261            self.baseconfig_valid = False
262            bb.parse.clear_cache()
263            return
264        if not event.pathname in self.configwatcher.bbwatchedfiles:
265            return
266        if "IN_ISDIR" in event.maskname:
267            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
268                if event.pathname in self.configwatcher.bbseen:
269                    self.configwatcher.bbseen.remove(event.pathname)
270                # Could remove all entries starting with the directory but for now...
271                bb.parse.clear_cache()
272        if not event.pathname in self.inotify_modified_files:
273            self.inotify_modified_files.append(event.pathname)
274        self.baseconfig_valid = False
275
276    def notifications(self, event):
277        if event.maskname == "IN_Q_OVERFLOW":
278            bb.warn("inotify event queue overflowed, invalidating caches.")
279            self.parsecache_valid = False
280            bb.parse.clear_cache()
281            return
282        if event.pathname.endswith("bitbake-cookerdaemon.log") \
283                or event.pathname.endswith("bitbake.lock"):
284            return
285        if "IN_ISDIR" in event.maskname:
286            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
287                if event.pathname in self.watcher.bbseen:
288                    self.watcher.bbseen.remove(event.pathname)
289                # Could remove all entries starting with the directory but for now...
290                bb.parse.clear_cache()
291        if not event.pathname in self.inotify_modified_files:
292            self.inotify_modified_files.append(event.pathname)
293        self.parsecache_valid = False
294
    def add_filewatch(self, deps, watcher=None, dirs=False):
        """Add inotify watches covering a list of file dependencies.

        deps: iterable of tuples whose first element is a path (the
            __depends/__base_depends format).
        watcher: WatchManager to add to; defaults to the parser watcher.
        dirs: if True, entries are watched as directories themselves rather
            than watching their containing directory.
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        # Record the original (missing) target so notifications
                        # on the parent can be mapped back to it.
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory missing: walk up to the nearest existing parent.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
331
332    def sigterm_exception(self, signum, stackframe):
333        if signum == signal.SIGTERM:
334            bb.warn("Cooker received SIGTERM, shutting down...")
335        elif signum == signal.SIGHUP:
336            bb.warn("Cooker received SIGHUP, shutting down...")
337        self.state = state.forceshutdown
338
339    def setFeatures(self, features):
340        # we only accept a new feature set if we're in state initial, so we can reset without problems
341        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
342            raise Exception("Illegal state for feature set change")
343        original_featureset = list(self.featureset)
344        for feature in features:
345            self.featureset.setFeature(feature)
346        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
347        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
348            self.reset()
349
    def initConfigurationData(self):
        """Build (or rebuild) the base configuration datastore.

        Restores sys.path/sys.modules to their startup snapshot, re-creates
        the config file watcher, selects the recipe-info cache classes and
        parses the base configuration into self.data / self.databuilder.
        """

        self.state = state.initial
        self.caches_array = []

        # Undo any sys.path/sys.modules changes made since startup so a
        # re-parse starts from a clean interpreter state.
        sys.path = self.orig_syspath.copy()
        for mod in [*sys.modules]:
            if mod not in self.orig_sysmodules:
                del sys.modules[mod]

        self.setupConfigWatcher()

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Entries are "module:ClassName" strings; resolve each to a class.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        # Tracking was only needed while parsing the base configuration.
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Watch every file the base configuration depended on so edits
        # invalidate the cached configuration.
        for mc in self.databuilder.mcdata.values():
            mc.renameVar("__depends", "__base_depends")
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self.baseconfig_valid = True
        self.parsecache_valid = False
408
    def handlePRServ(self):
        """Start the PR server and, when BB_HASHSERVE is "auto", a local
        hash equivalence server bound to a unix domain socket.

        Fatal if the PR server cannot be started.
        """
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    import socket
                    try:
                        # Probe the upstream server with a 5s timeout; failure
                        # is only a warning since we can run without it.
                        sock = socket.create_connection(upstream.split(":"), 5)
                        sock.close()
                    except socket.error as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                                 % (upstream, repr(e)))

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process()
            # Point every datastore (including each multiconfig) at the
            # local hash server socket.
            self.data.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
            self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)
445
446    def enableDataTracking(self):
447        self.configuration.tracking = True
448        if hasattr(self, "data"):
449            self.data.enableTracking()
450
451    def disableDataTracking(self):
452        self.configuration.tracking = False
453        if hasattr(self, "data"):
454            self.data.disableTracking()
455
456    def parseConfiguration(self):
457        self.updateCacheSync()
458
459        # Change nice level if we're asked to
460        nice = self.data.getVar("BB_NICE_LEVEL")
461        if nice:
462            curnice = os.nice(0)
463            nice = int(nice) - curnice
464            buildlog.verbose("Renice to %s " % os.nice(nice))
465
466        if self.recipecaches:
467            del self.recipecaches
468        self.multiconfigs = self.databuilder.mcdata.keys()
469        self.recipecaches = {}
470        for mc in self.multiconfigs:
471            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
472
473        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
474
475        self.parsecache_valid = False
476
477    def updateConfigOpts(self, options, environment, cmdline):
478        self.ui_cmdline = cmdline
479        clean = True
480        for o in options:
481            if o in ['prefile', 'postfile']:
482                # Only these options may require a reparse
483                try:
484                    if getattr(self.configuration, o) == options[o]:
485                        # Value is the same, no need to mark dirty
486                        continue
487                except AttributeError:
488                    pass
489                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
490                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
491                clean = False
492            if hasattr(self.configuration, o):
493                setattr(self.configuration, o, options[o])
494
495        if self.configuration.writeeventlog:
496            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
497                bb.event.unregister_UIHhandler(self.eventlog[1])
498            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
499                # we log all events to a file if so directed
500                # register the log file writer as UI Handler
501                writer = EventWriter(self, self.configuration.writeeventlog)
502                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
503                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
504
505        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
506        bb.msg.loggerDefaultDomains = self.configuration.debug_domains
507
508        if hasattr(self, "data"):
509            origenv = bb.data.init()
510            for k in environment:
511                origenv.setVar(k, environment[k])
512            self.data.setVar("BB_ORIGENV", origenv)
513
514        for k in bb.utils.approved_variables():
515            if k in environment and k not in self.configuration.env:
516                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
517                self.configuration.env[k] = environment[k]
518                clean = False
519            if k in self.configuration.env and k not in environment:
520                logger.debug("Updating environment variable %s (deleted)" % (k))
521                del self.configuration.env[k]
522                clean = False
523            if k not in self.configuration.env and k not in environment:
524                continue
525            if environment[k] != self.configuration.env[k]:
526                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
527                self.configuration.env[k] = environment[k]
528                clean = False
529
530        # Now update all the variables not in the datastore to match
531        self.configuration.env = environment
532
533        if not clean:
534            logger.debug("Base environment change, triggering reparse")
535            self.reset()
536
537    def runCommands(self, server, data, halt):
538        """
539        Run any queued asynchronous command
540        This is done by the idle handler so it runs in true context rather than
541        tied to any UI.
542        """
543
544        return self.command.runAsyncCommand()
545
546    def showVersions(self):
547
548        (latest_versions, preferred_versions, required) = self.findProviders()
549
550        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
551        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")
552
553        for p in sorted(self.recipecaches[''].pkg_pn):
554            preferred = preferred_versions[p]
555            latest = latest_versions[p]
556            requiredstr = ""
557            preferredstr = ""
558            if required[p]:
559                if preferred[0] is not None:
560                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
561                else:
562                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
563            else:
564                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
565
566            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
567
568            if preferred == latest:
569                preferredstr = ""
570
571            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
572
    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment

        buildfile: a specific recipe file to show the environment for.
        pkgs_to_build: alternatively, a single target name (possibly
            "mc:<config>" or "mc:<config>:<target>") to resolve to a recipe.
        With neither, the global (multiconfig) datastore is shown.
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        # History display needs tracking enabled; remember the UI's original
        # setting so it can be restored at the end.
        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        def mc_base(p):
            # Return the config name for a bare "mc:<name>" target, else None.
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                # Resolve the target to a concrete recipe file via taskdata.
                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            # No recipe: show the requested multiconfig's global datastore.
            if not mc in self.databuilder.mcdata:
                bb.fatal('Not multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        # Restore the tracking setting the UI originally requested.
        if not orig_tracking:
            self.disableDataTracking()
            self.reset()
649
    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build

        pkgs_to_build: target names, optionally in "mc:<config>:<target>" or
            "<target>:do_<task>" form; "mc:*:<target>" expands to every config.
        task: default task (None means the configured default command).
        halt: passed through to TaskData — stop on unbuildable targets.
        allowincomplete: permit taskdata with unresolved dependencies.

        Returns (taskdata dict keyed by multiconfig name,
                 runlist of [mc, target, taskname, filename]).
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    # Only one wildcarded target is supported per invocation.
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        # One TaskData and one expanded datastore copy per multiconfig.
        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            # Split off the "mc:<config>:" prefix and any ":do_<task>" suffix.
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to do check providers if there are no mcdeps or not an mc build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            # (iterate until no new cross-config dependencies appear).
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies on time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for k in mcdeps:
                    if k in seen:
                        continue
                    # mcdepends entries look like "mc:<from>:<to>:<target>:<task>".
                    l = k.split(':')
                    depmc = l[2]
                    if depmc not in self.multiconfigs:
                        bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
                    else:
                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                        taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist
764
765    def prepareTreeData(self, pkgs_to_build, task):
766        """
767        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
768        """
769
770        # We set halt to False here to prevent unbuildable targets raising
771        # an exception when we're just generating data
772        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
773
774        return runlist, taskdata
775
776    ######## WARNING : this function requires cache_extra to be enabled ########
777
778    def generateTaskDepTreeData(self, pkgs_to_build, task):
779        """
780        Create a dependency graph of pkgs_to_build including reverse dependency
781        information.
782        """
783        if not task.startswith("do_"):
784            task = "do_%s" % task
785
786        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
787        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
788        rq.rqdata.prepare()
789        return self.buildDependTree(rq, taskdata)
790
791    @staticmethod
792    def add_mc_prefix(mc, pn):
793        if mc:
794            return "mc:%s:%s" % (mc, pn)
795        return pn
796
797    def buildDependTree(self, rq, taskdata):
798        seen_fns = []
799        depend_tree = {}
800        depend_tree["depends"] = {}
801        depend_tree["tdepends"] = {}
802        depend_tree["pn"] = {}
803        depend_tree["rdepends-pn"] = {}
804        depend_tree["packages"] = {}
805        depend_tree["rdepends-pkg"] = {}
806        depend_tree["rrecs-pkg"] = {}
807        depend_tree['providermap'] = {}
808        depend_tree["layer-priorities"] = self.bbfile_config_priorities
809
810        for mc in taskdata:
811            for name, fn in list(taskdata[mc].get_providermap().items()):
812                pn = self.recipecaches[mc].pkg_fn[fn]
813                pn = self.add_mc_prefix(mc, pn)
814                if name != pn:
815                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
816                    depend_tree['providermap'][name] = (pn, version)
817
818        for tid in rq.rqdata.runtaskentries:
819            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
820            pn = self.recipecaches[mc].pkg_fn[taskfn]
821            pn = self.add_mc_prefix(mc, pn)
822            version  = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
823            if pn not in depend_tree["pn"]:
824                depend_tree["pn"][pn] = {}
825                depend_tree["pn"][pn]["filename"] = taskfn
826                depend_tree["pn"][pn]["version"] = version
827                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
828
829                # if we have extra caches, list all attributes they bring in
830                extra_info = []
831                for cache_class in self.caches_array:
832                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
833                        cachefields = getattr(cache_class, 'cachefields', [])
834                        extra_info = extra_info + cachefields
835
836                # for all attributes stored, add them to the dependency tree
837                for ei in extra_info:
838                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
839
840
841            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
842            if not dotname in depend_tree["tdepends"]:
843                depend_tree["tdepends"][dotname] = []
844            for dep in rq.rqdata.runtaskentries[tid].depends:
845                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
846                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
847                if depmc:
848                    depmc = "mc:" + depmc + ":"
849                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
850            if taskfn not in seen_fns:
851                seen_fns.append(taskfn)
852                packages = []
853
854                depend_tree["depends"][pn] = []
855                for dep in taskdata[mc].depids[taskfn]:
856                    depend_tree["depends"][pn].append(dep)
857
858                depend_tree["rdepends-pn"][pn] = []
859                for rdep in taskdata[mc].rdepids[taskfn]:
860                    depend_tree["rdepends-pn"][pn].append(rdep)
861
862                rdepends = self.recipecaches[mc].rundeps[taskfn]
863                for package in rdepends:
864                    depend_tree["rdepends-pkg"][package] = []
865                    for rdepend in rdepends[package]:
866                        depend_tree["rdepends-pkg"][package].append(rdepend)
867                    packages.append(package)
868
869                rrecs = self.recipecaches[mc].runrecs[taskfn]
870                for package in rrecs:
871                    depend_tree["rrecs-pkg"][package] = []
872                    for rdepend in rrecs[package]:
873                        depend_tree["rrecs-pkg"][package].append(rdepend)
874                    if not package in packages:
875                        packages.append(package)
876
877                for package in packages:
878                    if package not in depend_tree["packages"]:
879                        depend_tree["packages"][package] = {}
880                        depend_tree["packages"][package]["pn"] = pn
881                        depend_tree["packages"][package]["filename"] = taskfn
882                        depend_tree["packages"][package]["version"] = version
883
884        return depend_tree
885
886    ######## WARNING : this function requires cache_extra to be enabled ########
887    def generatePkgDepTreeData(self, pkgs_to_build, task):
888        """
889        Create a dependency tree of pkgs_to_build, returning the data.
890        """
891        if not task.startswith("do_"):
892            task = "do_%s" % task
893
894        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
895
896        seen_fns = []
897        depend_tree = {}
898        depend_tree["depends"] = {}
899        depend_tree["pn"] = {}
900        depend_tree["rdepends-pn"] = {}
901        depend_tree["rdepends-pkg"] = {}
902        depend_tree["rrecs-pkg"] = {}
903
904        # if we have extra caches, list all attributes they bring in
905        extra_info = []
906        for cache_class in self.caches_array:
907            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
908                cachefields = getattr(cache_class, 'cachefields', [])
909                extra_info = extra_info + cachefields
910
911        tids = []
912        for mc in taskdata:
913            for tid in taskdata[mc].taskentries:
914                tids.append(tid)
915
916        for tid in tids:
917            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
918
919            pn = self.recipecaches[mc].pkg_fn[taskfn]
920            pn = self.add_mc_prefix(mc, pn)
921
922            if pn not in depend_tree["pn"]:
923                depend_tree["pn"][pn] = {}
924                depend_tree["pn"][pn]["filename"] = taskfn
925                version  = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
926                depend_tree["pn"][pn]["version"] = version
927                rdepends = self.recipecaches[mc].rundeps[taskfn]
928                rrecs = self.recipecaches[mc].runrecs[taskfn]
929                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
930
931                # for all extra attributes stored, add them to the dependency tree
932                for ei in extra_info:
933                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
934
935            if taskfn not in seen_fns:
936                seen_fns.append(taskfn)
937
938                depend_tree["depends"][pn] = []
939                for dep in taskdata[mc].depids[taskfn]:
940                    pn_provider = ""
941                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
942                        fn_provider = taskdata[mc].build_targets[dep][0]
943                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
944                    else:
945                        pn_provider = dep
946                    pn_provider = self.add_mc_prefix(mc, pn_provider)
947                    depend_tree["depends"][pn].append(pn_provider)
948
949                depend_tree["rdepends-pn"][pn] = []
950                for rdep in taskdata[mc].rdepids[taskfn]:
951                    pn_rprovider = ""
952                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
953                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
954                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
955                    else:
956                        pn_rprovider = rdep
957                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
958                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)
959
960                depend_tree["rdepends-pkg"].update(rdepends)
961                depend_tree["rrecs-pkg"].update(rrecs)
962
963        return depend_tree
964
965    def generateDepTreeEvent(self, pkgs_to_build, task):
966        """
967        Create a task dependency graph of pkgs_to_build.
968        Generate an event with the result
969        """
970        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
971        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
972
973    def generateDotGraphFiles(self, pkgs_to_build, task):
974        """
975        Create a task dependency graph of pkgs_to_build.
976        Save the result to a set of .dot files.
977        """
978
979        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
980
981        with open('pn-buildlist', 'w') as f:
982            for pn in depgraph["pn"]:
983                f.write(pn + "\n")
984        logger.info("PN build list saved to 'pn-buildlist'")
985
986        # Remove old format output files to ensure no confusion with stale data
987        try:
988            os.unlink('pn-depends.dot')
989        except FileNotFoundError:
990            pass
991        try:
992            os.unlink('package-depends.dot')
993        except FileNotFoundError:
994            pass
995        try:
996            os.unlink('recipe-depends.dot')
997        except FileNotFoundError:
998            pass
999
1000        with open('task-depends.dot', 'w') as f:
1001            f.write("digraph depends {\n")
1002            for task in sorted(depgraph["tdepends"]):
1003                (pn, taskname) = task.rsplit(".", 1)
1004                fn = depgraph["pn"][pn]["filename"]
1005                version = depgraph["pn"][pn]["version"]
1006                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
1007                for dep in sorted(depgraph["tdepends"][task]):
1008                    f.write('"%s" -> "%s"\n' % (task, dep))
1009            f.write("}\n")
1010        logger.info("Task dependencies saved to 'task-depends.dot'")
1011
    def show_appends_with_no_recipes(self):
        """
        Report .bbappend files that were not applied to any recipe in any
        multiconfig. Emits a warning or a fatal error depending on
        BB_DANGLINGAPPENDS_WARNONLY.
        """
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            # Any append known to the collection but never applied is dangling.
            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                        '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            # NOTE(review): 'mc' here is the leftover loop variable from the
            # sorted() loop above, i.e. the last multiconfig's datastore is
            # consulted for the setting — presumably the variable is expected
            # to be set globally; confirm this is intentional.
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)
1044
1045    def handlePrefProviders(self):
1046
1047        for mc in self.multiconfigs:
1048            localdata = data.createCopy(self.databuilder.mcdata[mc])
1049            bb.data.expandKeys(localdata)
1050
1051            # Handle PREFERRED_PROVIDERS
1052            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
1053                try:
1054                    (providee, provider) = p.split(':')
1055                except:
1056                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
1057                    continue
1058                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
1059                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
1060                self.recipecaches[mc].preferred[providee] = provider
1061
1062    def findConfigFilePath(self, configfile):
1063        """
1064        Find the location on disk of configfile and if it exists and was parsed by BitBake
1065        emit the ConfigFilePathFound event with the path to the file.
1066        """
1067        path = bb.cookerdata.findConfigFile(configfile, self.data)
1068        if not path:
1069            return
1070
1071        # Generate a list of parsed configuration files by searching the files
1072        # listed in the __depends and __base_depends variables with a .conf suffix.
1073        conffiles = []
1074        dep_files = self.data.getVar('__base_depends', False) or []
1075        dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1076
1077        for f in dep_files:
1078            if f[0].endswith(".conf"):
1079                conffiles.append(f[0])
1080
1081        _, conf, conffile = path.rpartition("conf/")
1082        match = os.path.join(conf, conffile)
1083        # Try and find matches for conf/conffilename.conf as we don't always
1084        # have the full path to the file.
1085        for cfg in conffiles:
1086            if cfg.endswith(match):
1087                bb.event.fire(bb.event.ConfigFilePathFound(path),
1088                              self.data)
1089                break
1090
1091    def findFilesMatchingInDir(self, filepattern, directory):
1092        """
1093        Searches for files containing the substring 'filepattern' which are children of
1094        'directory' in each BBPATH. i.e. to find all rootfs package classes available
1095        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1096        or to find all machine configuration files one could call:
1097        findFilesMatchingInDir(self, '.conf', 'conf/machine')
1098        """
1099
1100        matches = []
1101        bbpaths = self.data.getVar('BBPATH').split(':')
1102        for path in bbpaths:
1103            dirpath = os.path.join(path, directory)
1104            if os.path.exists(dirpath):
1105                for root, dirs, files in os.walk(dirpath):
1106                    for f in files:
1107                        if filepattern in f:
1108                            matches.append(f)
1109
1110        if matches:
1111            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1112
1113    def testCookerCommandEvent(self, filepattern):
1114        # Dummy command used by OEQA selftest to test tinfoil without IO
1115        matches = ["A", "B"]
1116        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1117
1118    def findProviders(self, mc=''):
1119        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1120
1121    def findBestProvider(self, pn, mc=''):
1122        if pn in self.recipecaches[mc].providers:
1123            filenames = self.recipecaches[mc].providers[pn]
1124            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
1125            if eligible is not None:
1126                filename = eligible[0]
1127            else:
1128                filename = None
1129            return None, None, None, filename
1130        elif pn in self.recipecaches[mc].pkg_pn:
1131            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1132            if required and preferred_file is None:
1133                return None, None, None, None
1134            return (latest, latest_f, preferred_ver, preferred_file)
1135        else:
1136            return None, None, None, None
1137
1138    def findConfigFiles(self, varname):
1139        """
1140        Find config files which are appropriate values for varname.
1141        i.e. MACHINE, DISTRO
1142        """
1143        possible = []
1144        var = varname.lower()
1145
1146        data = self.data
1147        # iterate configs
1148        bbpaths = data.getVar('BBPATH').split(':')
1149        for path in bbpaths:
1150            confpath = os.path.join(path, "conf", var)
1151            if os.path.exists(confpath):
1152                for root, dirs, files in os.walk(confpath):
1153                    # get all child files, these are appropriate values
1154                    for f in files:
1155                        val, sep, end = f.rpartition('.')
1156                        if end == 'conf':
1157                            possible.append(val)
1158
1159        if possible:
1160            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1161
1162    def findInheritsClass(self, klass):
1163        """
1164        Find all recipes which inherit the specified class
1165        """
1166        pkg_list = []
1167
1168        for pfn in self.recipecaches[''].pkg_fn:
1169            inherits = self.recipecaches[''].inherits.get(pfn, None)
1170            if inherits and klass in inherits:
1171                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
1172
1173        return pkg_list
1174
1175    def generateTargetsTree(self, klass=None, pkgs=None):
1176        """
1177        Generate a dependency tree of buildable targets
1178        Generate an event with the result
1179        """
1180        # if the caller hasn't specified a pkgs list default to universe
1181        if not pkgs:
1182            pkgs = ['universe']
1183        # if inherited_class passed ensure all recipes which inherit the
1184        # specified class are included in pkgs
1185        if klass:
1186            extra_pkgs = self.findInheritsClass(klass)
1187            pkgs = pkgs + extra_pkgs
1188
1189        # generate a dependency tree for all our packages
1190        tree = self.generatePkgDepTreeData(pkgs, 'build')
1191        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1192
1193    def interactiveMode( self ):
1194        """Drop off into a shell"""
1195        try:
1196            from bb import shell
1197        except ImportError:
1198            parselog.exception("Interactive mode not available")
1199            raise bb.BBHandledException()
1200        else:
1201            shell.start( self )
1202
1203
1204    def handleCollections(self, collections):
1205        """Handle collections"""
1206        errors = False
1207        self.bbfile_config_priorities = []
1208        if collections:
1209            collection_priorities = {}
1210            collection_depends = {}
1211            collection_list = collections.split()
1212            min_prio = 0
1213            for c in collection_list:
1214                bb.debug(1,'Processing %s in collection list' % (c))
1215
1216                # Get collection priority if defined explicitly
1217                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
1218                if priority:
1219                    try:
1220                        prio = int(priority)
1221                    except ValueError:
1222                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1223                        errors = True
1224                    if min_prio == 0 or prio < min_prio:
1225                        min_prio = prio
1226                    collection_priorities[c] = prio
1227                else:
1228                    collection_priorities[c] = None
1229
1230                # Check dependencies and store information for priority calculation
1231                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
1232                if deps:
1233                    try:
1234                        depDict = bb.utils.explode_dep_versions2(deps)
1235                    except bb.utils.VersionStringException as vse:
1236                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1237                    for dep, oplist in list(depDict.items()):
1238                        if dep in collection_list:
1239                            for opstr in oplist:
1240                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
1241                                (op, depver) = opstr.split()
1242                                if layerver:
1243                                    try:
1244                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
1245                                    except bb.utils.VersionStringException as vse:
1246                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1247                                    if not res:
1248                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1249                                        errors = True
1250                                else:
1251                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1252                                    errors = True
1253                        else:
1254                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1255                            errors = True
1256                    collection_depends[c] = list(depDict.keys())
1257                else:
1258                    collection_depends[c] = []
1259
1260                # Check recommends and store information for priority calculation
1261                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
1262                if recs:
1263                    try:
1264                        recDict = bb.utils.explode_dep_versions2(recs)
1265                    except bb.utils.VersionStringException as vse:
1266                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1267                    for rec, oplist in list(recDict.items()):
1268                        if rec in collection_list:
1269                            if oplist:
1270                                opstr = oplist[0]
1271                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
1272                                if layerver:
1273                                    (op, recver) = opstr.split()
1274                                    try:
1275                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
1276                                    except bb.utils.VersionStringException as vse:
1277                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1278                                    if not res:
1279                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1280                                        continue
1281                                else:
1282                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1283                                    continue
1284                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1285                            collection_depends[c].append(rec)
1286                        else:
1287                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1288
1289            # Recursively work out collection priorities based on dependencies
1290            def calc_layer_priority(collection):
1291                if not collection_priorities[collection]:
1292                    max_depprio = min_prio
1293                    for dep in collection_depends[collection]:
1294                        calc_layer_priority(dep)
1295                        depprio = collection_priorities[dep]
1296                        if depprio > max_depprio:
1297                            max_depprio = depprio
1298                    max_depprio += 1
1299                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1300                    collection_priorities[collection] = max_depprio
1301
1302            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1303            for c in collection_list:
1304                calc_layer_priority(c)
1305                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
1306                if regex is None:
1307                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
1308                    errors = True
1309                    continue
1310                elif regex == "":
1311                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
1312                    cre = re.compile('^NULL$')
1313                    errors = False
1314                else:
1315                    try:
1316                        cre = re.compile(regex)
1317                    except re.error:
1318                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1319                        errors = True
1320                        continue
1321                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
1322        if errors:
1323            # We've already printed the actual error(s)
1324            raise CollectionError("Errors during parsing layer configuration")
1325
1326    def buildSetVars(self):
1327        """
1328        Setup any variables needed before starting a build
1329        """
1330        t = time.gmtime()
1331        for mc in self.databuilder.mcdata:
1332            ds = self.databuilder.mcdata[mc]
1333            if not ds.getVar("BUILDNAME", False):
1334                ds.setVar("BUILDNAME", "${DATE}${TIME}")
1335            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1336            ds.setVar("DATE", time.strftime('%Y%m%d', t))
1337            ds.setVar("TIME", time.strftime('%H%M%S', t))
1338
1339    def reset_mtime_caches(self):
1340        """
1341        Reset mtime caches - this is particularly important when memory resident as something
1342        which is cached is not unlikely to have changed since the last invocation (e.g. a
1343        file associated with a recipe might have been modified by the user).
1344        """
1345        build.reset_cache()
1346        bb.fetch._checksum_cache.mtime_cache.clear()
1347        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1348        if siggen_cache:
1349            bb.parse.siggen.checksum_cache.mtime_cache.clear()
1350
1351    def matchFiles(self, bf, mc=''):
1352        """
1353        Find the .bb files which match the expression in 'buildfile'.
1354        """
1355        if bf.startswith("/") or bf.startswith("../"):
1356            bf = os.path.abspath(bf)
1357
1358        self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1359        filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1360        try:
1361            os.stat(bf)
1362            bf = os.path.abspath(bf)
1363            return [bf]
1364        except OSError:
1365            regexp = re.compile(bf)
1366            matches = []
1367            for f in filelist:
1368                if regexp.search(f) and os.path.isfile(f):
1369                    matches.append(f)
1370            return matches
1371
1372    def matchFile(self, buildfile, mc=''):
1373        """
1374        Find the .bb file which matches the expression in 'buildfile'.
1375        Raise an error if multiple files
1376        """
1377        matches = self.matchFiles(buildfile, mc)
1378        if len(matches) != 1:
1379            if matches:
1380                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1381                if matches:
1382                    for f in matches:
1383                        msg += "\n    %s" % f
1384                parselog.error(msg)
1385            else:
1386                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1387            raise NoSpecificMatch
1388        return matches[0]
1389
    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile

        Fires BuildInit, warns that dependency handling is skipped in
        buildfile mode, then delegates to buildFileInternal().
        """
        bb.event.fire(bb.event.BuildInit(), self.data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        self.buildFileInternal(buildfile, task)
1401
    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
        """
        Build the file matching regexp buildfile

        Arguments:
        - buildfile: recipe path/regex, possibly in "virtual:<cls>:<fn>" or
          "mc:<mc>:<fn>" form
        - task: task to run (with or without the "do_" prefix); None selects
          the configured default task
        - fireevents: fire BuildStarted/BuildCompleted events when True
        - quietlog: temporarily raise the runqueue log level to WARNING

        The runqueue is executed asynchronously via a registered idle
        callback; this method returns once the callback is registered.
        """

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn, mc)

        self.buildSetVars()
        self.reset_mtime_caches()

        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)

        # Parse just this one recipe (plus its bbappends)
        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
        infos = dict(infos)

        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1
        self.configuration.limited_deps = True

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
        self.recipecaches[mc].runrecs[fn] = defaultdict(list)

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)

        if quietlog:
            # Remember the previous level so the idle handler can restore it
            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
            bb.runqueue.logger.setLevel(logging.WARNING)

        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
        if fireevents:
            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, halt):
            # Idle handler: pump the runqueue until it completes or is
            # stopped. Returns False when finished (which deregisters the
            # callback), True or a delay value while work remains.

            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False

            if not retval:
                if fireevents:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
                self.command.finishAsyncCommand(msg)
                # We trashed self.recipecaches above
                self.parsecache_valid = False
                self.configuration.limited_deps = False
                bb.parse.siggen.reset(self.data)
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False
            if retval is True:
                return True
            return retval

        self.idleCallBackRegister(buildFileIdle, rq)
1514
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified

        Arguments:
        - targets: list of target names; may include 'world', 'universe'
          and 'mc:<mc>:<target>' forms
        - task: task to run (with or without the "do_" prefix); None selects
          the configured default task

        The runqueue is executed asynchronously via a registered idle
        callback; this method returns once the callback is registered.
        """

        def buildTargetsIdle(server, rq, halt):
            # Idle handler: pump the runqueue until it completes or is
            # stopped, then fire BuildCompleted for every multiconfig.
            # Returns False when finished (deregisters the callback).
            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)

        # buildname is captured by the buildTargetsIdle closure above
        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.idleCallBackRegister(buildTargetsIdle, rq)
1585
1586
1587    def getAllKeysWithFlags(self, flaglist):
1588        dump = {}
1589        for k in self.data.keys():
1590            try:
1591                expand = True
1592                flags = self.data.getVarFlags(k)
1593                if flags and "func" in flags and "python" in flags:
1594                    expand = False
1595                v = self.data.getVar(k, expand)
1596                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1597                    dump[k] = {
1598    'v' : str(v) ,
1599    'history' : self.data.varhistory.variable(k),
1600                    }
1601                    for d in flaglist:
1602                        if flags and d in flags:
1603                            dump[k][d] = flags[d]
1604                        else:
1605                            dump[k][d] = None
1606            except Exception as e:
1607                print(e)
1608        return dump
1609
1610
    def updateCacheSync(self):
        """
        Synchronous part of a cache refresh: drop parse caches for files that
        inotify reported as modified and, if the base configuration has been
        invalidated, reload it. No-op while a build is running.
        """
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
            if p in bb.parse.BBHandler.cached_statements:
                del bb.parse.BBHandler.cached_statements[p]
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug("Reloading base configuration data")
            self.initConfigurationData()
            self.handlePRServ()
1626
1627    # This is called for all async commands when self.state != running
    def updateCache(self):
        """
        Drive (re)parsing of the metadata: refresh the configuration if
        needed, build the recipe file collections and a CookerParser, then
        advance parsing one step per call. Returns True while parsing is
        still in progress, None once complete (state moves to running).
        Raises bb.BBHandledException on shutdown or parse errors.
        """
        if self.state == state.running:
            return

        # Abort cleanly if a shutdown was requested mid-parse
        if self.state in (state.shutdown, state.forceshutdown, state.error):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False)
                self.parser.final_cleanup()
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        # (Re)build the parser only when the parse cache has been invalidated
        if self.state != state.parsing and not self.parsecache_valid:
            self.setupParserWatcher()

            bb.parse.siggen.reset(self.data)
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

            # Seed each multiconfig's ignored dependencies from ASSUME_PROVIDED
            # plus any extras supplied on the command line
            for mc in self.multiconfigs:
                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                self.recipecaches[mc].ignored_dependencies = set(ignore.split())

                for dep in self.configuration.extra_assume_provided:
                    self.recipecaches[mc].ignored_dependencies.add(dep)

            self.collections = {}

            mcfilelist = {}
            total_masked = 0
            searchdirs = set()
            for mc in self.multiconfigs:
                self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])

                mcfilelist[mc] = filelist
                total_masked += masked
                searchdirs |= set(search)

            # Add inotify watches for directories searched for bb/bbappend files
            for dirent in searchdirs:
                self.add_filewatch([[dirent]], dirs=True)

            self.parser = CookerParser(self, mcfilelist, total_masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            for mc in self.multiconfigs:
                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            for mc in self.multiconfigs:
                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
                bb.event.fire(event, self.databuilder.mcdata[mc])
            return None

        return True
1697
1698    def checkPackages(self, pkgs_to_build, task=None):
1699
1700        # Return a copy, don't modify the original
1701        pkgs_to_build = pkgs_to_build[:]
1702
1703        if not pkgs_to_build:
1704            raise NothingToBuild
1705
1706        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
1707        for pkg in pkgs_to_build.copy():
1708            if pkg in ignore:
1709                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1710            if pkg.startswith("multiconfig:"):
1711                pkgs_to_build.remove(pkg)
1712                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
1713
1714        if 'world' in pkgs_to_build:
1715            pkgs_to_build.remove('world')
1716            for mc in self.multiconfigs:
1717                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1718                for t in self.recipecaches[mc].world_target:
1719                    if mc:
1720                        t = "mc:" + mc + ":" + t
1721                    pkgs_to_build.append(t)
1722
1723        if 'universe' in pkgs_to_build:
1724            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1725            parselog.debug(1, "collating packages for \"universe\"")
1726            pkgs_to_build.remove('universe')
1727            for mc in self.multiconfigs:
1728                for t in self.recipecaches[mc].universe_target:
1729                    if task:
1730                        foundtask = False
1731                        for provider_fn in self.recipecaches[mc].providers[t]:
1732                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1733                                foundtask = True
1734                                break
1735                        if not foundtask:
1736                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1737                            continue
1738                    if mc:
1739                        t = "mc:" + mc + ":" + t
1740                    pkgs_to_build.append(t)
1741
1742        return pkgs_to_build
1743
1744    def pre_serve(self):
1745        return
1746
    def post_serve(self):
        """
        Tear down after the server stops serving: force a shutdown, stop the
        PR service, signature generator and hash server, and notify any
        remaining clients with a CookerExit event.
        """
        self.shutdown(force=True)
        prserv.serv.auto_shutdown()
        if hasattr(bb.parse, "siggen"):
            bb.parse.siggen.exit()
        if self.hashserv:
            self.hashserv.process.terminate()
            self.hashserv.process.join()
        if hasattr(self, "data"):
            bb.event.fire(CookerExit(), self.data)
1757
1758    def shutdown(self, force = False):
1759        if force:
1760            self.state = state.forceshutdown
1761        else:
1762            self.state = state.shutdown
1763
1764        if self.parser:
1765            self.parser.shutdown(clean=not force)
1766            self.parser.final_cleanup()
1767
    def finishcommand(self):
        """Return the cooker to the idle/initial state once a command completes."""
        self.state = state.initial
1770
    def reset(self):
        """
        Reset the cooker: shut down the signature generator, then rebuild the
        configuration data and restart the PR service.
        """
        if hasattr(bb.parse, "siggen"):
            bb.parse.siggen.exit()
        self.initConfigurationData()
        self.handlePRServ()
1776
1777    def clientComplete(self):
1778        """Called when the client is done using the server"""
1779        self.finishcommand()
1780        self.extraconfigdata = {}
1781        self.command.reset()
1782        if hasattr(self, "data"):
1783           self.databuilder.reset()
1784           self.data = self.databuilder.data
1785        self.parsecache_valid = False
1786        self.baseconfig_valid = False
1787
1788
class CookerExit(bb.event.Event):
    """
    Event fired to notify clients that the Cooker is shutting down.
    """

    def __init__(self):
        super().__init__()
1796
1797
class CookerCollectFiles(object):
    """
    Collects the set of .bb and .bbappend files for one multiconfig,
    honouring BBFILES ordering, BBMASK and the per-layer BBFILE_PATTERN
    priority regexes.
    """
    def __init__(self, priorities, mc=''):
        self.mc = mc
        self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
        # We need to sort the list with the longest pattern first, and so on to
        # the shortest.  This allows nested layers to be properly evaluated.
        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)

    def calc_bbfile_priority(self, filename):
        """
        Return (priority, regex) for the first collection pattern matching
        filename, or (0, None) if no collection claims the file.
        """
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                return pri, regex
        return 0, None

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # str.endswith() requires a str or a tuple of str; passing a list
            # raises TypeError, so the suffixes must be a tuple here
            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files

        Returns (bbfiles, masked, searchdirs): the priority-ordered recipe
        file list, the number of files skipped via BBMASK, and the set of
        directories visited (used by the caller to add inotify watches).
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES") or "").split()

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
        config.setVar("BBFILES_PRIORITIZED", " ".join(files))

        if not files:
            files = self.get_bbfiles()

        if not files:
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this, this is horrid. We intercept the os.listdir()
        # (or os.scandir() for python 3.6+) calls while we run glob().
        origlistdir = os.listdir
        if hasattr(os, 'scandir'):
            origscandir = os.scandir
        searchdirs = []

        def ourlistdir(d):
            searchdirs.append(d)
            return origlistdir(d)

        def ourscandir(d):
            searchdirs.append(d)
            return origscandir(d)

        os.listdir = ourlistdir
        if hasattr(os, 'scandir'):
            os.scandir = ourscandir
        try:
            # Can't use set here as order is important
            newfiles = []
            for f in files:
                if os.path.isdir(f):
                    dirfiles = self.find_bbfiles(f)
                    for g in dirfiles:
                        if g not in newfiles:
                            newfiles.append(g)
                else:
                    globbed = glob.glob(f)
                    if not globbed and os.path.exists(f):
                        globbed = [f]
                    # glob gives files in order on disk. Sort to be deterministic.
                    for g in sorted(globbed):
                        if g not in newfiles:
                            newfiles.append(g)
        finally:
            # Always restore the real listdir/scandir implementations
            os.listdir = origlistdir
            if hasattr(os, 'scandir'):
                os.scandir = origscandir

        bbmask = config.getVar('BBMASK')

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                # When constructing an older style single regex, it's possible for BBMASK
                # to end up beginning with '|', which matches and masks _everything_.
                if mask.startswith("|"):
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
                    mask = mask[1:]
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except re.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except re.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked, searchdirs)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # A '%' in the bbappend basename acts as a wildcard against the
            # recipe basename prefix (e.g. foo_1.%.bbappend matches foo_1.2.bb)
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return tuple(filelist)

    def collection_priorities(self, pkgfns, fns, d):
        # Return the priorities of the entries in pkgfns
        # Also check that all the regexes in self.bbfile_config_priorities are used
        # (but to do that we need to ensure skipped recipes aren't counted, nor
        # collections in BBFILE_PATTERN_IGNORE_EMPTY)

        priorities = {}
        seen = set()
        matched = set()

        matched_regex = set()
        unmatched_regex = set()
        for _, _, regex, _ in self.bbfile_config_priorities:
            unmatched_regex.add(regex)

        # Calculate priorities for each file
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p], regex = self.calc_bbfile_priority(realfn)
            if regex in unmatched_regex:
                matched_regex.add(regex)
                unmatched_regex.remove(regex)
            seen.add(realfn)
            if regex:
                matched.add(realfn)

        if unmatched_regex:
            # Account for bbappend files
            for b in self.bbappends:
                (bbfile, append) = b
                seen.add(append)

            # Account for skipped recipes
            seen.update(fns)

            seen.difference_update(matched)

            def already_matched(fn):
                for regex in matched_regex:
                    if regex.match(fn):
                        return True
                return False

            for unmatch in unmatched_regex.copy():
                for fn in seen:
                    if unmatch.match(fn):
                        # If the bbappend or file was already matched by another regex, skip it
                        # e.g. for a layer within a layer, the outer regex could match, the inner
                        # regex may match nothing and we should warn about that
                        if already_matched(fn):
                            continue
                        unmatched_regex.remove(unmatch)
                        break

        # Warn about collections whose BBFILE_PATTERN matched nothing at all
        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched_regex:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                    collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
                                                                                             collection, pattern))

        return priorities
2027
class ParsingFailure(Exception):
    """
    Wraps an exception raised while parsing a recipe, recording which
    recipe triggered it.
    """
    def __init__(self, realexception, recipe):
        super().__init__(realexception, recipe)
        self.realexception = realexception
        self.recipe = recipe
2033
class Parser(multiprocessing.Process):
    """
    Worker process which parses a list of recipe jobs and feeds the results
    back to CookerParser through a multiprocessing queue.
    """
    def __init__(self, jobs, results, quit, profile):
        # jobs: list of (mc, cache, filename, appends) tuples to parse
        # results: queue the parse results are written to
        # quit: queue used to signal this worker to exit
        # profile: if True, run under the profiler and dump stats on exit
        self.jobs = jobs
        self.results = results
        self.quit = quit
        multiprocessing.Process.__init__(self)
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile
        self.queue_signals = False
        self.signal_received = []
        self.signal_threadlock = threading.Lock()

    def catch_sig(self, signum, frame):
        # Defer the signal while inside a critical section (see realrun)
        if self.queue_signals:
            self.signal_received.append(signum)
        else:
            self.handle_sig(signum, frame)

    def handle_sig(self, signum, frame):
        # Act on a (possibly deferred) SIGTERM/SIGINT
        if signum == signal.SIGTERM:
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            os.kill(os.getpid(), signal.SIGTERM)
        elif signum == signal.SIGINT:
            signal.default_int_handler(signum, frame)

    def run(self):
        """Process entry point: run realrun(), optionally under a profiler."""

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except ImportError:
            # cProfile is an optional C accelerator; fall back to the pure
            # Python profiler if it is unavailable. (Was a bare except, which
            # would also have swallowed SystemExit/KeyboardInterrupt.)
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        # Signal handling here is hard. We must not terminate any process or thread holding the write
        # lock for the event stream as it will not be released, ever, and things will hang.
        # Python handles signals in the main thread/process but they can be raised from any thread and
        # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
        # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
        # new thread should also do so) and we defer handling but we handle with the local thread lock
        # held (a threading lock, not a multiprocessing one) so that no other thread in the process
        # can be in the critical section.
        signal.signal(signal.SIGTERM, self.catch_sig)
        signal.signal(signal.SIGHUP, signal.SIG_DFL)
        signal.signal(signal.SIGINT, self.catch_sig)
        bb.utils.set_process_name(multiprocessing.current_process().name)
        multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
        multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

        # Results which could not be queued yet (queue momentarily full)
        pending = []
        try:
            while True:
                try:
                    self.quit.get_nowait()
                except queue.Empty:
                    pass
                else:
                    # Told to quit
                    break

                if pending:
                    result = pending.pop()
                else:
                    try:
                        job = self.jobs.pop()
                    except IndexError:
                        # No work left
                        break
                    result = self.parse(*job)
                    # Clear the siggen cache after parsing to control memory usage, its huge
                    bb.parse.siggen.postparsing_clean_cache()
                try:
                    self.results.put(result, timeout=0.25)
                except queue.Full:
                    pending.append(result)
        finally:
            self.results.close()
            self.results.join_thread()

    def parse(self, mc, cache, filename, appends):
        """
        Parse one recipe; returns (True, mc, parse result) on success or
        (True, None, exception) on failure.
        """
        try:
            origfilter = bb.event.LogHandler.filter
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, mc, cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, None, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, None, ParsingFailure(exc, filename)
        finally:
            bb.event.LogHandler.filter = origfilter
2148
2149class CookerParser(object):
    def __init__(self, cooker, mcfilelist, masked):
        """
        Prepare parsing of mcfilelist (a dict mapping multiconfig name to its
        list of recipe files); 'masked' is the number of files already
        skipped via BBMASK, kept for progress reporting.
        """
        self.mcfilelist = mcfilelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0

        self.current = 0
        self.process_names = []

        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        # Split files into those with a valid cache entry (fromcache) and
        # those needing a fresh parse (willparse)
        self.fromcache = set()
        self.willparse = set()
        for mc in self.cooker.multiconfigs:
            for filename in self.mcfilelist[mc]:
                appends = self.cooker.collections[mc].get_file_appends(filename)
                if not self.bb_caches[mc].cacheValid(filename, appends):
                    self.willparse.add((mc, self.bb_caches[mc], filename, appends))
                else:
                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends))

        self.total = len(self.fromcache) + len(self.willparse)
        self.toparse = len(self.willparse)
        # Emit a progress event roughly every 1% of files parsed
        self.progress_chunk = int(max(self.toparse / 100, 1))

        # Worker count is bounded by BB_NUMBER_PARSE_THREADS (default: CPU
        # count) and never exceeds the number of files needing a parse
        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                 multiprocessing.cpu_count()), self.toparse)

        self.start()
        self.haveshutdown = False
        self.syncthread = None
2190
2191    def start(self):
2192        self.results = self.load_cached()
2193        self.processes = []
2194        if self.toparse:
2195            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2196
2197            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2198            self.result_queue = multiprocessing.Queue()
2199
2200            def chunkify(lst,n):
2201                return [lst[i::n] for i in range(n)]
2202            self.jobs = chunkify(list(self.willparse), self.num_processes)
2203
2204            for i in range(0, self.num_processes):
2205                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2206                parser.start()
2207                self.process_names.append(parser.name)
2208                self.processes.append(parser)
2209
2210            self.results = itertools.chain(self.results, self.parse_generator())
2211
    def shutdown(self, clean=True):
        """
        Stop all parser worker processes and flush the caches.

        clean=True fires a ParseCompleted event carrying the final parse
        statistics; clean=False reports that parsing was halted by errors.
        Cache syncing runs on a background thread which final_cleanup()
        joins later. Safe to call repeatedly; only the first call (and only
        when something was scheduled for parsing) does any work.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
        else:
            bb.error("Parsing halted due to errors, see error messages above")

        # One sentinel per worker tells each Parser loop to exit
        for process in self.processes:
            self.parser_quit.put(None)

        # Cleanup the queue before call process.join(), otherwise there might be
        # deadlocks.
        while True:
            try:
               self.result_queue.get(timeout=0.25)
            except queue.Empty:
                break

        # Escalating teardown: polite join, then SIGINT, join again, then
        # terminate anything still alive, and finally a blocking join.
        for process in self.processes:
            process.join(0.5)

        for process in self.processes:
            if process.exitcode is None:
                os.kill(process.pid, signal.SIGINT)

        for process in self.processes:
            process.join(0.5)

        for process in self.processes:
            if process.exitcode is None:
                process.terminate()

        for process in self.processes:
            process.join()
            # Added in 3.7, cleans up zombies
            if hasattr(process, "close"):
                process.close()

        self.parser_quit.close()
        # Allow data left in the cancel queue to be discarded
        self.parser_quit.cancel_join_thread()

        # Write the parse caches to disk off the main thread; final_cleanup()
        # joins this thread later.
        def sync_caches():
            for c in self.bb_caches.values():
                c.sync()

        sync = threading.Thread(target=sync_caches, name="SyncThread")
        self.syncthread = sync
        sync.start()
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            # Merge the per-worker profile logs into one processed report
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))
2283
2284    def final_cleanup(self):
2285        if self.syncthread:
2286            self.syncthread.join()
2287
2288    def load_cached(self):
2289        for mc, cache, filename, appends in self.fromcache:
2290            cached, infos = cache.load(filename, appends)
2291            yield not cached, mc, infos
2292
2293    def parse_generator(self):
2294        empty = False
2295        while self.processes or not empty:
2296            for process in self.processes.copy():
2297                if not process.is_alive():
2298                    process.join()
2299                    self.processes.remove(process)
2300
2301            if self.parsed >= self.toparse:
2302                break
2303
2304            try:
2305                result = self.result_queue.get(timeout=0.25)
2306            except queue.Empty:
2307                empty = True
2308                yield None, None, None
2309            else:
2310                empty = False
2311                yield result
2312
2313        if not (self.parsed >= self.toparse):
2314            raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2315
2316
    def parse_next(self):
        """
        Consume one entry from self.results (cached loads chained with live
        worker results) and fold it into the recipe caches.

        Returns True while more work may remain and False once parsing has
        finished (StopIteration) or failed. On any failure the error counter
        is bumped and a non-clean shutdown is triggered.
        """
        result = []
        parsed = None
        try:
            parsed, mc, result = next(self.results)
            if isinstance(result, BaseException):
                # Turn exceptions back into exceptions
                raise result
            if parsed is None:
                # Timeout, loop back through the main loop
                return True

        except StopIteration:
            # All results consumed - the normal, clean exit path
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            # Error was already shown to the user; just record and stop
            self.error += 1
            logger.debug('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            # Wrapped BaseException from a worker (see Parser.parse)
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            # Drop bitbake-internal frames so the traceback starts at the recipe
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                            exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        # Successful result: update progress accounting
        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Merge each virtual provider's info into the per-mc recipe cache
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True
2387
2388    def reparse(self, filename):
2389        to_reparse = set()
2390        for mc in self.cooker.multiconfigs:
2391            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2392
2393        for mc, filename, appends in to_reparse:
2394            infos = self.bb_caches[mc].parse(filename, appends)
2395            for vfn, info_array in infos:
2396                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
2397