xref: /openbmc/openbmc/poky/bitbake/lib/bb/cooker.py (revision 8f840685)
1#
2# Copyright (C) 2003, 2004  Chris Larson
3# Copyright (C) 2003, 2004  Phil Blundell
4# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
5# Copyright (C) 2005        Holger Hans Peter Freyther
6# Copyright (C) 2005        ROAD GmbH
7# Copyright (C) 2006 - 2007 Richard Purdie
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import sys, os, glob, os.path, re, time
13import itertools
14import logging
15import multiprocessing
16import threading
17from io import StringIO, UnsupportedOperation
18from contextlib import closing
19from collections import defaultdict, namedtuple
20import bb, bb.exceptions, bb.command
21from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
22import queue
23import signal
24import prserv.serv
25import pyinotify
26import json
27import pickle
28import codecs
29import hashserv
30
31logger      = logging.getLogger("BitBake")
32collectlog  = logging.getLogger("BitBake.Collection")
33buildlog    = logging.getLogger("BitBake.Build")
34parselog    = logging.getLogger("BitBake.Parsing")
35providerlog = logging.getLogger("BitBake.Provider")
36
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no file match, or multiple ambiguous file
    matches, are found for a requested recipe file
    """
41
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """
46
class CollectionError(bb.BBHandledException):
    """
    Exception raised when the layer (BBFILE_COLLECTIONS) configuration
    is incorrect
    """
51
class state:
    """Enumeration of the cooker's lifecycle states."""
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name for a numeric state code.

        Raises ValueError if the code matches no known state.
        """
        for name in dir(cls):
            value = getattr(cls, name)
            # isinstance() is the idiomatic type check; exclude bool to
            # match the original strict type(...) comparison (bool is a
            # subclass of int and must not be treated as a state value).
            if isinstance(value, int) and not isinstance(value, bool) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)
62
63
class SkippedPackage:
    """Lightweight record describing why a recipe was skipped during parsing."""

    def __init__(self, info=None, reason=None):
        # Defaults when neither a recipe info object nor a reason is given.
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            # Copy the relevant fields from the parsed recipe info object.
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            # Runtime provides: every package plus recipe-level and
            # per-package rprovides.
            rprovides = info.packages + info.rprovides
            for pkg in info.packages:
                rprovides = rprovides + info.rprovides_pkg[pkg]
            self.rprovides = rprovides
        elif reason:
            self.skipreason = reason
80
81
class CookerFeatures(object):
    """Tracks the set of optional cooker features a UI has requested."""

    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support; unknown
        # features are silently ignored so newer UIs can talk to us
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        # Bug fix: a set is iterable but not an iterator, so calling
        # next() directly on self._features always raised TypeError.
        # Delegate to a fresh iterator instead (returns an arbitrary
        # element; raises StopIteration when empty).
        return next(iter(self._features))
102
103
104class EventWriter:
105    def __init__(self, cooker, eventfile):
106        self.file_inited = None
107        self.cooker = cooker
108        self.eventfile = eventfile
109        self.event_queue = []
110
111    def write_event(self, event):
112        with open(self.eventfile, "a") as f:
113            try:
114                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
115                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
116                                             "vars": str_event}))
117            except Exception as err:
118                import traceback
119                print(err, traceback.format_exc())
120
121    def send(self, event):
122        if self.file_inited:
123            # we have the file, just write the event
124            self.write_event(event)
125        else:
126            # init on bb.event.BuildStarted
127            name = "%s.%s" % (event.__module__, event.__class__.__name__)
128            if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
129                with open(self.eventfile, "w") as f:
130                    f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
131
132                self.file_inited = True
133
134                # write pending events
135                for evt in self.event_queue:
136                    self.write_event(evt)
137
138                # also write the current event
139                self.write_event(event)
140            else:
141                # queue all events until the file is inited
142                self.event_queue.append(event)
143
144#============================================================================#
145# BBCooker
146#============================================================================#
147class BBCooker:
148    """
149    Manages one bitbake build run
150    """
151
    def __init__(self, featureSet=None, server=None):
        """Set up cooker state, signal handling and inotify plumbing.

        featureSet: optional iterable of CookerFeatures constants to enable.
        server: optional process server supplying idle-callback registration
        and wait-for-idle hooks.
        """
        self.recipecaches = None
        self.baseconfig_valid = False
        self.parsecache_valid = False
        self.eventlog = None
        # Recipes skipped during parsing (used by taskdata for diagnostics)
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        # Snapshot sys.path/sys.modules so initConfigurationData() can
        # undo anything the parsed metadata imports or modifies later.
        self.orig_syspath = sys.path.copy()
        self.orig_sysmodules = [*sys.modules]

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.process_server = server
        self.idleCallBackRegister = None
        self.waitIdle = None
        if server:
            self.idleCallBackRegister = server.register_idle_function
            self.waitIdle = server.wait_for_idle

        bb.debug(1, "BBCooker starting %s" % time.time())
        sys.stdout.flush()

        self.configwatcher = None
        self.confignotifier = None

        # inotify events of interest for both config and parser watchers
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO

        self.watcher = None
        self.notifier = None

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        # Paths reported changed by inotify, consumed by handle_inotify_updates()
        self.inotify_modified_files = []

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            # stdout has no fileno (e.g. redirected to a non-file object)
            pass

        self.command = bb.command.Command(self, self.process_server)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())
        sys.stdout.flush()

        # Serialises access to the pyinotify watchers/notifiers
        self.inotify_threadlock = threading.Lock()
225
226    def init_configdata(self):
227        if not hasattr(self, "data"):
228            self.initConfigurationData()
229            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
230            sys.stdout.flush()
231            self.handlePRServ()
232
233    def setupConfigWatcher(self):
234        with bb.utils.lock_timeout(self.inotify_threadlock):
235            if self.configwatcher:
236                self.configwatcher.close()
237                self.confignotifier = None
238                self.configwatcher = None
239            self.configwatcher = pyinotify.WatchManager()
240            self.configwatcher.bbseen = set()
241            self.configwatcher.bbwatchedfiles = set()
242            self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
243
244    def setupParserWatcher(self):
245        with bb.utils.lock_timeout(self.inotify_threadlock):
246            if self.watcher:
247                self.watcher.close()
248                self.notifier = None
249                self.watcher = None
250            self.watcher = pyinotify.WatchManager()
251            self.watcher.bbseen = set()
252            self.watcher.bbwatchedfiles = set()
253            self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
254
255    def process_inotify_updates(self):
256        with bb.utils.lock_timeout(self.inotify_threadlock):
257            for n in [self.confignotifier, self.notifier]:
258                if n and n.check_events(timeout=0):
259                    # read notified events and enqueue them
260                    n.read_events()
261
262    def process_inotify_updates_apply(self):
263        with bb.utils.lock_timeout(self.inotify_threadlock):
264            for n in [self.confignotifier, self.notifier]:
265                if n and n.check_events(timeout=0):
266                    n.read_events()
267                    n.process_events()
268
269    def _baseconfig_set(self, value):
270        if value and not self.baseconfig_valid:
271            bb.server.process.serverlog("Base config valid")
272        elif not value and self.baseconfig_valid:
273            bb.server.process.serverlog("Base config invalidated")
274        self.baseconfig_valid = value
275
276    def _parsecache_set(self, value):
277        if value and not self.parsecache_valid:
278            bb.server.process.serverlog("Parse cache valid")
279        elif not value and self.parsecache_valid:
280            bb.server.process.serverlog("Parse cache invalidated")
281        self.parsecache_valid = value
282
    def config_notifications(self, event):
        """pyinotify callback for configuration-file changes.

        Invalidates the base configuration (and, on queue overflow, the
        parse cache too) so the next request triggers a reparse.
        """
        if event.maskname == "IN_Q_OVERFLOW":
            # Events were lost: we cannot know what changed, so invalidate everything.
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self._parsecache_set(False)
            self._baseconfig_set(False)
            bb.parse.clear_cache()
            return
        # Only react to files we explicitly registered interest in.
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
                # Directory created/removed: forget it so it gets re-watched.
                if event.pathname in self.configwatcher.bbseen:
                    self.configwatcher.bbseen.remove(event.pathname)
                # Could remove all entries starting with the directory but for now...
                bb.parse.clear_cache()
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self._baseconfig_set(False)
301
    def notifications(self, event):
        """pyinotify callback for recipe/parser file changes.

        Invalidates the parse cache; bitbake's own daemon log and lock
        files are ignored to avoid self-triggered invalidation.
        """
        if event.maskname == "IN_Q_OVERFLOW":
            # Events were lost: we cannot know what changed, so invalidate everything.
            bb.warn("inotify event queue overflowed, invalidating caches.")
            self._parsecache_set(False)
            bb.parse.clear_cache()
            return
        # Ignore churn on bitbake's own bookkeeping files.
        if event.pathname.endswith("bitbake-cookerdaemon.log") \
                or event.pathname.endswith("bitbake.lock"):
            return
        if "IN_ISDIR" in event.maskname:
            if "IN_CREATE" in event.maskname or "IN_DELETE" in event.maskname:
                # Directory created/removed: forget it so it gets re-watched.
                if event.pathname in self.watcher.bbseen:
                    self.watcher.bbseen.remove(event.pathname)
                # Could remove all entries starting with the directory but for now...
                bb.parse.clear_cache()
        if not event.pathname in self.inotify_modified_files:
            self.inotify_modified_files.append(event.pathname)
        self._parsecache_set(False)
320
    def add_filewatch(self, deps, watcher=None, dirs=False):
        """Add inotify watches for the given dependency entries.

        deps: iterable of tuples whose first element is a file path.
        watcher: WatchManager to use (defaults to the parser watcher).
        dirs: when True, watch the paths themselves rather than their
        containing directories.
        """
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.add(i[0])
            if dirs:
                f = i[0]
            else:
                # Watch the containing directory so creation/deletion of
                # the file itself is seen too.
                f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.add(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.add(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        # Directory missing: walk up to the nearest existing parent.
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.add(f)
                        continue
                    if 'ENOSPC' in str(e):
                        # Watch limit reached: explain how to raise it before re-raising.
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
357
358    def handle_inotify_updates(self):
359        # reload files for which we got notifications
360        for p in self.inotify_modified_files:
361            bb.parse.update_cache(p)
362            if p in bb.parse.BBHandler.cached_statements:
363                del bb.parse.BBHandler.cached_statements[p]
364        self.inotify_modified_files = []
365
    def sigterm_exception(self, signum, stackframe):
        """Signal handler: translate SIGTERM/SIGHUP into a forced shutdown.

        Sets the forceshutdown state and the global exit event so running
        work winds down cooperatively.
        """
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown
        bb.event._should_exit.set()
373
374    def setFeatures(self, features):
375        # we only accept a new feature set if we're in state initial, so we can reset without problems
376        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
377            raise Exception("Illegal state for feature set change")
378        original_featureset = list(self.featureset)
379        for feature in features:
380            self.featureset.setFeature(feature)
381        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
382        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
383            self.reset()
384
    def initConfigurationData(self):
        """(Re)parse the base configuration from a clean interpreter state.

        Restores sys.path/sys.modules to their startup snapshot, rebuilds
        the recipe-info cache class list from the enabled features, and
        re-creates the datastore via CookerDataBuilder.
        """

        self.state = state.initial
        self.caches_array = []

        # Undo any sys.path/sys.modules changes made by previously parsed
        # metadata so a reparse starts from a clean slate.
        sys.path = self.orig_syspath.copy()
        for mod in [*sys.modules]:
            if mod not in self.orig_sysmodules:
                del sys.modules[mod]

        self.handle_inotify_updates()
        self.setupConfigWatcher()

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        caches_name_array = ['bb.cache:CoreRecipeInfo']
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            caches_name_array.append("bb.cache_extra:HobRecipeInfo")
        if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
            caches_name_array.append("bb.cache:SiggenRecipeInfo")

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        # Watch everything the base configuration depended on so later
        # edits invalidate the base config.
        for mc in self.databuilder.mcdata.values():
            self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)

        self._baseconfig_set(True)
        self._parsecache_set(False)
443
    def handlePRServ(self):
        """Start the PR server and, if configured, a hash equivalence server.

        When BB_HASHSERVE is "auto", a local hash server is created on a
        unix domain socket and every multiconfig datastore is pointed at it.
        """
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    import socket
                    # Probe the upstream server so a bad address is reported
                    # early as a warning rather than failing mid-build.
                    try:
                        sock = socket.create_connection(upstream.split(":"), 5)
                        sock.close()
                    except socket.error as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                                 % (upstream, repr(e)))

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process()
            # Point every multiconfig datastore (and its pristine copy) at
            # the local hash server.
            for mc in self.databuilder.mcdata:
                self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)
478
    def enableDataTracking(self):
        """Turn on variable-access tracking in the configuration and datastore."""
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()
483
    def disableDataTracking(self):
        """Turn off variable-access tracking in the configuration and datastore."""
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()
488
489    def parseConfiguration(self):
490        self.updateCacheSync()
491
492        # Change nice level if we're asked to
493        nice = self.data.getVar("BB_NICE_LEVEL")
494        if nice:
495            curnice = os.nice(0)
496            nice = int(nice) - curnice
497            buildlog.verbose("Renice to %s " % os.nice(nice))
498
499        if self.recipecaches:
500            del self.recipecaches
501        self.multiconfigs = self.databuilder.mcdata.keys()
502        self.recipecaches = {}
503        for mc in self.multiconfigs:
504            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
505
506        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
507        self.collections = {}
508        for mc in self.multiconfigs:
509            self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
510
511        self._parsecache_set(False)
512
    def updateConfigOpts(self, options, environment, cmdline):
        """Merge UI-supplied options and environment into the configuration.

        Triggers a full reset/reparse when a reparse-affecting option
        (prefile/postfile) or an approved environment variable changed.
        """
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            # Swap the event-log writer when the target file changed.
            if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
                bb.event.unregister_UIHhandler(self.eventlog[1])
            if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
                # we log all events to a file if so directed
                # register the log file writer as UI Handler
                writer = EventWriter(self, self.configuration.writeeventlog)
                EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
                self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            # Record the unmodified external environment for the metadata.
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        # Reconcile approved environment variables: additions, deletions
        # and changed values all mark the configuration dirty.
        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug("Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        if not clean:
            logger.debug("Base environment change, triggering reparse")
            self.reset()
572
    def showVersions(self):
        """Print a table of latest/preferred/required versions per recipe."""

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    # Version tuple appears to be (epoch, version, revision) — formatted e:v-r.
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            # Suppress the preferred column when it matches the latest version.
            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
599
    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment

        buildfile: optional recipe file path — show that recipe's environment.
        pkgs_to_build: optional target list; a single target selects the
        corresponding recipe (or a multiconfig's global environment for a
        bare "mc:<name>" target). With neither, the global environment for
        the default multiconfig is shown.
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        # Variable history is needed for the output; temporarily enable
        # tracking if the UI didn't already request it.
        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        def mc_base(p):
            # Return the multiconfig name for a bare "mc:<name>" target,
            # or None when p is not of that exact form.
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                # runlist entries are [mc, target, task, fn]
                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            # Recipe-level environment: parse the single recipe with its appends.
            try:
                layername = self.collections[mc].calc_bbfile_priority(fn)[2]
                envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            # Global environment for the selected multiconfig.
            if not mc in self.databuilder.mcdata:
                bb.fatal('Not multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        # Restore the UI-requested tracking setting.
        if not orig_tracking:
            self.disableDataTracking()
            self.reset()
676
677    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
678        """
679        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
680        """
681        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
682
683        # A task of None means use the default task
684        if task is None:
685            task = self.configuration.cmd
686        if not task.startswith("do_"):
687            task = "do_%s" % task
688
689        targetlist = self.checkPackages(pkgs_to_build, task)
690        fulltargetlist = []
691        defaulttask_implicit = ''
692        defaulttask_explicit = False
693        wildcard = False
694
695        # Wild card expansion:
696        # Replace string such as "mc:*:bash"
697        # into "mc:A:bash mc:B:bash bash"
698        for k in targetlist:
699            if k.startswith("mc:") and k.count(':') >= 2:
700                if wildcard:
701                    bb.fatal('multiconfig conflict')
702                if k.split(":")[1] == "*":
703                    wildcard = True
704                    for mc in self.multiconfigs:
705                        if mc:
706                            fulltargetlist.append(k.replace('*', mc))
707                        # implicit default task
708                        else:
709                            defaulttask_implicit = k.split(":")[2]
710                else:
711                    fulltargetlist.append(k)
712            else:
713                defaulttask_explicit = True
714                fulltargetlist.append(k)
715
716        if not defaulttask_explicit and defaulttask_implicit != '':
717            fulltargetlist.append(defaulttask_implicit)
718
719        bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
720        taskdata = {}
721        localdata = {}
722
723        for mc in self.multiconfigs:
724            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
725            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
726            bb.data.expandKeys(localdata[mc])
727
728        current = 0
729        runlist = []
730        for k in fulltargetlist:
731            origk = k
732            mc = ""
733            if k.startswith("mc:") and k.count(':') >= 2:
734                mc = k.split(":")[1]
735                k = ":".join(k.split(":")[2:])
736            ktask = task
737            if ":do_" in k:
738                k2 = k.split(":do_")
739                k = k2[0]
740                ktask = k2[1]
741
742            if mc not in self.multiconfigs:
743                 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))
744
745            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
746            current += 1
747            if not ktask.startswith("do_"):
748                ktask = "do_%s" % ktask
749            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
750                # e.g. in ASSUME_PROVIDED
751                continue
752            fn = taskdata[mc].build_targets[k][0]
753            runlist.append([mc, k, ktask, fn])
754            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
755
756        havemc = False
757        for mc in self.multiconfigs:
758            if taskdata[mc].get_mcdepends():
759                havemc = True
760
761        # No need to do check providers if there are no mcdeps or not an mc build
762        if havemc or len(self.multiconfigs) > 1:
763            seen = set()
764            new = True
765            # Make sure we can provide the multiconfig dependency
766            while new:
767                mcdeps = set()
768                # Add unresolved first, so we can get multiconfig indirect dependencies on time
769                for mc in self.multiconfigs:
770                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
771                    mcdeps |= set(taskdata[mc].get_mcdepends())
772                new = False
773                for k in mcdeps:
774                    if k in seen:
775                        continue
776                    l = k.split(':')
777                    depmc = l[2]
778                    if depmc not in self.multiconfigs:
779                        bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
780                    else:
781                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
782                        taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
783                        seen.add(k)
784                        new = True
785
786        for mc in self.multiconfigs:
787            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
788
789        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
790        return taskdata, runlist
791
792    def prepareTreeData(self, pkgs_to_build, task):
793        """
794        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
795        """
796
797        # We set halt to False here to prevent unbuildable targets raising
798        # an exception when we're just generating data
799        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
800
801        return runlist, taskdata
802
    ######## WARNING: this function requires cache_extra to be enabled ########
804
805    def generateTaskDepTreeData(self, pkgs_to_build, task):
806        """
807        Create a dependency graph of pkgs_to_build including reverse dependency
808        information.
809        """
810        if not task.startswith("do_"):
811            task = "do_%s" % task
812
813        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
814        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
815        rq.rqdata.prepare()
816        return self.buildDependTree(rq, taskdata)
817
818    @staticmethod
819    def add_mc_prefix(mc, pn):
820        if mc:
821            return "mc:%s:%s" % (mc, pn)
822        return pn
823
    def buildDependTree(self, rq, taskdata):
        """
        Build a dependency-tree dictionary from a prepared runqueue.

        rq       -- a bb.runqueue.RunQueue whose rqdata has been prepare()d
        taskdata -- dict of per-multiconfig TaskData objects (as returned by
                    buildTaskData/prepareTreeData)

        Returns a dict with keys "pn", "depends", "tdepends", "rdepends-pn",
        "packages", "rdepends-pkg", "rrecs-pkg", "providermap" and
        "layer-priorities" describing build-time and runtime dependencies of
        every task in the runqueue.
        """
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        # Record which (mc-prefixed) recipe provides each named target when
        # the provider name differs from the recipe name itself.
        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version  = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            # First task seen for this recipe: record its static metadata.
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            # Per-task dependency edges: "pn.taskname" -> "[mc:x:]deppn.taskname".
            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if not dotname in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                if depmc:
                    depmc = "mc:" + depmc + ":"
                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
            # Per-recipe-file data is gathered only once per recipe file.
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                # Map each package back to the recipe that produces it.
                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
912
    ######## WARNING: this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.

        Similar to buildDependTree() but without a runqueue: dependencies
        are taken from the resolved taskdata only, and build/runtime
        dependencies are mapped to their providing recipe names.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        # The runlist is not needed here; only the resolved taskdata is used.
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        # Flatten the per-multiconfig task entries into one list of tids.
        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version  = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                # NOTE(review): rdepends/rrecs are (re)bound only when a new
                # pn is first seen; if a later taskfn for an already-seen pn
                # reached the seen_fns branch below, stale values from the
                # previous recipe would be used - confirm this cannot happen.
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                # Resolve each build-time dependency to the recipe providing
                # it where possible, falling back to the raw dependency name.
                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                # Same resolution for runtime dependencies.
                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
991
992    def generateDepTreeEvent(self, pkgs_to_build, task):
993        """
994        Create a task dependency graph of pkgs_to_build.
995        Generate an event with the result
996        """
997        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
998        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
999
1000    def generateDotGraphFiles(self, pkgs_to_build, task):
1001        """
1002        Create a task dependency graph of pkgs_to_build.
1003        Save the result to a set of .dot files.
1004        """
1005
1006        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
1007
1008        with open('pn-buildlist', 'w') as f:
1009            for pn in depgraph["pn"]:
1010                f.write(pn + "\n")
1011        logger.info("PN build list saved to 'pn-buildlist'")
1012
1013        # Remove old format output files to ensure no confusion with stale data
1014        try:
1015            os.unlink('pn-depends.dot')
1016        except FileNotFoundError:
1017            pass
1018        try:
1019            os.unlink('package-depends.dot')
1020        except FileNotFoundError:
1021            pass
1022        try:
1023            os.unlink('recipe-depends.dot')
1024        except FileNotFoundError:
1025            pass
1026
1027        with open('task-depends.dot', 'w') as f:
1028            f.write("digraph depends {\n")
1029            for task in sorted(depgraph["tdepends"]):
1030                (pn, taskname) = task.rsplit(".", 1)
1031                fn = depgraph["pn"][pn]["filename"]
1032                version = depgraph["pn"][pn]["version"]
1033                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
1034                for dep in sorted(depgraph["tdepends"][task]):
1035                    f.write('"%s" -> "%s"\n' % (task, dep))
1036            f.write("}\n")
1037        logger.info("Task dependencies saved to 'task-depends.dot'")
1038
    def show_appends_with_no_recipes(self):
        """
        Report any .bbappend files that were not applied to any recipe,
        warning or aborting depending on BB_DANGLINGAPPENDS_WARNONLY.
        """
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            # Any append not in the applied list is dangling.
            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if not appendfn in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                        '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            # NOTE(review): 'mc' here is whatever the loop above left bound,
            # so the warn-only flag is read from that multiconfig's data
            # store - presumably fine if the variable is set globally; confirm.
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)
1071
1072    def handlePrefProviders(self):
1073
1074        for mc in self.multiconfigs:
1075            localdata = data.createCopy(self.databuilder.mcdata[mc])
1076            bb.data.expandKeys(localdata)
1077
1078            # Handle PREFERRED_PROVIDERS
1079            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
1080                try:
1081                    (providee, provider) = p.split(':')
1082                except:
1083                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
1084                    continue
1085                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
1086                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
1087                self.recipecaches[mc].preferred[providee] = provider
1088
1089    def findConfigFilePath(self, configfile):
1090        """
1091        Find the location on disk of configfile and if it exists and was parsed by BitBake
1092        emit the ConfigFilePathFound event with the path to the file.
1093        """
1094        path = bb.cookerdata.findConfigFile(configfile, self.data)
1095        if not path:
1096            return
1097
1098        # Generate a list of parsed configuration files by searching the files
1099        # listed in the __depends and __base_depends variables with a .conf suffix.
1100        conffiles = []
1101        dep_files = self.data.getVar('__base_depends', False) or []
1102        dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1103
1104        for f in dep_files:
1105            if f[0].endswith(".conf"):
1106                conffiles.append(f[0])
1107
1108        _, conf, conffile = path.rpartition("conf/")
1109        match = os.path.join(conf, conffile)
1110        # Try and find matches for conf/conffilename.conf as we don't always
1111        # have the full path to the file.
1112        for cfg in conffiles:
1113            if cfg.endswith(match):
1114                bb.event.fire(bb.event.ConfigFilePathFound(path),
1115                              self.data)
1116                break
1117
1118    def findFilesMatchingInDir(self, filepattern, directory):
1119        """
1120        Searches for files containing the substring 'filepattern' which are children of
1121        'directory' in each BBPATH. i.e. to find all rootfs package classes available
1122        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1123        or to find all machine configuration files one could call:
1124        findFilesMatchingInDir(self, '.conf', 'conf/machine')
1125        """
1126
1127        matches = []
1128        bbpaths = self.data.getVar('BBPATH').split(':')
1129        for path in bbpaths:
1130            dirpath = os.path.join(path, directory)
1131            if os.path.exists(dirpath):
1132                for root, dirs, files in os.walk(dirpath):
1133                    for f in files:
1134                        if filepattern in f:
1135                            matches.append(f)
1136
1137        if matches:
1138            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1139
1140    def testCookerCommandEvent(self, filepattern):
1141        # Dummy command used by OEQA selftest to test tinfoil without IO
1142        matches = ["A", "B"]
1143        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1144
1145    def findProviders(self, mc=''):
1146        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1147
1148    def findBestProvider(self, pn, mc=''):
1149        if pn in self.recipecaches[mc].providers:
1150            filenames = self.recipecaches[mc].providers[pn]
1151            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
1152            if eligible is not None:
1153                filename = eligible[0]
1154            else:
1155                filename = None
1156            return None, None, None, filename
1157        elif pn in self.recipecaches[mc].pkg_pn:
1158            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1159            if required and preferred_file is None:
1160                return None, None, None, None
1161            return (latest, latest_f, preferred_ver, preferred_file)
1162        else:
1163            return None, None, None, None
1164
1165    def findConfigFiles(self, varname):
1166        """
1167        Find config files which are appropriate values for varname.
1168        i.e. MACHINE, DISTRO
1169        """
1170        possible = []
1171        var = varname.lower()
1172
1173        data = self.data
1174        # iterate configs
1175        bbpaths = data.getVar('BBPATH').split(':')
1176        for path in bbpaths:
1177            confpath = os.path.join(path, "conf", var)
1178            if os.path.exists(confpath):
1179                for root, dirs, files in os.walk(confpath):
1180                    # get all child files, these are appropriate values
1181                    for f in files:
1182                        val, sep, end = f.rpartition('.')
1183                        if end == 'conf':
1184                            possible.append(val)
1185
1186        if possible:
1187            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1188
1189    def findInheritsClass(self, klass):
1190        """
1191        Find all recipes which inherit the specified class
1192        """
1193        pkg_list = []
1194
1195        for pfn in self.recipecaches[''].pkg_fn:
1196            inherits = self.recipecaches[''].inherits.get(pfn, None)
1197            if inherits and klass in inherits:
1198                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
1199
1200        return pkg_list
1201
1202    def generateTargetsTree(self, klass=None, pkgs=None):
1203        """
1204        Generate a dependency tree of buildable targets
1205        Generate an event with the result
1206        """
1207        # if the caller hasn't specified a pkgs list default to universe
1208        if not pkgs:
1209            pkgs = ['universe']
1210        # if inherited_class passed ensure all recipes which inherit the
1211        # specified class are included in pkgs
1212        if klass:
1213            extra_pkgs = self.findInheritsClass(klass)
1214            pkgs = pkgs + extra_pkgs
1215
1216        # generate a dependency tree for all our packages
1217        tree = self.generatePkgDepTreeData(pkgs, 'build')
1218        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1219
1220    def interactiveMode( self ):
1221        """Drop off into a shell"""
1222        try:
1223            from bb import shell
1224        except ImportError:
1225            parselog.exception("Interactive mode not available")
1226            raise bb.BBHandledException()
1227        else:
1228            shell.start( self )
1229
1230
    def handleCollections(self, collections):
        """Handle collections"""
        # Parse the layer collection list ('collections', the value of
        # BBFILE_COLLECTIONS): compute a priority for every layer (explicit
        # BBFILE_PRIORITY_* or derived from LAYERDEPENDS/LAYERRECOMMENDS),
        # validate layer version constraints, and populate
        # self.bbfile_config_priorities with (collection, pattern,
        # compiled-regex, priority) tuples.  Raises CollectionError after
        # reporting any problems found.
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    # NOTE(review): if int() above failed, 'prio' may be
                    # unbound here and raise NameError - confirm intent.
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    # No explicit priority; derived from dependencies later
                    # by calc_layer_priority().
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            # Verify any version constraint against the
                            # dependency's LAYERVERSION_* value.
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            # A version-compatible (or unversioned) recommend
                            # participates in priority calculation like a dep.
                            parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                # A layer with no explicit priority gets one more than the
                # highest priority among its dependencies (at least min_prio).
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    # An empty pattern deliberately matches nothing.
                    parselog.debug("BBFILE_PATTERN_%s is empty" % c)
                    cre = re.compile('^NULL$')
                    errors = False
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1352
1353    def buildSetVars(self):
1354        """
1355        Setup any variables needed before starting a build
1356        """
1357        t = time.gmtime()
1358        for mc in self.databuilder.mcdata:
1359            ds = self.databuilder.mcdata[mc]
1360            if not ds.getVar("BUILDNAME", False):
1361                ds.setVar("BUILDNAME", "${DATE}${TIME}")
1362            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1363            ds.setVar("DATE", time.strftime('%Y%m%d', t))
1364            ds.setVar("TIME", time.strftime('%H%M%S', t))
1365
1366    def reset_mtime_caches(self):
1367        """
1368        Reset mtime caches - this is particularly important when memory resident as something
1369        which is cached is not unlikely to have changed since the last invocation (e.g. a
1370        file associated with a recipe might have been modified by the user).
1371        """
1372        build.reset_cache()
1373        bb.fetch._checksum_cache.mtime_cache.clear()
1374        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1375        if siggen_cache:
1376            bb.parse.siggen.checksum_cache.mtime_cache.clear()
1377
1378    def matchFiles(self, bf, mc=''):
1379        """
1380        Find the .bb files which match the expression in 'buildfile'.
1381        """
1382        if bf.startswith("/") or bf.startswith("../"):
1383            bf = os.path.abspath(bf)
1384
1385        collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1386        filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1387        try:
1388            os.stat(bf)
1389            bf = os.path.abspath(bf)
1390            return [bf]
1391        except OSError:
1392            regexp = re.compile(bf)
1393            matches = []
1394            for f in filelist:
1395                if regexp.search(f) and os.path.isfile(f):
1396                    matches.append(f)
1397            return matches
1398
1399    def matchFile(self, buildfile, mc=''):
1400        """
1401        Find the .bb file which matches the expression in 'buildfile'.
1402        Raise an error if multiple files
1403        """
1404        matches = self.matchFiles(buildfile, mc)
1405        if len(matches) != 1:
1406            if matches:
1407                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1408                if matches:
1409                    for f in matches:
1410                        msg += "\n    %s" % f
1411                parselog.error(msg)
1412            else:
1413                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1414            raise NoSpecificMatch
1415        return matches[0]
1416
1417    def buildFile(self, buildfile, task):
1418        """
1419        Build the file matching regexp buildfile
1420        """
1421        bb.event.fire(bb.event.BuildInit(), self.data)
1422
1423        # Too many people use -b because they think it's how you normally
1424        # specify a target to be built, so show a warning
1425        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1426
1427        self.buildFileInternal(buildfile, task)
1428
    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
        """
        Build the file matching regexp buildfile.

        Parses the single matching recipe directly (bypassing the parse
        cache), strips its external dependencies, then runs 'task' on it via
        a runqueue driven by an idle callback. With fireevents=False no
        BuildStarted/BuildCompleted events are fired; with quietlog=True the
        runqueue logger is silenced to WARNING for the duration of the run.
        """

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        # Resolve the (possibly virtual) filename to a real recipe file
        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn, mc)

        self.buildSetVars()
        self.reset_mtime_caches()

        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)

        # Parse just this one recipe (with its bbappends) into the cache
        layername = self.collections[mc].calc_bbfile_priority(fn)[2]
        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
        infos = dict(infos)

        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1
        self.configuration.limited_deps = True

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
        self.recipecaches[mc].runrecs[fn] = defaultdict(list)

        bb.parse.siggen.setup_datacache(self.recipecaches)

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)

        if quietlog:
            # Remember the current level so the idle callback can restore it
            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
            bb.runqueue.logger.setLevel(logging.WARNING)

        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
        if fireevents:
            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
            bb.event.enable_heartbeat()

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, halt):
            # Idle handler: advances the runqueue one step per invocation
            # until it completes or the cooker is shut down

            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return bb.server.process.idleFinish(str(exc))

            if not retval:
                # Runqueue finished (successfully or not); fire completion
                # events and restore state before signalling idle-finish
                if fireevents:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
                    bb.event.disable_heartbeat()
                # We trashed self.recipecaches above
                self._parsecache_set(False)
                self.configuration.limited_deps = False
                bb.parse.siggen.reset(self.data)
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return bb.server.process.idleFinish(msg)
            if retval is True:
                return True
            return retval

        self.idleCallBackRegister(buildFileIdle, rq)
1544
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified.

        Fires BuildInit/BuildStarted events, constructs taskdata and a
        runqueue for the requested targets, and registers an idle callback
        which drives the runqueue to completion.
        """

        def buildTargetsIdle(server, rq, halt):
            # Idle handler: advances the runqueue one step per invocation
            # until it completes or the cooker is shut down
            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                bb.event._should_exit.set()
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                return bb.server.process.idleFinish(str(exc))

            if not retval:
                # Runqueue finished; always disable the heartbeat even if
                # firing BuildCompleted raises
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    bb.event.disable_heartbeat()
                return bb.server.process.idleFinish(msg)

            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        # Normalise bare targets to the <target>:<task> form
        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
        bb.event.enable_heartbeat()

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.idleCallBackRegister(buildTargetsIdle, rq)
1617
1618
1619    def getAllKeysWithFlags(self, flaglist):
1620        dump = {}
1621        for k in self.data.keys():
1622            try:
1623                expand = True
1624                flags = self.data.getVarFlags(k)
1625                if flags and "func" in flags and "python" in flags:
1626                    expand = False
1627                v = self.data.getVar(k, expand)
1628                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1629                    dump[k] = {
1630    'v' : str(v) ,
1631    'history' : self.data.varhistory.variable(k),
1632                    }
1633                    for d in flaglist:
1634                        if flags and d in flags:
1635                            dump[k][d] = flags[d]
1636                        else:
1637                            dump[k][d] = None
1638            except Exception as e:
1639                print(e)
1640        return dump
1641
1642
1643    def updateCacheSync(self):
1644        if self.state == state.running:
1645            return
1646
1647        self.handle_inotify_updates()
1648
1649        if not self.baseconfig_valid:
1650            logger.debug("Reloading base configuration data")
1651            self.initConfigurationData()
1652            self.handlePRServ()
1653
    # This is called for all async commands when self.state != running
    def updateCache(self):
        """
        Drive the recipe parsing state machine.

        Refreshes the base configuration if needed, sets up a CookerParser
        on first entry, then advances parsing one step per call. Returns
        True while parsing is in progress and None once complete (at which
        point self.state becomes state.running). Raises
        bb.BBHandledException on shutdown or on parse errors.
        """
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            # Shutting down: clean up any parser worker processes first
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False)
                self.parser.final_cleanup()
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        if self.state != state.parsing and not self.parsecache_valid:
            # First entry with a stale parse cache: set up a fresh parse
            bb.server.process.serverlog("Parsing started")
            self.setupParserWatcher()

            bb.parse.siggen.reset(self.data)
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

            # Dependencies listed in ASSUME_PROVIDED (plus command-line
            # extras) are never built
            for mc in self.multiconfigs:
                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                self.recipecaches[mc].ignored_dependencies = set(ignore.split())

                for dep in self.configuration.extra_assume_provided:
                    self.recipecaches[mc].ignored_dependencies.add(dep)

            # Collect the recipe file list for every multiconfig
            mcfilelist = {}
            total_masked = 0
            searchdirs = set()
            for mc in self.multiconfigs:
                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])

                mcfilelist[mc] = filelist
                total_masked += masked
                searchdirs |= set(search)

            # Add inotify watches for directories searched for bb/bbappend files
            for dirent in searchdirs:
                self.add_filewatch([[dirent]], dirs=True)

            self.parser = CookerParser(self, mcfilelist, total_masked)
            self._parsecache_set(True)

        self.state = state.parsing

        if not self.parser.parse_next():
            # Parsing finished: compute priorities and move to running state
            collectlog.debug("parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            for mc in self.multiconfigs:
                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            for mc in self.multiconfigs:
                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
                bb.event.fire(event, self.databuilder.mcdata[mc])
            return None

        return True
1722
1723    def checkPackages(self, pkgs_to_build, task=None):
1724
1725        # Return a copy, don't modify the original
1726        pkgs_to_build = pkgs_to_build[:]
1727
1728        if not pkgs_to_build:
1729            raise NothingToBuild
1730
1731        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
1732        for pkg in pkgs_to_build.copy():
1733            if pkg in ignore:
1734                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1735            if pkg.startswith("multiconfig:"):
1736                pkgs_to_build.remove(pkg)
1737                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
1738
1739        if 'world' in pkgs_to_build:
1740            pkgs_to_build.remove('world')
1741            for mc in self.multiconfigs:
1742                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1743                for t in self.recipecaches[mc].world_target:
1744                    if mc:
1745                        t = "mc:" + mc + ":" + t
1746                    pkgs_to_build.append(t)
1747
1748        if 'universe' in pkgs_to_build:
1749            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1750            parselog.debug("collating packages for \"universe\"")
1751            pkgs_to_build.remove('universe')
1752            for mc in self.multiconfigs:
1753                for t in self.recipecaches[mc].universe_target:
1754                    if task:
1755                        foundtask = False
1756                        for provider_fn in self.recipecaches[mc].providers[t]:
1757                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1758                                foundtask = True
1759                                break
1760                        if not foundtask:
1761                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1762                            continue
1763                    if mc:
1764                        t = "mc:" + mc + ":" + t
1765                    pkgs_to_build.append(t)
1766
1767        return pkgs_to_build
1768
1769    def pre_serve(self):
1770        return
1771
1772    def post_serve(self):
1773        self.shutdown(force=True)
1774        prserv.serv.auto_shutdown()
1775        if hasattr(bb.parse, "siggen"):
1776            bb.parse.siggen.exit()
1777        if self.hashserv:
1778            self.hashserv.process.terminate()
1779            self.hashserv.process.join()
1780        if hasattr(self, "data"):
1781            bb.event.fire(CookerExit(), self.data)
1782
1783    def shutdown(self, force=False):
1784        if force:
1785            self.state = state.forceshutdown
1786            bb.event._should_exit.set()
1787        else:
1788            self.state = state.shutdown
1789
1790        if self.parser:
1791            self.parser.shutdown(clean=False)
1792            self.parser.final_cleanup()
1793
1794    def finishcommand(self):
1795        if hasattr(self.parser, 'shutdown'):
1796            self.parser.shutdown(clean=False)
1797            self.parser.final_cleanup()
1798        self.state = state.initial
1799        bb.event._should_exit.clear()
1800
1801    def reset(self):
1802        if hasattr(bb.parse, "siggen"):
1803            bb.parse.siggen.exit()
1804        self.finishcommand()
1805        self.initConfigurationData()
1806        self.handlePRServ()
1807
1808    def clientComplete(self):
1809        """Called when the client is done using the server"""
1810        self.finishcommand()
1811        self.extraconfigdata = {}
1812        self.command.reset()
1813        if hasattr(self, "data"):
1814           self.databuilder.reset()
1815           self.data = self.databuilder.data
1816        # In theory tinfoil could have modified the base data before parsing,
1817        # ideally need to track if anything did modify the datastore
1818        self._parsecache_set(False)
1819
class CookerExit(bb.event.Event):
    """
    Event notifying clients that the cooker is shutting down.
    """

    def __init__(self):
        super().__init__()
1827
1828
class CookerCollectFiles(object):
    """
    Collects the set of .bb and .bbappend files visible to a (multi)config:
    applies BBFILES/BBMASK, records which bbappends apply to which recipes,
    which recipes are overlayed by higher-priority layers, and the layer
    priority of each file.
    """
    def __init__(self, priorities, mc=''):
        self.mc = mc
        # (base recipe filename, bbappend path) tuples, filled in by
        # collect_bbfiles()
        self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
        # We need to sort the list with the longest pattern first, and so on to
        # the shortest.  This allows nested layers to be properly evaluated.
        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)

    def calc_bbfile_priority(self, filename):
        """
        Return (priority, regex, layername) for the first configured layer
        pattern matching filename, or (0, None, None) when nothing matches.
        """
        for layername, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                return pri, regex, layername
        return 0, None, None

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            # Prune version-control metadata directories in place
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # str.endswith() requires a str or a tuple of strs; the previous
            # list argument raised TypeError whenever this path was hit
            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """
        Collect all available .bb build files.

        Returns a (bbfiles, masked, searchdirs) tuple: recipe files to
        parse, the count of files skipped due to BBMASK, and the list of
        directories searched (used to set up inotify watches).
        """
        masked = 0

        collectlog.debug("collecting .bb files")

        files = (config.getVar( "BBFILES") or "").split()

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
        config.setVar("BBFILES_PRIORITIZED", " ".join(files))

        if not files:
            files = self.get_bbfiles()

        if not files:
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this, this is horrid. We intercept the os.listdir()
        # (or os.scandir() for python 3.6+) calls while we run glob().
        origlistdir = os.listdir
        if hasattr(os, 'scandir'):
            origscandir = os.scandir
        searchdirs = []

        def ourlistdir(d):
            searchdirs.append(d)
            return origlistdir(d)

        def ourscandir(d):
            searchdirs.append(d)
            return origscandir(d)

        os.listdir = ourlistdir
        if hasattr(os, 'scandir'):
            os.scandir = ourscandir
        try:
            # Can't use set here as order is important
            newfiles = []
            for f in files:
                if os.path.isdir(f):
                    dirfiles = self.find_bbfiles(f)
                    for g in dirfiles:
                        if g not in newfiles:
                            newfiles.append(g)
                else:
                    globbed = glob.glob(f)
                    if not globbed and os.path.exists(f):
                        globbed = [f]
                    # glob gives files in order on disk. Sort to be deterministic.
                    for g in sorted(globbed):
                        if g not in newfiles:
                            newfiles.append(g)
        finally:
            # Always restore the real os.listdir/os.scandir
            os.listdir = origlistdir
            if hasattr(os, 'scandir'):
                os.scandir = origscandir

        bbmask = config.getVar('BBMASK')

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                # When constructing an older style single regex, it's possible for BBMASK
                # to end up beginning with '|', which matches and masks _everything_.
                if mask.startswith("|"):
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
                    mask = mask[1:]
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except re.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except re.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        # Split the collected files into recipes and appends, applying BBMASK
        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug("skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug("skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked, searchdirs)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # A '%' in the bbappend name acts as a wildcard against the
            # version portion of the recipe filename
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return tuple(filelist)

    def collection_priorities(self, pkgfns, fns, d):
        # Return the priorities of the entries in pkgfns
        # Also check that all the regexes in self.bbfile_config_priorities are used
        # (but to do that we need to ensure skipped recipes aren't counted, nor
        # collections in BBFILE_PATTERN_IGNORE_EMPTY)

        priorities = {}
        seen = set()
        matched = set()

        matched_regex = set()
        unmatched_regex = set()
        for _, _, regex, _ in self.bbfile_config_priorities:
            unmatched_regex.add(regex)

        # Calculate priorities for each file
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
            if regex in unmatched_regex:
                matched_regex.add(regex)
                unmatched_regex.remove(regex)
            seen.add(realfn)
            if regex:
                matched.add(realfn)

        if unmatched_regex:
            # Account for bbappend files
            for b in self.bbappends:
                (bbfile, append) = b
                seen.add(append)

            # Account for skipped recipes
            seen.update(fns)

            seen.difference_update(matched)

            def already_matched(fn):
                for regex in matched_regex:
                    if regex.match(fn):
                        return True
                return False

            for unmatch in unmatched_regex.copy():
                for fn in seen:
                    if unmatch.match(fn):
                        # If the bbappend or file was already matched by another regex, skip it
                        # e.g. for a layer within a layer, the outer regex could match, the inner
                        # regex may match nothing and we should warn about that
                        if already_matched(fn):
                            continue
                        unmatched_regex.remove(unmatch)
                        break

        # Warn about layers whose BBFILE_PATTERN matched no files at all
        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched_regex:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                    collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
                                                                                             collection, pattern))

        return priorities
2057        return priorities
2058
class ParsingFailure(Exception):
    """
    Wraps an exception raised while parsing a recipe, recording which
    recipe was being parsed at the time.
    """
    def __init__(self, realexception, recipe):
        self.realexception = realexception
        self.recipe = recipe
        super().__init__(realexception, recipe)
2064
2065class Parser(multiprocessing.Process):
2066    def __init__(self, jobs, results, quit, profile):
2067        self.jobs = jobs
2068        self.results = results
2069        self.quit = quit
2070        multiprocessing.Process.__init__(self)
2071        self.context = bb.utils.get_context().copy()
2072        self.handlers = bb.event.get_class_handlers().copy()
2073        self.profile = profile
2074        self.queue_signals = False
2075        self.signal_received = []
2076        self.signal_threadlock = threading.Lock()
2077
2078    def catch_sig(self, signum, frame):
2079        if self.queue_signals:
2080            self.signal_received.append(signum)
2081        else:
2082            self.handle_sig(signum, frame)
2083
2084    def handle_sig(self, signum, frame):
2085        if signum == signal.SIGTERM:
2086            signal.signal(signal.SIGTERM, signal.SIG_DFL)
2087            os.kill(os.getpid(), signal.SIGTERM)
2088        elif signum == signal.SIGINT:
2089            signal.default_int_handler(signum, frame)
2090
2091    def run(self):
2092
2093        if not self.profile:
2094            self.realrun()
2095            return
2096
2097        try:
2098            import cProfile as profile
2099        except:
2100            import profile
2101        prof = profile.Profile()
2102        try:
2103            profile.Profile.runcall(prof, self.realrun)
2104        finally:
2105            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2106            prof.dump_stats(logfile)
2107
2108    def realrun(self):
2109        # Signal handling here is hard. We must not terminate any process or thread holding the write
2110        # lock for the event stream as it will not be released, ever, and things will hang.
2111        # Python handles signals in the main thread/process but they can be raised from any thread and
2112        # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2113        # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2114        # new thread should also do so) and we defer handling but we handle with the local thread lock
2115        # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2116        # can be in the critical section.
2117        signal.signal(signal.SIGTERM, self.catch_sig)
2118        signal.signal(signal.SIGHUP, signal.SIG_DFL)
2119        signal.signal(signal.SIGINT, self.catch_sig)
2120        bb.utils.set_process_name(multiprocessing.current_process().name)
2121        multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2122        multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2123
2124        pending = []
2125        havejobs = True
2126        try:
2127            while havejobs or pending:
2128                if self.quit.is_set():
2129                    break
2130
2131                job = None
2132                try:
2133                    job = self.jobs.pop()
2134                except IndexError:
2135                    havejobs = False
2136                if job:
2137                    result = self.parse(*job)
2138                    # Clear the siggen cache after parsing to control memory usage, its huge
2139                    bb.parse.siggen.postparsing_clean_cache()
2140                    pending.append(result)
2141
2142                if pending:
2143                    try:
2144                        result = pending.pop()
2145                        self.results.put(result, timeout=0.05)
2146                    except queue.Full:
2147                        pending.append(result)
2148        finally:
2149            self.results.close()
2150            self.results.join_thread()
2151
    def parse(self, mc, cache, filename, appends, layername):
        """
        Parse a single recipe file in this worker.

        Returns a (parsed, mc, result) tuple where parsed is always True on
        this codepath (a real parse was attempted), mc is the multiconfig
        name (or None on failure), and result is either the cache's parse
        output or the exception describing the failure, returned as data so
        it can travel back over the results queue.
        """
        try:
            origfilter = bb.event.LogHandler.filter
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, mc, cache.parse(filename, appends, layername)
        except Exception as exc:
            # Annotate the exception with the recipe name and a trimmed
            # traceback so the main process can report it usefully, then
            # return it as a result instead of raising.
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, None, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, None, ParsingFailure(exc, filename)
        finally:
            # Always restore the original log filter, whatever happened above
            bb.event.LogHandler.filter = origfilter
2179
2180class CookerParser(object):
2181    def __init__(self, cooker, mcfilelist, masked):
2182        self.mcfilelist = mcfilelist
2183        self.cooker = cooker
2184        self.cfgdata = cooker.data
2185        self.cfghash = cooker.data_hash
2186        self.cfgbuilder = cooker.databuilder
2187
2188        # Accounting statistics
2189        self.parsed = 0
2190        self.cached = 0
2191        self.error = 0
2192        self.masked = masked
2193
2194        self.skipped = 0
2195        self.virtuals = 0
2196
2197        self.current = 0
2198        self.process_names = []
2199
2200        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2201        self.fromcache = set()
2202        self.willparse = set()
2203        for mc in self.cooker.multiconfigs:
2204            for filename in self.mcfilelist[mc]:
2205                appends = self.cooker.collections[mc].get_file_appends(filename)
2206                layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2207                if not self.bb_caches[mc].cacheValid(filename, appends):
2208                    self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
2209                else:
2210                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2211
2212        self.total = len(self.fromcache) + len(self.willparse)
2213        self.toparse = len(self.willparse)
2214        self.progress_chunk = int(max(self.toparse / 100, 1))
2215
2216        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2217                                 multiprocessing.cpu_count()), self.toparse)
2218
2219        bb.cache.SiggenRecipeInfo.reset()
2220        self.start()
2221        self.haveshutdown = False
2222        self.syncthread = None
2223
2224    def start(self):
2225        self.results = self.load_cached()
2226        self.processes = []
2227        if self.toparse:
2228            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2229
2230            self.parser_quit = multiprocessing.Event()
2231            self.result_queue = multiprocessing.Queue()
2232
2233            def chunkify(lst,n):
2234                return [lst[i::n] for i in range(n)]
2235            self.jobs = chunkify(list(self.willparse), self.num_processes)
2236
2237            for i in range(0, self.num_processes):
2238                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2239                parser.start()
2240                self.process_names.append(parser.name)
2241                self.processes.append(parser)
2242
2243            self.results = itertools.chain(self.results, self.parse_generator())
2244
    def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
        """
        Stop parsing: fire the completion/error event, drain the result
        queue, shut down the worker processes and save the parser caches.

        clean=True means parsing finished normally (fires ParseCompleted);
        clean=False aborts and fires ParseError with eventmsg. Idempotent:
        only the first call does anything.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
        else:
            bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
            bb.error("Parsing halted due to errors, see error messages above")

        # Cleanup the queue before call process.join(), otherwise there might be
        # deadlocks.
        while True:
            try:
               self.result_queue.get(timeout=0.25)
            except queue.Empty:
                break

        def sync_caches():
            # Flush each multiconfig cache to disk; runs on a background
            # thread so shutdown isn't blocked on cache writes (the caller
            # waits for it later in final_cleanup()).
            for c in self.bb_caches.values():
                bb.cache.SiggenRecipeInfo.reset()
                c.sync()

        self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
        self.syncthread.start()

        # Tell the workers to stop picking up jobs
        self.parser_quit.set()

        # Escalating worker shutdown: polite join with a timeout, then
        # SIGINT for stragglers, join again, then terminate() anything
        # still alive, and finally an unbounded join plus close().
        for process in self.processes:
            process.join(0.5)

        for process in self.processes:
            if process.exitcode is None:
                os.kill(process.pid, signal.SIGINT)

        for process in self.processes:
            process.join(0.5)

        for process in self.processes:
            if process.exitcode is None:
                process.terminate()

        for process in self.processes:
            process.join()
            # Added in 3.7, cleans up zombies
            if hasattr(process, "close"):
                process.close()

        # Persist the codeparser/fetcher caches accumulated during parsing
        bb.codeparser.parser_cache_save()
        bb.codeparser.parser_cache_savemerge()
        bb.cache.SiggenRecipeInfo.reset()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            # Merge each worker's profile log into one processed report
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))
2315
2316    def final_cleanup(self):
2317        if self.syncthread:
2318            self.syncthread.join()
2319
2320    def load_cached(self):
2321        for mc, cache, filename, appends, layername in self.fromcache:
2322            infos = cache.loadCached(filename, appends)
2323            yield False, mc, infos
2324
    def parse_generator(self):
        """
        Generator yielding (parsed, mc, result) tuples from the workers'
        result queue. Yields (None, None, None) on a queue timeout so the
        caller's loop keeps turning rather than blocking here. Raises
        bb.parse.ParseError if the workers all exit before every recipe
        has been parsed.
        """
        empty = False
        while self.processes or not empty:
            # Reap any workers that have already exited
            for process in self.processes.copy():
                if not process.is_alive():
                    process.join()
                    self.processes.remove(process)

            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except queue.Empty:
                # No result ready yet; hand control back to the caller
                empty = True
                yield None, None, None
            else:
                empty = False
                yield result

        # If the loop ended with work outstanding, the workers died on us
        if not (self.parsed >= self.toparse):
            raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2347
2348
    def parse_next(self):
        """
        Consume one result from self.results and fold it into the recipe
        caches and the accounting statistics.

        Returns True while parsing should continue (including queue
        timeouts) and False once parsing is complete or has failed; any
        failure also triggers shutdown(clean=False) so the error events
        are fired.
        """
        result = []
        parsed = None
        try:
            parsed, mc, result = next(self.results)
            if isinstance(result, BaseException):
                # Turn exceptions back into exceptions
                raise result
            if parsed is None:
                # Timeout, loop back through the main loop
                return True

        except StopIteration:
            # All results consumed: normal completion
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            # Error has already been presented to the user; just record it
            self.error += 1
            logger.debug('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False, eventmsg=str(exc))
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            # Drop bitbake-internal frames so the report starts at the recipe
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                            exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        # Successful result: update progress accounting
        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Record each virtual recipe; skipped ones also go on the skiplist
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher = self.cooker.add_filewatch)
        return True
2419
2420    def reparse(self, filename):
2421        bb.cache.SiggenRecipeInfo.reset()
2422        to_reparse = set()
2423        for mc in self.cooker.multiconfigs:
2424            layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2425            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
2426
2427        for mc, filename, appends, layername in to_reparse:
2428            infos = self.bb_caches[mc].parse(filename, appends, layername)
2429            for vfn, info_array in infos:
2430                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
2431