#
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import enum
import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import json
import pickle
import codecs
import hashserv

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no file matches are found, or when multiple files match
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """


class State(enum.Enum):
    INITIAL = 0
    PARSING = 1
    RUNNING = 2
    SHUTDOWN = 3
    FORCE_SHUTDOWN = 4
    STOPPED = 5
    ERROR = 6

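# Rough lifecycle, inferred from the code below: INITIAL -> PARSING -> RUNNING
# for a normal build; SHUTDOWN/FORCE_SHUTDOWN are requested by the UI or by
# signals, with STOPPED and ERROR as end states.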

class SkippedPackage:
    def __init__(self, info=None, reason=None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

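# A minimal usage sketch (hypothetical caller, e.g. a UI front-end):
#   features = CookerFeatures()
#   features.setFeature(CookerFeatures.RECIPE_SIGGEN_INFO)
#   assert CookerFeatures.RECIPE_SIGGEN_INFO in features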

class EventWriter:
    def __init__(self, cooker, eventfile):
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_variables(self):
        with open(self.eventfile, "a") as f:
            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

    def send(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

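# Each line EventWriter appends to the event file is one standalone JSON object,
# e.g. (a sketch): {"class": "bb.event.BuildStarted", "vars": "<base64 pickle>"}
# A reader can recover the event with
# pickle.loads(codecs.decode(vars.encode('utf-8'), 'base64')).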

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, server=None):
        self.recipecaches = None
        self.baseconfig_valid = False
        self.parsecache_valid = False
        self.eventlog = None
        # The skiplists, one per multiconfig
        self.skiplist_by_mc = defaultdict(dict)
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.orig_syspath = sys.path.copy()
        self.orig_sysmodules = [*sys.modules]

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.process_server = server
        self.idleCallBackRegister = None
        self.waitIdle = None
        if server:
            self.idleCallBackRegister = server.register_idle_function
            self.waitIdle = server.wait_for_idle

        bb.debug(1, "BBCooker starting %s" % time.time())

        self.configwatched = {}
        self.parsewatched = {}

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self, self.process_server)
        self.state = State.INITIAL

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            self.handlePRServ()

    def _baseconfig_set(self, value):
        if value and not self.baseconfig_valid:
            bb.server.process.serverlog("Base config valid")
        elif not value and self.baseconfig_valid:
            bb.server.process.serverlog("Base config invalidated")
        self.baseconfig_valid = value

    def _parsecache_set(self, value):
        if value and not self.parsecache_valid:
            bb.server.process.serverlog("Parse cache valid")
        elif not value and self.parsecache_valid:
            bb.server.process.serverlog("Parse cache invalidated")
        self.parsecache_valid = value

    def add_filewatch(self, deps, configwatcher=False):
        if configwatcher:
            watcher = self.configwatched
        else:
            watcher = self.parsewatched

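        # each entry in deps is a (filename, mtime) pair, as recorded in the
        # datastore's __base_depends/__depends lists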
        for i in deps:
            f = i[0]
            mtime = i[1]
            watcher[f] = mtime

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = State.FORCE_SHUTDOWN
        bb.event._should_exit.set()

    def setFeatures(self, features):
        # we only accept a new feature set if no build is in progress, so we can reset without problems
        if self.state not in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):
        self.state = State.INITIAL
        self.caches_array = []

        sys.path = self.orig_syspath.copy()
        for mod in [*sys.modules]:
            if mod not in self.orig_sysmodules:
                del sys.modules[mod]

        self.configwatched = {}

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        caches_name_array = ['bb.cache:CoreRecipeInfo']
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            caches_name_array.append("bb.cache_extra:HobRecipeInfo")
        if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
            caches_name_array.append("bb.cache:SiggenRecipeInfo")

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.extraconfigdata = {}

        eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
        if not self.configuration.writeeventlog and eventlog:
            self.setupEventLog(eventlog)

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)

        self._baseconfig_set(True)
        self._parsecache_set(False)

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting; check bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    try:
                        with hashserv.create_client(upstream) as client:
                            client.ping()
                    except (ConnectionError, ImportError) as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect to hash equivalence server at '%s': %s"
                                 % (upstream, repr(e)))
                        upstream = None

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process(log_level=logging.WARNING)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

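    # A typical (hypothetical) configuration exercising the code above:
    #   BB_HASHSERVE = "auto"
    #   BB_HASHSERVE_UPSTREAM = "hashserv.example.org:8686"
    # This starts a local hash equivalence server on a unix socket under TOPDIR,
    # proxying to the named upstream if it is reachable.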
    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def revalidateCaches(self):
        bb.parse.clear_cache()

        clean = True
        for f in self.configwatched:
            if not bb.parse.check_mtime(f, self.configwatched[f]):
                bb.server.process.serverlog("Found %s changed, invalid cache" % f)
                self._baseconfig_set(False)
                self._parsecache_set(False)
                clean = False
                break

        if clean:
            for f in self.parsewatched:
                if not bb.parse.check_mtime(f, self.parsewatched[f]):
                    bb.server.process.serverlog("Found %s changed, invalid cache" % f)
                    self._parsecache_set(False)
                    clean = False
                    break

        if not clean:
            bb.parse.BBHandler.cached_statements = {}

        # If writes were made to any of the data stores, we need to recalculate the data
        # store cache
        if hasattr(self, "databuilder"):
            self.databuilder.calc_datastore_hashes()

    def parseConfiguration(self):
        self.updateCacheSync()

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
        self.collections = {}
        for mc in self.multiconfigs:
            self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)

        self._parsecache_set(False)

    def setupEventLog(self, eventlog):
        if self.eventlog and self.eventlog[0] != eventlog:
            bb.event.unregister_UIHhandler(self.eventlog[1])
            self.eventlog = None
        if not self.eventlog or self.eventlog[0] != eventlog:
            # we log all events to a file if so directed
            # register the log file writer as UI Handler
            if not os.path.exists(os.path.dirname(eventlog)):
                bb.utils.mkdirhier(os.path.dirname(eventlog))
            writer = EventWriter(self, eventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)
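            # self.eventlog is a (logfile path, UI handler id, EventWriter) triple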

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            self.setupEventLog(self.configuration.writeeventlog)

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug("Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        self.revalidateCaches()
        if not clean:
            logger.debug("Base environment change, triggering reparse")
            self.reset()

    def showVersions(self):

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                layername = self.collections[mc].calc_bbfile_priority(fn)[2]
                envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if mc not in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wildcard expansion:
        # Replace a string such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1, "Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
            localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved first, so we can get multiconfig indirect dependencies in time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for k in mcdeps:
                    if k in seen:
                        continue
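                    # mcdepends entries take the form "mc:FROM-MC:TO-MC:target:task",
                    # so l[2] below names the multiconfig expected to provide target l[3]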
                    l = k.split(':')
                    depmc = l[2]
                    if depmc not in self.multiconfigs:
                        bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k, depmc))
                    else:
                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                        taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task, halt=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # halt defaults to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if dotname not in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                if depmc:
                    depmc = "mc:" + depmc + ":"
                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if package not in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                # runtime deps/recs are keyed by taskfn, so look them up for this
                # specific recipe rather than reusing values from an earlier one
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        pns = depgraph["pn"].keys()
        if pns:
            with open('pn-buildlist', 'w') as f:
                f.write("%s\n" % "\n".join(sorted(pns)))
            logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        for stale in ['pn-depends.dot', 'package-depends.dot', 'recipe-depends.dot']:
            try:
                os.unlink(stale)
            except FileNotFoundError:
                pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")
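        # the output is plain Graphviz input; e.g. "dot -Tsvg task-depends.dot
        # -o task-depends.svg" renders it, assuming the graphviz tools are installed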

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist_by_mc[mc].keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if appendfn not in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                        '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            bb.fatal("\n".join(msgs))

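    # PREFERRED_PROVIDERS is a space-separated list of "providee:provider"
    # pairs, e.g. (a typical value): "virtual/kernel:linux-yocto"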
    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except ValueError:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def testCookerCommandEvent(self, filepattern):
        # Dummy command used by OEQA selftest to test tinfoil without IO
        matches = ["A", "B"]
        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            if eligible:
                filename = eligible[0]
            else:
                filename = None
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
            if required and preferred_file is None:
                return None, None, None, None
            return (latest, latest_f, preferred_ver, preferred_file)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if a class is passed, ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode(self):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start(self)

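    # handleCollections() below consumes the layer metadata that each layer's
    # conf/layer.conf normally sets, e.g. (hypothetical layer name):
    #   BBFILE_COLLECTIONS += "meta-example"
    #   BBFILE_PATTERN_meta-example = "^${LAYERDIR}/"
    #   BBFILE_PRIORITY_meta-example = "6"
    #   LAYERDEPENDS_meta-example = "core"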
    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1, 'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                        if min_prio == 0 or prio < min_prio:
                            min_prio = prio
                        collection_priorities[c] = prio
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                        collection_priorities[c] = None
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    parselog.debug("BBFILE_PATTERN_%s is empty" % c)
                    cre = re.compile('^NULL$')
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        t = time.gmtime()
        for mc in self.databuilder.mcdata:
            ds = self.databuilder.mcdata[mc]
            if not ds.getVar("BUILDNAME", False):
                ds.setVar("BUILDNAME", "${DATE}${TIME}")
            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
            ds.setVar("DATE", time.strftime('%Y%m%d', t))
            ds.setVar("TIME", time.strftime('%H%M%S', t))

    def reset_mtime_caches(self):
        """
        Reset mtime caches - this is particularly important when memory resident as something
        which is cached may well have changed since the last invocation (e.g. a
        file associated with a recipe might have been modified by the user).
        """
        build.reset_cache()
        bb.fetch._checksum_cache.mtime_cache.clear()
        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
        if siggen_cache:
            bb.parse.siggen.checksum_cache.mtime_cache.clear()

1267    def matchFiles(self, bf, mc=''):
1268        """
1269        Find the .bb files which match the expression in 'bf'.
1270        """
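            # (Illustrative) an existing path such as "recipes/foo_1.0.bb" is
            # returned directly, while a non-existent argument is treated as a
            # regular expression, e.g. "foo_.*\.bb" returns every collected
            # recipe file whose path matches it.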
1271        if bf.startswith("/") or bf.startswith("../"):
1272            bf = os.path.abspath(bf)
1273
1274        collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1275        filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1276        try:
1277            os.stat(bf)
1278            bf = os.path.abspath(bf)
1279            return [bf]
1280        except OSError:
1281            regexp = re.compile(bf)
1282            matches = []
1283            for f in filelist:
1284                if regexp.search(f) and os.path.isfile(f):
1285                    matches.append(f)
1286            return matches
1287
1288    def matchFile(self, buildfile, mc=''):
1289        """
1290        Find the .bb file which matches the expression in 'buildfile'.
1291        Raise an error if no file or multiple files match.
1292        """
1293        matches = self.matchFiles(buildfile, mc)
1294        if len(matches) != 1:
1295            if matches:
1296                msg = "Unable to match '%s' to a specific recipe file - %d matches found:" % (buildfile, len(matches))
1298                for f in matches:
1299                    msg += "\n    %s" % f
1300                parselog.error(msg)
1301            else:
1302                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1303            raise NoSpecificMatch
1304        return matches[0]
1305
1306    def buildFile(self, buildfile, task):
1307        """
1308        Build the file matching regexp buildfile
1309        """
1310        bb.event.fire(bb.event.BuildInit(), self.data)
1311
1312        # Too many people use -b because they think it's how you normally
1313        # specify a target to be built, so show a warning
1314        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1315
1316        self.buildFileInternal(buildfile, task)
1317
1318    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1319        """
1320        Build the file matching regexp buildfile
1321        """
1322
1323        # Parse the configuration here. We need to do it explicitly here since
1324        # buildFile() doesn't use the cache
1325        self.parseConfiguration()
1326
1327        # If we are told to do the None task then query the default task
1328        if task is None:
1329            task = self.configuration.cmd
1330        if not task.startswith("do_"):
1331            task = "do_%s" % task
1332
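            # (Illustrative) a "virtual" filename encodes the multiconfig and
            # class extension: virtualfn2realfn("mc:foo:virtual:native:/p/bar.bb")
            # yields ("/p/bar.bb", "native", "foo"), while a plain path comes
            # back unchanged with an empty cls and mc.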
1333        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
1334        fn = self.matchFile(fn, mc)
1335
1336        self.buildSetVars()
1337        self.reset_mtime_caches()
1338
1339        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
1340
1341        layername = self.collections[mc].calc_bbfile_priority(fn)[2]
1342        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
1343        infos = dict(infos)
1344
1345        fn = bb.cache.realfn2virtual(fn, cls, mc)
1346        try:
1347            info_array = infos[fn]
1348        except KeyError:
1349            bb.fatal("%s does not exist" % fn)
1350
1351        if info_array[0].skipped:
1352            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1353
1354        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
1355
1356        # Tweak some variables
1357        item = info_array[0].pn
1358        self.recipecaches[mc].ignored_dependencies = set()
1359        self.recipecaches[mc].bbfile_priority[fn] = 1
1360        self.configuration.limited_deps = True
1361
1362        # Remove external dependencies
1363        self.recipecaches[mc].task_deps[fn]['depends'] = {}
1364        self.recipecaches[mc].deps[fn] = []
1365        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1366        self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1367
1368        bb.parse.siggen.setup_datacache(self.recipecaches)
1369
1370        # Invalidate task for target if force mode active
1371        if self.configuration.force:
1372            logger.verbose("Invalidate task %s, %s", task, fn)
1373            bb.parse.siggen.invalidate_task(task, fn)
1374
1375        # Setup taskdata structure
1376        taskdata = {}
1377        taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
1378        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1379
1380        if quietlog:
1381            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1382            bb.runqueue.logger.setLevel(logging.WARNING)
1383
1384        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1385        if fireevents:
1386            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1387            if self.eventlog:
1388                self.eventlog[2].write_variables()
1389            bb.event.enable_heartbeat()
1390
1391        # Execute the runqueue
1392        runlist = [[mc, item, task, fn]]
1393
1394        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1395
1396        def buildFileIdle(server, rq, halt):
1398            msg = None
1399            interrupted = 0
1400            if halt or self.state == State.FORCE_SHUTDOWN:
1401                rq.finish_runqueue(True)
1402                msg = "Forced shutdown"
1403                interrupted = 2
1404            elif self.state == State.SHUTDOWN:
1405                rq.finish_runqueue(False)
1406                msg = "Stopped build"
1407                interrupted = 1
1408            failures = 0
1409            try:
1410                retval = rq.execute_runqueue()
1411            except runqueue.TaskFailure as exc:
1412                failures += len(exc.args)
1413                retval = False
1414            except SystemExit as exc:
1415                if quietlog:
1416                    bb.runqueue.logger.setLevel(rqloglevel)
1417                return bb.server.process.idleFinish(str(exc))
1418
1419            if not retval:
1420                if fireevents:
1421                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1422                    bb.event.disable_heartbeat()
1423                # We trashed self.recipecaches above
1424                self._parsecache_set(False)
1425                self.configuration.limited_deps = False
1426                bb.parse.siggen.reset(self.data)
1427                if quietlog:
1428                    bb.runqueue.logger.setLevel(rqloglevel)
1429                return bb.server.process.idleFinish(msg)
1430            if retval is True:
1431                return True
1432            return retval
1433
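            # Idle-handler contract, as a sketch (see bb/server/process.py for
            # the authoritative handling): returning True means "call me again",
            # returning idleFinish(msg) ends the work with msg describing any
            # failure, and other return values are handed back to the server's
            # idle loop.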
1434        self.idleCallBackRegister(buildFileIdle, rq)
1435
1436    def getTaskSignatures(self, target, tasks):
1437        sig = []
1438        getAllTaskSignatures = False
1439
1440        if not tasks:
1441            tasks = ["do_build"]
1442            getAllTaskSignatures = True
1443
1444        for task in tasks:
1445            taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
1446            rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1447            rq.rqdata.prepare()
1448
1449            for l in runlist:
1450                mc, pn, taskname, fn = l
1451
1452                taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
1453                for t in taskdep['tasks']:
1454                    if t in taskdep['nostamp'] or "setscene" in t:
1455                        continue
1456                    tid = bb.runqueue.build_tid(mc, fn, t)
1457
1458                    if t in task or getAllTaskSignatures:
1459                        try:
1460                            sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
1461                        except KeyError:
1462                            sig.append(self.getTaskSignatures(target, [t])[0])
1463
1464        return sig
1465
1466    def buildTargets(self, targets, task):
1467        """
1468        Attempt to build the targets specified
1469        """
1470
1471        def buildTargetsIdle(server, rq, halt):
1472            msg = None
1473            interrupted = 0
1474            if halt or self.state == State.FORCE_SHUTDOWN:
1475                bb.event._should_exit.set()
1476                rq.finish_runqueue(True)
1477                msg = "Forced shutdown"
1478                interrupted = 2
1479            elif self.state == State.SHUTDOWN:
1480                rq.finish_runqueue(False)
1481                msg = "Stopped build"
1482                interrupted = 1
1483            failures = 0
1484            try:
1485                retval = rq.execute_runqueue()
1486            except runqueue.TaskFailure as exc:
1487                failures += len(exc.args)
1488                retval = False
1489            except SystemExit as exc:
1490                return bb.server.process.idleFinish(str(exc))
1491
1492            if not retval:
1493                try:
1494                    for mc in self.multiconfigs:
1495                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1496                finally:
1497                    bb.event.disable_heartbeat()
1498                return bb.server.process.idleFinish(msg)
1499
1500            if retval is True:
1501                return True
1502            return retval
1503
1504        self.reset_mtime_caches()
1505        self.buildSetVars()
1506
1507        # If we are told to do the None task then query the default task
1508        if task is None:
1509            task = self.configuration.cmd
1510
1511        if not task.startswith("do_"):
1512            task = "do_%s" % task
1513
1514        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1515
1516        bb.event.fire(bb.event.BuildInit(packages), self.data)
1517
1518        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
1519
1520        buildname = self.data.getVar("BUILDNAME", False)
1521
1522        # Make each target always appear as <target>:do_<task>
1523        ntargets = []
1524        for target in runlist:
1525            if target[0]:
1526                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
1527            ntargets.append("%s:%s" % (target[1], target[2]))
1528
1529        for mc in self.multiconfigs:
1530            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
1531        if self.eventlog:
1532            self.eventlog[2].write_variables()
1533        bb.event.enable_heartbeat()
1534
1535        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1536        if 'universe' in targets:
1537            rq.rqdata.warn_multi_bb = True
1538
1539        self.idleCallBackRegister(buildTargetsIdle, rq)
1540
1541
1542    def getAllKeysWithFlags(self, flaglist):
1543        def dummy_autorev(d):
1544            return
1545
1546        dump = {}
1547        # Horrible, but for now we need to avoid any side effects of autorev being called
1548        saved = bb.fetch2.get_autorev
1549        bb.fetch2.get_autorev = dummy_autorev
1550        for k in self.data.keys():
1551            try:
1552                expand = True
1553                flags = self.data.getVarFlags(k)
1554                if flags and "func" in flags and "python" in flags:
1555                    expand = False
1556                v = self.data.getVar(k, expand)
1557                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1558                    dump[k] = {
1559                        'v': str(v),
1560                        'history': self.data.varhistory.variable(k),
1561                    }
1562                    for d in flaglist:
1563                        if flags and d in flags:
1564                            dump[k][d] = flags[d]
1565                        else:
1566                            dump[k][d] = None
1567            except Exception as e:
1568                logger.warning("Exception while dumping variable %s: %s", k, e)
1569        bb.fetch2.get_autorev = saved
1570        return dump
1571
1572
1573    def updateCacheSync(self):
1574        if self.state == State.RUNNING:
1575            return
1576
1577        if not self.baseconfig_valid:
1578            logger.debug("Reloading base configuration data")
1579            self.initConfigurationData()
1580            self.handlePRServ()
1581
1582    # This is called for all async commands when self.state != State.RUNNING
1583    def updateCache(self):
1584        if self.state == State.RUNNING:
1585            return
1586
1587        if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
1588            if hasattr(self.parser, 'shutdown'):
1589                self.parser.shutdown(clean=False)
1590                self.parser.final_cleanup()
1591            raise bb.BBHandledException()
1592
1593        if self.state != State.PARSING:
1594            self.updateCacheSync()
1595
1596        if self.state != State.PARSING and not self.parsecache_valid:
1597            bb.server.process.serverlog("Parsing started")
1598            self.parsewatched = {}
1599
1600            bb.parse.siggen.reset(self.data)
1601            self.parseConfiguration()
1602            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1603                for mc in self.multiconfigs:
1604                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
1605
1606            for mc in self.multiconfigs:
1607                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
1608                self.recipecaches[mc].ignored_dependencies = set(ignore.split())
1609
1610                for dep in self.configuration.extra_assume_provided:
1611                    self.recipecaches[mc].ignored_dependencies.add(dep)
1612
1613            mcfilelist = {}
1614            total_masked = 0
1615            searchdirs = set()
1616            for mc in self.multiconfigs:
1617                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1618
1619                mcfilelist[mc] = filelist
1620                total_masked += masked
1621                searchdirs |= set(search)
1622
1623            # Add mtimes for directories searched for bb/bbappend files
1624            for dirent in searchdirs:
1625                self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
1626
1627            self.parser = CookerParser(self, mcfilelist, total_masked)
1628            self._parsecache_set(True)
1629
1630        self.state = State.PARSING
1631
1632        if not self.parser.parse_next():
1633            collectlog.debug("parsing complete")
1634            if self.parser.error:
1635                raise bb.BBHandledException()
1636            self.show_appends_with_no_recipes()
1637            self.handlePrefProviders()
1638            for mc in self.multiconfigs:
1639                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1640            self.state = State.RUNNING
1641
1642            # Send an event listing all stamps reachable after parsing
1643            # which the metadata may use to clean up stale data
1644            for mc in self.multiconfigs:
1645                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1646                bb.event.fire(event, self.databuilder.mcdata[mc])
1647            return None
1648
1649        return True
1650
1651    def checkPackages(self, pkgs_to_build, task=None):
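        # As an illustration (hypothetical targets): checkPackages(['world'])
        # replaces 'world' with every buildable world target, e.g.
        # ['busybox', 'mc:extra:zlib'], and 'universe' expands to every known
        # target, filtered to those providing the requested task.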
1652
1653        # Return a copy, don't modify the original
1654        pkgs_to_build = pkgs_to_build[:]
1655
1656        if not pkgs_to_build:
1657            raise NothingToBuild
1658
1659        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
1660        for pkg in pkgs_to_build.copy():
1661            if pkg in ignore:
1662                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1663            if pkg.startswith("multiconfig:"):
1664                pkgs_to_build.remove(pkg)
1665                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
1666
1667        if 'world' in pkgs_to_build:
1668            pkgs_to_build.remove('world')
1669            for mc in self.multiconfigs:
1670                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1671                for t in self.recipecaches[mc].world_target:
1672                    if mc:
1673                        t = "mc:" + mc + ":" + t
1674                    pkgs_to_build.append(t)
1675
1676        if 'universe' in pkgs_to_build:
1677            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1678            parselog.debug("collating packages for \"universe\"")
1679            pkgs_to_build.remove('universe')
1680            for mc in self.multiconfigs:
1681                for t in self.recipecaches[mc].universe_target:
1682                    if task:
1683                        foundtask = False
1684                        for provider_fn in self.recipecaches[mc].providers[t]:
1685                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1686                                foundtask = True
1687                                break
1688                        if not foundtask:
1689                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1690                            continue
1691                    if mc:
1692                        t = "mc:" + mc + ":" + t
1693                    pkgs_to_build.append(t)
1694
1695        return pkgs_to_build
1696
1697    def pre_serve(self):
1698        return
1699
1700    def post_serve(self):
1701        self.shutdown(force=True)
1702        prserv.serv.auto_shutdown()
1703        if hasattr(bb.parse, "siggen"):
1704            bb.parse.siggen.exit()
1705        if self.hashserv:
1706            self.hashserv.process.terminate()
1707            self.hashserv.process.join()
1708        if hasattr(self, "data"):
1709            bb.event.fire(CookerExit(), self.data)
1710
1711    def shutdown(self, force=False):
1712        if force:
1713            self.state = State.FORCE_SHUTDOWN
1714            bb.event._should_exit.set()
1715        else:
1716            self.state = State.SHUTDOWN
1717
1718        if self.parser:
1719            self.parser.shutdown(clean=False)
1720            self.parser.final_cleanup()
1721
1722    def finishcommand(self):
1723        if hasattr(self.parser, 'shutdown'):
1724            self.parser.shutdown(clean=False)
1725            self.parser.final_cleanup()
1726        self.state = State.INITIAL
1727        bb.event._should_exit.clear()
1728
1729    def reset(self):
1730        if hasattr(bb.parse, "siggen"):
1731            bb.parse.siggen.exit()
1732        self.finishcommand()
1733        self.initConfigurationData()
1734        self.handlePRServ()
1735
1736    def clientComplete(self):
1737        """Called when the client is done using the server"""
1738        self.finishcommand()
1739        self.extraconfigdata = {}
1740        self.command.reset()
1741        if hasattr(self, "data"):
1742            self.databuilder.reset()
1743            self.data = self.databuilder.data
1744        # In theory tinfoil could have modified the base data before parsing,
1745        # ideally need to track if anything did modify the datastore
1746        self._parsecache_set(False)
1747
1748class CookerExit(bb.event.Event):
1749    """
1750    Notify clients of the Cooker shutdown
1751    """
1752
1753    def __init__(self):
1754        bb.event.Event.__init__(self)
1755
1756
1757class CookerCollectFiles(object):
1758    def __init__(self, priorities, mc=''):
1759        self.mc = mc
1760        self.bbappends = []
1761        # Priorities is a list of tuples, with the second element as the pattern.
1762        # We need to sort the list with the longest pattern first, and so on to
1763        # the shortest.  This allows nested layers to be properly evaluated.
1764        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
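            # For example (hypothetical patterns): with both "^/layers/meta-a/"
            # and "^/layers/meta-a/meta-b/" configured, the longer nested
            # pattern sorts first, so files under meta-b match their own
            # collection rather than the enclosing meta-a one.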
1765
1766    def calc_bbfile_priority(self, filename):
1767        for layername, _, regex, pri in self.bbfile_config_priorities:
1768            if regex.match(filename):
1769                return pri, regex, layername
1770        return 0, None, None
1771
1772    def get_bbfiles(self):
1773        """Get list of default .bb files by reading out the current directory"""
1774        path = os.getcwd()
1775        contents = os.listdir(path)
1776        bbfiles = []
1777        for f in contents:
1778            if f.endswith(".bb"):
1779                bbfiles.append(os.path.abspath(os.path.join(path, f)))
1780        return bbfiles
1781
1782    def find_bbfiles(self, path):
1783        """Find all the .bb and .bbappend files in a directory"""
1784        found = []
1785        for dir, dirs, files in os.walk(path):
1786            for ignored in ('SCCS', 'CVS', '.svn'):
1787                if ignored in dirs:
1788                    dirs.remove(ignored)
1789            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1790
1791        return found
1792
1793    def collect_bbfiles(self, config, eventdata):
1794        """Collect all available .bb build files"""
1795        masked = 0
1796
1797        collectlog.debug("collecting .bb files")
1798
1799        files = (config.getVar("BBFILES") or "").split()
1800
1801        # Sort files by priority
1802        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0])
1803        config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1804
1805        if not files:
1806            files = self.get_bbfiles()
1807
1808        if not files:
1809            collectlog.error("no recipe files to build, check your BBPATH and BBFILES settings")
1810            bb.event.fire(CookerExit(), eventdata)
1811
1812        # We need to track where we look so that we can know when the cache is invalid.
1813        # There is no nice way to do this and it is horrid: we intercept the os.listdir()
1814        # and os.scandir() calls while we run glob().
1815        origlistdir = os.listdir
1816        if hasattr(os, 'scandir'):
1817            origscandir = os.scandir
1818        searchdirs = []
1819
1820        def ourlistdir(d):
1821            searchdirs.append(d)
1822            return origlistdir(d)
1823
1824        def ourscandir(d):
1825            searchdirs.append(d)
1826            return origscandir(d)
1827
1828        os.listdir = ourlistdir
1829        if hasattr(os, 'scandir'):
1830            os.scandir = ourscandir
1831        try:
1832            # Can't use set here as order is important
1833            newfiles = []
1834            for f in files:
1835                if os.path.isdir(f):
1836                    dirfiles = self.find_bbfiles(f)
1837                    for g in dirfiles:
1838                        if g not in newfiles:
1839                            newfiles.append(g)
1840                else:
1841                    globbed = glob.glob(f)
1842                    if not globbed and os.path.exists(f):
1843                        globbed = [f]
1844                    # glob gives files in order on disk. Sort to be deterministic.
1845                    for g in sorted(globbed):
1846                        if g not in newfiles:
1847                            newfiles.append(g)
1848        finally:
1849            os.listdir = origlistdir
1850            if hasattr(os, 'scandir'):
1851                os.scandir = origscandir
1852
1853        bbmask = config.getVar('BBMASK')
1854
1855        if bbmask:
1856            # First validate the individual regular expressions and ignore any
1857            # that do not compile
1858            bbmasks = []
1859            for mask in bbmask.split():
1860                # When constructing an older style single regex, it's possible for BBMASK
1861                # to end up beginning with '|', which matches and masks _everything_.
1862                if mask.startswith("|"):
1863                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1864                    mask = mask[1:]
1865                try:
1866                    re.compile(mask)
1867                    bbmasks.append(mask)
1868                except re.error:
1869                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1870
1871            # Then validate the combined regular expressions. This should never
1872            # fail, but better safe than sorry...
1873            bbmask = "|".join(bbmasks)
1874            try:
1875                bbmask_compiled = re.compile(bbmask)
1876            except re.error:
1877                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1878                bbmask = None
1879
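            # Illustrative BBMASK (hypothetical value): "meta-foo/ recipes-bad/["
            # drops the uncompilable "recipes-bad/[" entry and leaves a combined
            # regex of "meta-foo/", masking every collected path containing that
            # substring.
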
1880        bbfiles = []
1881        bbappend = []
1882        for f in newfiles:
1883            if bbmask and bbmask_compiled.search(f):
1884                collectlog.debug("skipping masked file %s", f)
1885                masked += 1
1886                continue
1887            if f.endswith('.bb'):
1888                bbfiles.append(f)
1889            elif f.endswith('.bbappend'):
1890                bbappend.append(f)
1891            else:
1892                collectlog.debug("skipping %s: unknown file extension", f)
1893
1894        # Build a list of .bbappend files for each .bb file
1895        for f in bbappend:
1896            base = os.path.basename(f).replace('.bbappend', '.bb')
1897            self.bbappends.append((base, f))
1898
1899        # Find overlayed recipes
1900        # bbfiles will be in priority order which makes this easy
1901        bbfile_seen = dict()
1902        self.overlayed = defaultdict(list)
1903        for f in reversed(bbfiles):
1904            base = os.path.basename(f)
1905            if base not in bbfile_seen:
1906                bbfile_seen[base] = f
1907            else:
1908                topfile = bbfile_seen[base]
1909                self.overlayed[topfile].append(f)
1910
1911        return (bbfiles, masked, searchdirs)
1912
1913    def get_file_appends(self, fn):
1914        """
1915        Returns a list of .bbappend files to apply to fn
1916        """
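            # A '%' in a bbappend name is a wildcard for the rest of the
            # version, e.g. (illustrative) "busybox_%.bbappend" applies to
            # "busybox_1.36.1.bb" because both share the "busybox_" prefix.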
1917        filelist = []
1918        f = os.path.basename(fn)
1919        for b in self.bbappends:
1920            (bbappend, filename) = b
1921            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1922                filelist.append(filename)
1923        return tuple(filelist)
1924
1925    def collection_priorities(self, pkgfns, fns, d):
1926        # Return the priorities of the entries in pkgfns
1927        # Also check that all the regexes in self.bbfile_config_priorities are used
1928        # (but to do that we need to ensure skipped recipes aren't counted, nor
1929        # collections in BBFILE_PATTERN_IGNORE_EMPTY)
1930
1931        priorities = {}
1932        seen = set()
1933        matched = set()
1934
1935        matched_regex = set()
1936        unmatched_regex = set()
1937        for _, _, regex, _ in self.bbfile_config_priorities:
1938            unmatched_regex.add(regex)
1939
1940        # Calculate priorities for each file
1941        for p in pkgfns:
1942            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1943            priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
1944            if regex in unmatched_regex:
1945                matched_regex.add(regex)
1946                unmatched_regex.remove(regex)
1947            seen.add(realfn)
1948            if regex:
1949                matched.add(realfn)
1950
1951        if unmatched_regex:
1952            # Account for bbappend files
1953            for b in self.bbappends:
1954                (bbfile, append) = b
1955                seen.add(append)
1956
1957            # Account for skipped recipes
1958            seen.update(fns)
1959
1960            seen.difference_update(matched)
1961
1962            def already_matched(fn):
1963                for regex in matched_regex:
1964                    if regex.match(fn):
1965                        return True
1966                return False
1967
1968            for unmatch in unmatched_regex.copy():
1969                for fn in seen:
1970                    if unmatch.match(fn):
1971                        # If the bbappend or file was already matched by another regex, skip it
1972                        # e.g. for a layer within a layer, the outer regex could match, the inner
1973                        # regex may match nothing and we should warn about that
1974                        if already_matched(fn):
1975                            continue
1976                        unmatched_regex.remove(unmatch)
1977                        break
1978
1979        for collection, pattern, regex, _ in self.bbfile_config_priorities:
1980            if regex in unmatched_regex:
1981                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
1982                    collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1983                                                                                             collection, pattern))
1984
1985        return priorities
1986
1987class ParsingFailure(Exception):
1988    def __init__(self, realexception, recipe):
1989        self.realexception = realexception
1990        self.recipe = recipe
1991        Exception.__init__(self, realexception, recipe)
1992
1993class Parser(multiprocessing.Process):
1994    def __init__(self, jobs, results, quit, profile):
1995        self.jobs = jobs
1996        self.results = results
1997        self.quit = quit
1998        multiprocessing.Process.__init__(self)
1999        self.context = bb.utils.get_context().copy()
2000        self.handlers = bb.event.get_class_handlers().copy()
2001        self.profile = profile
2002        self.queue_signals = False
2003        self.signal_received = []
2004        self.signal_threadlock = threading.Lock()
2005
2006    def catch_sig(self, signum, frame):
2007        if self.queue_signals:
2008            self.signal_received.append(signum)
2009        else:
2010            self.handle_sig(signum, frame)
2011
2012    def handle_sig(self, signum, frame):
2013        if signum == signal.SIGTERM:
2014            signal.signal(signal.SIGTERM, signal.SIG_DFL)
2015            os.kill(os.getpid(), signal.SIGTERM)
2016        elif signum == signal.SIGINT:
2017            signal.default_int_handler(signum, frame)
2018
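        # A sketch of the intended use of catch_sig/handle_sig (see
        # server/process.py for the real code): a writer holding the event
        # stream lock defers signals around its critical section, roughly:
        #
        #     with parser.signal_threadlock:
        #         parser.queue_signals = True
        #         do_critical_write()
        #         parser.queue_signals = False
        #         for sig in parser.signal_received:
        #             parser.handle_sig(sig, None)
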
2019    def run(self):
2020
2021        if not self.profile:
2022            self.realrun()
2023            return
2024
2025        try:
2026            import cProfile as profile
2027        except ImportError:
2028            import profile
2029        prof = profile.Profile()
2030        try:
2031            profile.Profile.runcall(prof, self.realrun)
2032        finally:
2033            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2034            prof.dump_stats(logfile)
2035
2036    def realrun(self):
2037        # Signal handling here is hard. We must not terminate any process or thread holding the write
2038        # lock for the event stream as it will not be released, ever, and things will hang.
2039        # Python handles signals in the main thread/process but they can be raised from any thread and
2040        # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2041        # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2042        # new thread should also do so) and we defer handling but we handle with the local thread lock
2043        # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2044        # can be in the critical section.
2045        signal.signal(signal.SIGTERM, self.catch_sig)
2046        signal.signal(signal.SIGHUP, signal.SIG_DFL)
2047        signal.signal(signal.SIGINT, self.catch_sig)
2048        bb.utils.set_process_name(multiprocessing.current_process().name)
2049        multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2050        multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2051
2052        pending = []
2053        havejobs = True
2054        try:
2055            while havejobs or pending:
2056                if self.quit.is_set():
2057                    break
2058
2059                job = None
2060                try:
2061                    job = self.jobs.pop()
2062                except IndexError:
2063                    havejobs = False
2064                if job:
2065                    result = self.parse(*job)
2066                    # Clear the siggen cache after parsing to control memory usage; it's huge
2067                    bb.parse.siggen.postparsing_clean_cache()
2068                    pending.append(result)
2069
2070                if pending:
2071                    try:
2072                        result = pending.pop()
2073                        self.results.put(result, timeout=0.05)
2074                    except queue.Full:
2075                        pending.append(result)
2076        finally:
2077            self.results.close()
2078            self.results.join_thread()
2079
2080    def parse(self, mc, cache, filename, appends, layername):
2081        try:
2082            origfilter = bb.event.LogHandler.filter
2083            # Record the filename we're parsing into any events generated
2084            def parse_filter(self, record):
2085                record.taskpid = bb.event.worker_pid
2086                record.fn = filename
2087                return True
2088
2089            # Reset our environment and handlers to the original settings
2090            bb.utils.set_context(self.context.copy())
2091            bb.event.set_class_handlers(self.handlers.copy())
2092            bb.event.LogHandler.filter = parse_filter
2093
2094            return True, mc, cache.parse(filename, appends, layername)
2095        except Exception as exc:
2097            exc.recipe = filename
2098            return True, None, exc
2099        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
2100        # and a worker thread doesn't just exit on its own in response to, for example,
2101        # a SystemExit event.
2102        except BaseException as exc:
2103            return True, None, ParsingFailure(exc, filename)
2104        finally:
2105            bb.event.LogHandler.filter = origfilter
2106
2107class CookerParser(object):
2108    def __init__(self, cooker, mcfilelist, masked):
2109        self.mcfilelist = mcfilelist
2110        self.cooker = cooker
2111        self.cfgdata = cooker.data
2112        self.cfghash = cooker.databuilder.data_hash
2113        self.cfgbuilder = cooker.databuilder
2114
2115        # Accounting statistics
2116        self.parsed = 0
2117        self.cached = 0
2118        self.error = 0
2119        self.masked = masked
2120
2121        self.skipped = 0
2122        self.virtuals = 0
2123
2124        self.current = 0
2125        self.process_names = []
2126
2127        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2128        self.fromcache = set()
2129        self.willparse = set()
2130        for mc in self.cooker.multiconfigs:
2131            for filename in self.mcfilelist[mc]:
2132                appends = self.cooker.collections[mc].get_file_appends(filename)
2133                layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2134                if not self.bb_caches[mc].cacheValid(filename, appends):
2135                    self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
2136                else:
2137                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2138
2139        self.total = len(self.fromcache) + len(self.willparse)
2140        self.toparse = len(self.willparse)
2141        self.progress_chunk = int(max(self.toparse / 100, 1))
2142
2143        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2144                                 multiprocessing.cpu_count()), self.toparse)
2145
2146        bb.cache.SiggenRecipeInfo.reset()
2147        self.start()
2148        self.haveshutdown = False
2149        self.syncthread = None
2150
2151    def start(self):
2152        self.results = self.load_cached()
2153        self.processes = []
2154        if self.toparse:
2155            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2156
2157            self.parser_quit = multiprocessing.Event()
2158            self.result_queue = multiprocessing.Queue()
2159
2160            def chunkify(lst, n):
2161                return [lst[i::n] for i in range(n)]
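                # chunkify deals jobs out round-robin, e.g.
                # chunkify([0, 1, 2, 3, 4], 2) -> [[0, 2, 4], [1, 3]], so each
                # parser process receives a similar share of the work.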
2162            self.jobs = chunkify(list(self.willparse), self.num_processes)
2163
2164            for i in range(0, self.num_processes):
2165                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2166                parser.start()
2167                self.process_names.append(parser.name)
2168                self.processes.append(parser)
2169
2170            self.results = itertools.chain(self.results, self.parse_generator())
2171
2172    def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
2173        if not self.toparse:
2174            return
2175        if self.haveshutdown:
2176            return
2177        self.haveshutdown = True
2178
2179        if clean:
2180            event = bb.event.ParseCompleted(self.cached, self.parsed,
2181                                            self.skipped, self.masked,
2182                                            self.virtuals, self.error,
2183                                            self.total)
2184
2185            bb.event.fire(event, self.cfgdata)
2186        else:
2187            bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
2188            bb.error("Parsing halted due to errors, see error messages above")
2189
2190        # Cleanup the queue before call process.join(), otherwise there might be
2191        # deadlocks.
2192        while True:
2193            try:
2194                self.result_queue.get(timeout=0.25)
2195            except queue.Empty:
2196                break
2197
2198        def sync_caches():
2199            for c in self.bb_caches.values():
2200                bb.cache.SiggenRecipeInfo.reset()
2201                c.sync()
2202
2203        self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
2204        self.syncthread.start()
2205
2206        self.parser_quit.set()
2207
2208        for process in self.processes:
2209            process.join(0.5)
2210
2211        for process in self.processes:
2212            if process.exitcode is None:
2213                os.kill(process.pid, signal.SIGINT)
2214
2215        for process in self.processes:
2216            process.join(0.5)
2217
2218        for process in self.processes:
2219            if process.exitcode is None:
2220                process.terminate()
2221
2222        for process in self.processes:
2223            process.join()
2224            # clean up zombies
2225            process.close()
2226
2227        bb.codeparser.parser_cache_save()
2228        bb.codeparser.parser_cache_savemerge()
2229        bb.cache.SiggenRecipeInfo.reset()
2230        bb.fetch.fetcher_parse_done()
2231        if self.cooker.configuration.profile:
2232            profiles = []
2233            for i in self.process_names:
2234                logfile = "profile-parse-%s.log" % i
2235                if os.path.exists(logfile) and os.path.getsize(logfile):
2236                    profiles.append(logfile)
2237
2238            if profiles:
2239                pout = "profile-parse.log.processed"
2240                bb.utils.process_profilelog(profiles, pout=pout)
2241                print("Processed parsing statistics saved to %s" % pout)
2242
2243    def final_cleanup(self):
2244        if self.syncthread:
2245            self.syncthread.join()
2246
2247    def load_cached(self):
2248        for mc, cache, filename, appends, layername in self.fromcache:
2249            infos = cache.loadCached(filename, appends)
2250            yield False, mc, infos
2251
2252    def parse_generator(self):
2253        empty = False
2254        while self.processes or not empty:
2255            for process in self.processes.copy():
2256                if not process.is_alive():
2257                    process.join()
2258                    self.processes.remove(process)
2259
2260            if self.parsed >= self.toparse:
2261                break
2262
2263            try:
2264                result = self.result_queue.get(timeout=0.25)
2265            except queue.Empty:
2266                empty = True
2267                yield None, None, None
2268            else:
2269                empty = False
2270                yield result
2271
2272        if self.parsed < self.toparse:
2273            raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2274
2275
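    # parse_next() consumes (parsed, mc, result) triples from self.results:
    # parsed is True for freshly parsed recipes and False for cache hits,
    # and (None, None, None) is the timeout sentinel from parse_generator()
    # that lets the caller's main loop keep servicing events.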
2276    def parse_next(self):
2277        result = []
2278        parsed = None
2279        try:
2280            parsed, mc, result = next(self.results)
2281            if isinstance(result, BaseException):
2282                # Worker results may be exception objects; re-raise them here
2283                raise result
2284            if parsed is None:
2285                # Timeout, loop back through the main loop
2286                return True
2287
2288        except StopIteration:
2289            self.shutdown()
2290            return False
2291        except bb.BBHandledException as exc:
2292            self.error += 1
2293            logger.debug('Failed to parse recipe: %s' % exc.recipe)
2294            self.shutdown(clean=False)
2295            return False
2296        except ParsingFailure as exc:
2297            self.error += 1
2298
2299            exc_desc = str(exc)
2300            if isinstance(exc.realexception, SystemExit) and isinstance(exc.realexception.code, int):
2301                exc_desc = 'Exited with "%d"' % exc.realexception.code
2302
2303            logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
2304            self.shutdown(clean=False)
2305            return False
2306        except bb.parse.ParseError as exc:
2307            self.error += 1
2308            logger.error(str(exc))
2309            self.shutdown(clean=False, eventmsg=str(exc))
2310            return False
2311        except bb.data_smart.ExpansionError as exc:
2312            def skip_frames(f, fn_prefix):
2313                while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
2314                    f = f.tb_next
2315                return f
2316
2317            self.error += 1
2318            bbdir = os.path.dirname(__file__) + os.sep
2319            etype, value, tb = sys.exc_info()
2320
2321            # Remove any frames where the code comes from bitbake. This
2322            # prevents deep (and pretty useless) backtraces for expansion error
2323            tb = skip_frames(tb, bbdir)
2324            cur = tb
2325            while cur:
2326                cur.tb_next = skip_frames(cur.tb_next, bbdir)
2327                cur = cur.tb_next
2328
2329            logger.error('ExpansionError during parsing %s', value.recipe,
2330                         exc_info=(etype, value, tb))
2331            self.shutdown(clean=False)
2332            return False
2333        except Exception as exc:
2334            self.error += 1
2335            _, value, _ = sys.exc_info()
2336            if hasattr(value, "recipe"):
2337                logger.error('Unable to parse %s' % value.recipe,
2338                            exc_info=sys.exc_info())
2339            else:
2340                # Most likely, an exception occurred during raising an exception
2341                import traceback
2342                logger.error('Exception during parse: %s' % traceback.format_exc())
2343            self.shutdown(clean=False)
2344            return False
2345
2346        self.current += 1
2347        self.virtuals += len(result)
2348        if parsed:
2349            self.parsed += 1
2350            if self.parsed % self.progress_chunk == 0:
2351                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2352                              self.cfgdata)
2353        else:
2354            self.cached += 1
2355
2356        for virtualfn, info_array in result:
2357            if info_array[0].skipped:
2358                self.skipped += 1
2359                self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
2360            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2361                                        parsed=parsed, watcher=self.cooker.add_filewatch)
2362        return True
2363
2364    def reparse(self, filename):
2365        bb.cache.SiggenRecipeInfo.reset()
2366        to_reparse = set()
2367        for mc in self.cooker.multiconfigs:
2368            layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2369            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
2370
2371        for mc, filename, appends, layername in to_reparse:
2372            infos = self.bb_caches[mc].parse(filename, appends, layername)
2373            for vfn, info_array in infos:
2374                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
2375