# xref: /openbmc/openbmc/poky/bitbake/lib/bb/cooker.py (revision f1e5d6968976c2341c6d554bfcc8895f1b33c26b)
#
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import enum
import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import json
import pickle
import codecs
import hashserv

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no match, or more than one match, is found for a file
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """


class State(enum.Enum):
    INITIAL = 0
    PARSING = 1
    RUNNING = 2
    SHUTDOWN = 3
    FORCE_SHUTDOWN = 4
    STOPPED = 5
    ERROR = 6


class SkippedPackage:
    def __init__(self, info=None, reason=None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))
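    # The chained assignment above both names each feature id and keeps the
    # complete list for validation, i.e. HOB_EXTRA_CACHES == 0 through
    # RECIPE_SIGGEN_INFO == 3, with _feature_list == [0, 1, 2, 3]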

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        # sets are iterable but not iterators, so wrap in iter() before
        # calling next() (calling next() on the set itself raises TypeError)
        return next(iter(self._features))

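# Illustrative usage (not from the original source): a UI front-end typically
# requests features before parsing starts, e.g.
#
#   features = CookerFeatures()
#   features.setFeature(CookerFeatures.SEND_SANITYEVENTS)
#   assert CookerFeatures.SEND_SANITYEVENTS in features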

class EventWriter:
    def __init__(self, cooker, eventfile):
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_variables(self):
        with open(self.eventfile, "a") as f:
            f.write("%s\n" % json.dumps({"allvariables": self.cooker.getAllKeysWithFlags(["doc", "func"])}))

    def send(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())
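
# Each line of the resulting event file is a standalone JSON object; a
# (hypothetical) entry produced by send() above would look like:
#   {"class": "bb.event.BuildStarted", "vars": "<base64-encoded pickle>"}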


#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, server=None):
        self.recipecaches = None
        self.baseconfig_valid = False
        self.parsecache_valid = False
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.orig_syspath = sys.path.copy()
        self.orig_sysmodules = [*sys.modules]

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.process_server = server
        self.idleCallBackRegister = None
        self.waitIdle = None
        if server:
            self.idleCallBackRegister = server.register_idle_function
            self.waitIdle = server.wait_for_idle

        bb.debug(1, "BBCooker starting %s" % time.time())

        self.configwatched = {}
        self.parsewatched = {}

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self, self.process_server)
        self.state = State.INITIAL

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            self.handlePRServ()

    def _baseconfig_set(self, value):
        if value and not self.baseconfig_valid:
            bb.server.process.serverlog("Base config valid")
        elif not value and self.baseconfig_valid:
            bb.server.process.serverlog("Base config invalidated")
        self.baseconfig_valid = value

    def _parsecache_set(self, value):
        if value and not self.parsecache_valid:
            bb.server.process.serverlog("Parse cache valid")
        elif not value and self.parsecache_valid:
            bb.server.process.serverlog("Parse cache invalidated")
        self.parsecache_valid = value

    def add_filewatch(self, deps, configwatcher=False):
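        # 'deps' is an iterable of (filename, mtime) pairs, e.g. the entries
        # recorded in the __depends / __base_depends datastore variables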
        if configwatcher:
            watcher = self.configwatched
        else:
            watcher = self.parsewatched

        for i in deps:
            f = i[0]
            mtime = i[1]
            watcher[f] = mtime

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = State.FORCE_SHUTDOWN
        bb.event._should_exit.set()

    def setFeatures(self, features):
        # we only accept a new feature set if we're in a state where a reset
        # is safe, i.e. initial or one of the stopped/error states
        if self.state not in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):
        self.state = State.INITIAL
        self.caches_array = []

        sys.path = self.orig_syspath.copy()
        for mod in [*sys.modules]:
            if mod not in self.orig_sysmodules:
                del sys.modules[mod]

        self.configwatched = {}

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        caches_name_array = ['bb.cache:CoreRecipeInfo']
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            caches_name_array.append("bb.cache_extra:HobRecipeInfo")
        if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
            caches_name_array.append("bb.cache:SiggenRecipeInfo")

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.extraconfigdata = {}

        eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
        if not self.configuration.writeeventlog and eventlog:
            self.setupEventLog(eventlog)

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)

        self._baseconfig_set(True)
        self._parsecache_set(False)

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting; check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    try:
                        with hashserv.create_client(upstream) as client:
                            client.ping()
                    except (ConnectionError, ImportError) as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect to hash equivalence server at '%s': %s"
                                 % (upstream, repr(e)))
                        upstream = None

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process(log_level=logging.WARNING)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
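        # Every multiconfig datastore now points BB_HASHSERVE at the local
        # unix-domain socket, e.g. "unix:///path/to/topdir/hashserve.sock"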

        bb.parse.init_parser(self.data)

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def revalidateCaches(self):
        bb.parse.clear_cache()

        clean = True
        for f in self.configwatched:
            if not bb.parse.check_mtime(f, self.configwatched[f]):
                bb.server.process.serverlog("Found %s changed, invalid cache" % f)
                self._baseconfig_set(False)
                self._parsecache_set(False)
                clean = False
                break

        if clean:
            for f in self.parsewatched:
                if not bb.parse.check_mtime(f, self.parsewatched[f]):
                    bb.server.process.serverlog("Found %s changed, invalid cache" % f)
                    self._parsecache_set(False)
                    clean = False
                    break

        if not clean:
            bb.parse.BBHandler.cached_statements = {}

        # If writes were made to any of the data stores, we need to recalculate the data
        # store cache
        if hasattr(self, "databuilder"):
            self.databuilder.calc_datastore_hashes()

    def parseConfiguration(self):
        self.updateCacheSync()

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s" % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
        self.collections = {}
        for mc in self.multiconfigs:
            self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)

        self._parsecache_set(False)

    def setupEventLog(self, eventlog):
        if self.eventlog and self.eventlog[0] != eventlog:
            bb.event.unregister_UIHhandler(self.eventlog[1])
            self.eventlog = None
        if not self.eventlog or self.eventlog[0] != eventlog:
            # we log all events to a file if so directed
            # register the log file writer as UI Handler
            if not os.path.exists(os.path.dirname(eventlog)):
                bb.utils.mkdirhier(os.path.dirname(eventlog))
            writer = EventWriter(self, eventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                # Record in the server log too, independent of the UI log level
                bb.server.process.serverlog("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            self.setupEventLog(self.configuration.writeeventlog)

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug("Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        self.revalidateCaches()
        if not clean:
            logger.debug("Base environment change, triggering reparse")
            self.reset()

    def showVersions(self):

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None
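        # e.g. mc_base("mc:foo") returns "foo", while a full target such as
        # "mc:foo:bash" returns None and is resolved via buildTaskData() below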

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                layername = self.collections[mc].calc_bbfile_priority(fn)[2]
                envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if mc not in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace a string such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

        bb.debug(1, "Target list: %s" % (str(fulltargetlist)))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
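            # e.g. "mc:foo:bash:do_compile" yields mc="foo", k="bash" and
            # ktask="compile" at this point; the "do_" prefix is restored below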

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a
        # multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved targets first, so that indirect multiconfig
                # dependencies are picked up in time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for k in mcdeps:
                    if k in seen:
                        continue
                    l = k.split(':')
                    depmc = l[2]
                    if depmc not in self.multiconfigs:
                        bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k, depmc))
                    else:
                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                        taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task, halt=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set halt to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)
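        # Versions throughout this tree are rendered from the (PE, PV, PR)
        # tuples in pkg_pepvpr as "PE:PV-PR" strings, e.g. "0:1.2.3-r0"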

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if dotname not in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                if depmc:
                    depmc = "mc:" + depmc + ":"
                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if package not in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        pns = depgraph["pn"].keys()
        if pns:
            with open('pn-buildlist', 'w') as f:
                f.write("%s\n" % "\n".join(sorted(pns)))
            logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
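        # The emitted file is plain Graphviz; assuming the graphviz tools are
        # installed it can be rendered with, e.g.:
        #   dot -Tsvg task-depends.dot -o task-depends.svg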
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if appendfn not in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n  %s' % (mc if mc else 'default',
                                                                        '\n  '.join(appends_without_recipes[mc])))

        if msgs:
            bb.fatal("\n".join(msgs))

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except ValueError:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
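        # e.g. a path of "/path/to/meta/conf/machine/qemux86.conf" yields the
        # suffix "conf/machine/qemux86.conf" to compare against parsed files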
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. e.g. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def testCookerCommandEvent(self, filepattern):
        # Dummy command used by OEQA selftest to test tinfoil without IO
        matches = ["A", "B"]
        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            # guard against an empty list, not just None, to avoid an IndexError
            if eligible:
                filename = eligible[0]
            else:
                filename = None
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
            if required and preferred_file is None:
                return None, None, None, None
            return (latest, latest_f, preferred_ver, preferred_file)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        e.g. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode(self):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start(self)


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1, 'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                        # fall back to a neutral priority so the bookkeeping
                        # below stays defined; parsing still aborts on errors
                        prio = 0
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
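                    # i.e. one above the highest-priority dependency; e.g. a
                    # layer depending only on a priority-5 layer gets priority 6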
                    parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    parselog.debug("BBFILE_PATTERN_%s is empty" % c)
                    cre = re.compile('^NULL$')
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1240
1241    def buildSetVars(self):
1242        """
1243        Setup any variables needed before starting a build
1244        """
1245        t = time.gmtime()
1246        for mc in self.databuilder.mcdata:
1247            ds = self.databuilder.mcdata[mc]
1248            if not ds.getVar("BUILDNAME", False):
1249                ds.setVar("BUILDNAME", "${DATE}${TIME}")
1250            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1251            ds.setVar("DATE", time.strftime('%Y%m%d', t))
1252            ds.setVar("TIME", time.strftime('%H%M%S', t))
1253
1254    def reset_mtime_caches(self):
1255        """
        Reset mtime caches - this is particularly important when memory resident, as
        cached entries may well have changed since the last invocation (e.g. a file
        associated with a recipe might have been modified by the user).
1259        """
1260        build.reset_cache()
1261        bb.fetch._checksum_cache.mtime_cache.clear()
1262        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1263        if siggen_cache:
1264            bb.parse.siggen.checksum_cache.mtime_cache.clear()
1265
1266    def matchFiles(self, bf, mc=''):
1267        """
        Find the .bb files which match the expression in 'bf'.
1269        """
1270        if bf.startswith("/") or bf.startswith("../"):
1271            bf = os.path.abspath(bf)
1272
1273        collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1274        filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1275        try:
1276            os.stat(bf)
1277            bf = os.path.abspath(bf)
1278            return [bf]
1279        except OSError:
1280            regexp = re.compile(bf)
1281            matches = []
1282            for f in filelist:
1283                if regexp.search(f) and os.path.isfile(f):
1284                    matches.append(f)
1285            return matches
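    # Usage sketch (hypothetical pattern): matchFiles(r'.*/busybox_.*\.bb') first
    # tries the argument as a literal path via os.stat(); when that fails, the
    # argument is compiled as a regular expression and searched against every
    # collected recipe path, returning all matching .bb files.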
1286
1287    def matchFile(self, buildfile, mc=''):
1288        """
1289        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if no file or multiple files match.
1291        """
1292        matches = self.matchFiles(buildfile, mc)
1293        if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %d matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n    %s" % f
                parselog.error(msg)
1300            else:
1301                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1302            raise NoSpecificMatch
1303        return matches[0]
1304
1305    def buildFile(self, buildfile, task):
1306        """
1307        Build the file matching regexp buildfile
1308        """
1309        bb.event.fire(bb.event.BuildInit(), self.data)
1310
1311        # Too many people use -b because they think it's how you normally
1312        # specify a target to be built, so show a warning
1313        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1314
1315        self.buildFileInternal(buildfile, task)
1316
1317    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1318        """
1319        Build the file matching regexp buildfile
1320        """
1321
1322        # Parse the configuration here. We need to do it explicitly here since
1323        # buildFile() doesn't use the cache
1324        self.parseConfiguration()
1325
1326        # If we are told to do the None task then query the default task
1327        if task is None:
1328            task = self.configuration.cmd
1329        if not task.startswith("do_"):
1330            task = "do_%s" % task
1331
1332        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
1333        fn = self.matchFile(fn, mc)
1334
1335        self.buildSetVars()
1336        self.reset_mtime_caches()
1337
1338        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
1339
1340        layername = self.collections[mc].calc_bbfile_priority(fn)[2]
1341        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
1342        infos = dict(infos)
1343
1344        fn = bb.cache.realfn2virtual(fn, cls, mc)
1345        try:
1346            info_array = infos[fn]
1347        except KeyError:
1348            bb.fatal("%s does not exist" % fn)
1349
1350        if info_array[0].skipped:
1351            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1352
1353        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
1354
1355        # Tweak some variables
1356        item = info_array[0].pn
1357        self.recipecaches[mc].ignored_dependencies = set()
1358        self.recipecaches[mc].bbfile_priority[fn] = 1
1359        self.configuration.limited_deps = True
1360
1361        # Remove external dependencies
1362        self.recipecaches[mc].task_deps[fn]['depends'] = {}
1363        self.recipecaches[mc].deps[fn] = []
1364        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1365        self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1366
1367        bb.parse.siggen.setup_datacache(self.recipecaches)
1368
1369        # Invalidate task for target if force mode active
1370        if self.configuration.force:
1371            logger.verbose("Invalidate task %s, %s", task, fn)
1372            bb.parse.siggen.invalidate_task(task, fn)
1373
1374        # Setup taskdata structure
1375        taskdata = {}
1376        taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
1377        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1378
1379        if quietlog:
1380            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1381            bb.runqueue.logger.setLevel(logging.WARNING)
1382
1383        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1384        if fireevents:
1385            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1386            if self.eventlog:
1387                self.eventlog[2].write_variables()
1388            bb.event.enable_heartbeat()
1389
1390        # Execute the runqueue
1391        runlist = [[mc, item, task, fn]]
1392
1393        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1394
1395        def buildFileIdle(server, rq, halt):
1396
1397            msg = None
1398            interrupted = 0
1399            if halt or self.state == State.FORCE_SHUTDOWN:
1400                rq.finish_runqueue(True)
1401                msg = "Forced shutdown"
1402                interrupted = 2
1403            elif self.state == State.SHUTDOWN:
1404                rq.finish_runqueue(False)
1405                msg = "Stopped build"
1406                interrupted = 1
1407            failures = 0
1408            try:
1409                retval = rq.execute_runqueue()
1410            except runqueue.TaskFailure as exc:
1411                failures += len(exc.args)
1412                retval = False
1413            except SystemExit as exc:
1414                if quietlog:
1415                    bb.runqueue.logger.setLevel(rqloglevel)
1416                return bb.server.process.idleFinish(str(exc))
1417
1418            if not retval:
1419                if fireevents:
1420                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1421                    bb.event.disable_heartbeat()
1422                # We trashed self.recipecaches above
1423                self._parsecache_set(False)
1424                self.configuration.limited_deps = False
1425                bb.parse.siggen.reset(self.data)
1426                if quietlog:
1427                    bb.runqueue.logger.setLevel(rqloglevel)
1428                return bb.server.process.idleFinish(msg)
1429            if retval is True:
1430                return True
1431            return retval
1432
1433        self.idleCallBackRegister(buildFileIdle, rq)
1434
1435    def getTaskSignatures(self, target, tasks):
1436        sig = []
1437        getAllTaskSignatures = False
1438
1439        if not tasks:
1440            tasks = ["do_build"]
1441            getAllTaskSignatures = True
1442
1443        for task in tasks:
1444            taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
1445            rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1446            rq.rqdata.prepare()
1447
1448            for l in runlist:
1449                mc, pn, taskname, fn = l
1450
1451                taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
1452                for t in taskdep['tasks']:
1453                    if t in taskdep['nostamp'] or "setscene" in t:
1454                        continue
1455                    tid = bb.runqueue.build_tid(mc, fn, t)
1456
1457                    if t in task or getAllTaskSignatures:
1458                        try:
1459                            sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
1460                        except KeyError:
1461                            sig.append(self.getTaskSignatures(target, [t])[0])
1462
1463        return sig
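    # The result is a list of [pn, taskname, unihash] triples, roughly
    # (illustrative values): [["busybox", "do_fetch", "3fe0..."], ...]. With no
    # explicit tasks, getAllTaskSignatures causes every task of the target to be
    # included, bar nostamp and setscene ones.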
1464
1465    def buildTargets(self, targets, task):
1466        """
1467        Attempt to build the targets specified
1468        """
1469
1470        def buildTargetsIdle(server, rq, halt):
1471            msg = None
1472            interrupted = 0
1473            if halt or self.state == State.FORCE_SHUTDOWN:
1474                bb.event._should_exit.set()
1475                rq.finish_runqueue(True)
1476                msg = "Forced shutdown"
1477                interrupted = 2
1478            elif self.state == State.SHUTDOWN:
1479                rq.finish_runqueue(False)
1480                msg = "Stopped build"
1481                interrupted = 1
1482            failures = 0
1483            try:
1484                retval = rq.execute_runqueue()
1485            except runqueue.TaskFailure as exc:
1486                failures += len(exc.args)
1487                retval = False
1488            except SystemExit as exc:
1489                return bb.server.process.idleFinish(str(exc))
1490
1491            if not retval:
1492                try:
1493                    for mc in self.multiconfigs:
1494                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1495                finally:
1496                    bb.event.disable_heartbeat()
1497                return bb.server.process.idleFinish(msg)
1498
1499            if retval is True:
1500                return True
1501            return retval
1502
1503        self.reset_mtime_caches()
1504        self.buildSetVars()
1505
1506        # If we are told to do the None task then query the default task
1507        if task is None:
1508            task = self.configuration.cmd
1509
1510        if not task.startswith("do_"):
1511            task = "do_%s" % task
1512
1513        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
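        # e.g. (illustrative names) targets ["core-image-minimal", "busybox:do_fetch"]
        # with task "do_build" become ["core-image-minimal:do_build", "busybox:do_fetch"]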
1514
1515        bb.event.fire(bb.event.BuildInit(packages), self.data)
1516
1517        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
1518
1519        buildname = self.data.getVar("BUILDNAME", False)
1520
        # Normalize targets to always take the form <target>:do_<task>
1522        ntargets = []
1523        for target in runlist:
1524            if target[0]:
1525                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
1526            ntargets.append("%s:%s" % (target[1], target[2]))
1527
1528        for mc in self.multiconfigs:
1529            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
1530        if self.eventlog:
1531            self.eventlog[2].write_variables()
1532        bb.event.enable_heartbeat()
1533
1534        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1535        if 'universe' in targets:
1536            rq.rqdata.warn_multi_bb = True
1537
1538        self.idleCallBackRegister(buildTargetsIdle, rq)
1539
1540
1541    def getAllKeysWithFlags(self, flaglist):
1542        def dummy_autorev(d):
1543            return
1544
1545        dump = {}
        # Horrible, but for now we need to avoid any side effects of autorev being called
1547        saved = bb.fetch2.get_autorev
1548        bb.fetch2.get_autorev = dummy_autorev
1549        for k in self.data.keys():
1550            try:
1551                expand = True
1552                flags = self.data.getVarFlags(k)
1553                if flags and "func" in flags and "python" in flags:
1554                    expand = False
1555                v = self.data.getVar(k, expand)
1556                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                    dump[k] = {
                        'v': str(v),
                        'history': self.data.varhistory.variable(k),
                    }
1561                    for d in flaglist:
1562                        if flags and d in flags:
1563                            dump[k][d] = flags[d]
1564                        else:
1565                            dump[k][d] = None
1566            except Exception as e:
1567                print(e)
1568        bb.fetch2.get_autorev = saved
1569        return dump
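    # The dump maps each variable name to its value, history and the requested
    # flags, roughly (illustrative): {"DESCRIPTION": {"v": "...", "history":
    # [...], "doc": "...", "func": None}, ...}; flags a variable lacks are
    # recorded as None.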
1570
1571
1572    def updateCacheSync(self):
1573        if self.state == State.RUNNING:
1574            return
1575
1576        if not self.baseconfig_valid:
1577            logger.debug("Reloading base configuration data")
1578            self.initConfigurationData()
1579            self.handlePRServ()
1580
1581    # This is called for all async commands when self.state != running
1582    def updateCache(self):
1583        if self.state == State.RUNNING:
1584            return
1585
1586        if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
1587            if hasattr(self.parser, 'shutdown'):
1588                self.parser.shutdown(clean=False)
1589                self.parser.final_cleanup()
1590            raise bb.BBHandledException()
1591
1592        if self.state != State.PARSING:
1593            self.updateCacheSync()
1594
1595        if self.state != State.PARSING and not self.parsecache_valid:
1596            bb.server.process.serverlog("Parsing started")
1597            self.parsewatched = {}
1598
1599            bb.parse.siggen.reset(self.data)
            self.parseConfiguration()
1601            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1602                for mc in self.multiconfigs:
1603                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
1604
1605            for mc in self.multiconfigs:
1606                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
1607                self.recipecaches[mc].ignored_dependencies = set(ignore.split())
1608
1609                for dep in self.configuration.extra_assume_provided:
1610                    self.recipecaches[mc].ignored_dependencies.add(dep)
1611
1612            mcfilelist = {}
1613            total_masked = 0
1614            searchdirs = set()
1615            for mc in self.multiconfigs:
1616                (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1617
1618                mcfilelist[mc] = filelist
1619                total_masked += masked
1620                searchdirs |= set(search)
1621
1622            # Add mtimes for directories searched for bb/bbappend files
1623            for dirent in searchdirs:
1624                self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
1625
1626            self.parser = CookerParser(self, mcfilelist, total_masked)
1627            self._parsecache_set(True)
1628
1629        self.state = State.PARSING
1630
1631        if not self.parser.parse_next():
1632            collectlog.debug("parsing complete")
1633            if self.parser.error:
1634                raise bb.BBHandledException()
1635            self.show_appends_with_no_recipes()
1636            self.handlePrefProviders()
1637            for mc in self.multiconfigs:
1638                self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1639            self.state = State.RUNNING
1640
1641            # Send an event listing all stamps reachable after parsing
1642            # which the metadata may use to clean up stale data
1643            for mc in self.multiconfigs:
1644                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1645                bb.event.fire(event, self.databuilder.mcdata[mc])
1646            return None
1647
1648        return True
1649
1650    def checkPackages(self, pkgs_to_build, task=None):
1651
1652        # Return a copy, don't modify the original
1653        pkgs_to_build = pkgs_to_build[:]
1654
1655        if not pkgs_to_build:
1656            raise NothingToBuild
1657
1658        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
1659        for pkg in pkgs_to_build.copy():
1660            if pkg in ignore:
1661                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1662            if pkg.startswith("multiconfig:"):
1663                pkgs_to_build.remove(pkg)
1664                pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
1665
1666        if 'world' in pkgs_to_build:
1667            pkgs_to_build.remove('world')
1668            for mc in self.multiconfigs:
1669                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1670                for t in self.recipecaches[mc].world_target:
1671                    if mc:
1672                        t = "mc:" + mc + ":" + t
1673                    pkgs_to_build.append(t)
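        # e.g. (illustrative) with multiconfigs "" and "arm", a world target
        # "zlib" is appended once as "zlib" and once as "mc:arm:zlib".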
1674
1675        if 'universe' in pkgs_to_build:
1676            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1677            parselog.debug("collating packages for \"universe\"")
1678            pkgs_to_build.remove('universe')
1679            for mc in self.multiconfigs:
1680                for t in self.recipecaches[mc].universe_target:
1681                    if task:
1682                        foundtask = False
1683                        for provider_fn in self.recipecaches[mc].providers[t]:
1684                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1685                                foundtask = True
1686                                break
1687                        if not foundtask:
1688                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1689                            continue
1690                    if mc:
1691                        t = "mc:" + mc + ":" + t
1692                    pkgs_to_build.append(t)
1693
1694        return pkgs_to_build
1695
1696    def pre_serve(self):
1697        return
1698
1699    def post_serve(self):
1700        self.shutdown(force=True)
1701        prserv.serv.auto_shutdown()
1702        if hasattr(bb.parse, "siggen"):
1703            bb.parse.siggen.exit()
1704        if self.hashserv:
1705            self.hashserv.process.terminate()
1706            self.hashserv.process.join()
1707        if hasattr(self, "data"):
1708            bb.event.fire(CookerExit(), self.data)
1709
1710    def shutdown(self, force=False):
1711        if force:
1712            self.state = State.FORCE_SHUTDOWN
1713            bb.event._should_exit.set()
1714        else:
1715            self.state = State.SHUTDOWN
1716
1717        if self.parser:
1718            self.parser.shutdown(clean=False)
1719            self.parser.final_cleanup()
1720
1721    def finishcommand(self):
1722        if hasattr(self.parser, 'shutdown'):
1723            self.parser.shutdown(clean=False)
1724            self.parser.final_cleanup()
1725        self.state = State.INITIAL
1726        bb.event._should_exit.clear()
1727
1728    def reset(self):
1729        if hasattr(bb.parse, "siggen"):
1730            bb.parse.siggen.exit()
1731        self.finishcommand()
1732        self.initConfigurationData()
1733        self.handlePRServ()
1734
1735    def clientComplete(self):
1736        """Called when the client is done using the server"""
1737        self.finishcommand()
1738        self.extraconfigdata = {}
1739        self.command.reset()
1740        if hasattr(self, "data"):
            self.databuilder.reset()
            self.data = self.databuilder.data
1743        # In theory tinfoil could have modified the base data before parsing,
1744        # ideally need to track if anything did modify the datastore
1745        self._parsecache_set(False)
1746
1747class CookerExit(bb.event.Event):
1748    """
1749    Notify clients of the Cooker shutdown
1750    """
1751
1752    def __init__(self):
1753        bb.event.Event.__init__(self)
1754
1755
1756class CookerCollectFiles(object):
1757    def __init__(self, priorities, mc=''):
1758        self.mc = mc
1759        self.bbappends = []
        # priorities is a list of (collection, pattern, regex, priority) tuples.
        # Sort it with the longest patterns first, down to the shortest, so that
        # more specific patterns win and nested layers are evaluated properly.
1763        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
1764
1765    def calc_bbfile_priority(self, filename):
1766        for layername, _, regex, pri in self.bbfile_config_priorities:
1767            if regex.match(filename):
1768                return pri, regex, layername
1769        return 0, None, None
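    # Because bbfile_config_priorities is sorted longest-pattern-first in
    # __init__, a recipe in a nested layer such as
    # ".../meta-outer/meta-inner/foo.bb" (hypothetical path) is claimed by the
    # inner layer's more specific regex before the outer layer's pattern.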
1770
1771    def get_bbfiles(self):
1772        """Get list of default .bb files by reading out the current directory"""
1773        path = os.getcwd()
1774        contents = os.listdir(path)
1775        bbfiles = []
1776        for f in contents:
1777            if f.endswith(".bb"):
1778                bbfiles.append(os.path.abspath(os.path.join(path, f)))
1779        return bbfiles
1780
1781    def find_bbfiles(self, path):
1782        """Find all the .bb and .bbappend files in a directory"""
1783        found = []
1784        for dir, dirs, files in os.walk(path):
1785            for ignored in ('SCCS', 'CVS', '.svn'):
1786                if ignored in dirs:
1787                    dirs.remove(ignored)
1788            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1789
1790        return found
1791
1792    def collect_bbfiles(self, config, eventdata):
1793        """Collect all available .bb build files"""
1794        masked = 0
1795
1796        collectlog.debug("collecting .bb files")
1797
        files = (config.getVar("BBFILES") or "").split()
1799
1800        # Sort files by priority
        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0])
1802        config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1803
1804        if not files:
1805            files = self.get_bbfiles()
1806
1807        if not files:
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES settings")
1809            bb.event.fire(CookerExit(), eventdata)
1810
        # We need to track where we look so that we can know when the cache is invalid. There
        # is no nice way to do this and it is horrid, but we intercept the os.listdir() and
        # os.scandir() calls while we run glob().
1814        origlistdir = os.listdir
1815        if hasattr(os, 'scandir'):
1816            origscandir = os.scandir
1817        searchdirs = []
1818
1819        def ourlistdir(d):
1820            searchdirs.append(d)
1821            return origlistdir(d)
1822
1823        def ourscandir(d):
1824            searchdirs.append(d)
1825            return origscandir(d)
1826
1827        os.listdir = ourlistdir
1828        if hasattr(os, 'scandir'):
1829            os.scandir = ourscandir
1830        try:
1831            # Can't use set here as order is important
1832            newfiles = []
1833            for f in files:
1834                if os.path.isdir(f):
1835                    dirfiles = self.find_bbfiles(f)
1836                    for g in dirfiles:
1837                        if g not in newfiles:
1838                            newfiles.append(g)
1839                else:
1840                    globbed = glob.glob(f)
1841                    if not globbed and os.path.exists(f):
1842                        globbed = [f]
1843                    # glob gives files in order on disk. Sort to be deterministic.
1844                    for g in sorted(globbed):
1845                        if g not in newfiles:
1846                            newfiles.append(g)
1847        finally:
1848            os.listdir = origlistdir
1849            if hasattr(os, 'scandir'):
1850                os.scandir = origscandir
1851
1852        bbmask = config.getVar('BBMASK')
1853
1854        if bbmask:
1855            # First validate the individual regular expressions and ignore any
1856            # that do not compile
1857            bbmasks = []
1858            for mask in bbmask.split():
1859                # When constructing an older style single regex, it's possible for BBMASK
1860                # to end up beginning with '|', which matches and masks _everything_.
1861                if mask.startswith("|"):
1862                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1863                    mask = mask[1:]
1864                try:
1865                    re.compile(mask)
1866                    bbmasks.append(mask)
1867                except re.error:
1868                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1869
1870            # Then validate the combined regular expressions. This should never
1871            # fail, but better safe than sorry...
1872            bbmask = "|".join(bbmasks)
1873            try:
1874                bbmask_compiled = re.compile(bbmask)
1875            except re.error:
1876                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1877                bbmask = None
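            # Sketch (hypothetical value): BBMASK = "meta-foo/recipes-bad/ broken["
            # drops the uncompilable "broken[" entry above and joins the rest into
            # the single expression "meta-foo/recipes-bad/" compiled here.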
1878
1879        bbfiles = []
1880        bbappend = []
1881        for f in newfiles:
1882            if bbmask and bbmask_compiled.search(f):
1883                collectlog.debug("skipping masked file %s", f)
1884                masked += 1
1885                continue
1886            if f.endswith('.bb'):
1887                bbfiles.append(f)
1888            elif f.endswith('.bbappend'):
1889                bbappend.append(f)
1890            else:
1891                collectlog.debug("skipping %s: unknown file extension", f)
1892
        # Record (recipe basename, bbappend path) pairs so appends can later be matched to their .bb files
1894        for f in bbappend:
1895            base = os.path.basename(f).replace('.bbappend', '.bb')
1896            self.bbappends.append((base, f))
1897
1898        # Find overlayed recipes
1899        # bbfiles will be in priority order which makes this easy
1900        bbfile_seen = dict()
1901        self.overlayed = defaultdict(list)
1902        for f in reversed(bbfiles):
1903            base = os.path.basename(f)
1904            if base not in bbfile_seen:
1905                bbfile_seen[base] = f
1906            else:
1907                topfile = bbfile_seen[base]
1908                self.overlayed[topfile].append(f)
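        # e.g. (illustrative) if meta-a and meta-b both provide "zlib_1.3.bb",
        # the higher-priority copy becomes the key in self.overlayed and the
        # lower-priority path is recorded in its list of overlayed recipes.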
1909
1910        return (bbfiles, masked, searchdirs)
1911
1912    def get_file_appends(self, fn):
1913        """
1914        Returns a list of .bbappend files to apply to fn
1915        """
1916        filelist = []
1917        f = os.path.basename(fn)
1918        for b in self.bbappends:
1919            (bbappend, filename) = b
1920            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1921                filelist.append(filename)
1922        return tuple(filelist)
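    # The '%' acts as a trailing wildcard: a "busybox_1.%.bbappend" (illustrative
    # name) was recorded by collect_bbfiles() as ("busybox_1.%.bb", path) and
    # matches "busybox_1.36.1.bb" here, since both share the prefix up to the '%'.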
1923
1924    def collection_priorities(self, pkgfns, fns, d):
1925        # Return the priorities of the entries in pkgfns
1926        # Also check that all the regexes in self.bbfile_config_priorities are used
1927        # (but to do that we need to ensure skipped recipes aren't counted, nor
1928        # collections in BBFILE_PATTERN_IGNORE_EMPTY)
1929
1930        priorities = {}
1931        seen = set()
1932        matched = set()
1933
1934        matched_regex = set()
1935        unmatched_regex = set()
1936        for _, _, regex, _ in self.bbfile_config_priorities:
1937            unmatched_regex.add(regex)
1938
1939        # Calculate priorities for each file
1940        for p in pkgfns:
1941            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1942            priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
1943            if regex in unmatched_regex:
1944                matched_regex.add(regex)
1945                unmatched_regex.remove(regex)
1946            seen.add(realfn)
1947            if regex:
1948                matched.add(realfn)
1949
1950        if unmatched_regex:
1951            # Account for bbappend files
1952            for b in self.bbappends:
1953                (bbfile, append) = b
1954                seen.add(append)
1955
1956            # Account for skipped recipes
1957            seen.update(fns)
1958
1959            seen.difference_update(matched)
1960
1961            def already_matched(fn):
1962                for regex in matched_regex:
1963                    if regex.match(fn):
1964                        return True
1965                return False
1966
1967            for unmatch in unmatched_regex.copy():
1968                for fn in seen:
1969                    if unmatch.match(fn):
1970                        # If the bbappend or file was already matched by another regex, skip it
1971                        # e.g. for a layer within a layer, the outer regex could match, the inner
1972                        # regex may match nothing and we should warn about that
1973                        if already_matched(fn):
1974                            continue
1975                        unmatched_regex.remove(unmatch)
1976                        break
1977
1978        for collection, pattern, regex, _ in self.bbfile_config_priorities:
1979            if regex in unmatched_regex:
1980                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
1981                    collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1982                                                                                             collection, pattern))
1983
1984        return priorities
1985
1986class ParsingFailure(Exception):
1987    def __init__(self, realexception, recipe):
1988        self.realexception = realexception
1989        self.recipe = recipe
1990        Exception.__init__(self, realexception, recipe)
1991
1992class Parser(multiprocessing.Process):
1993    def __init__(self, jobs, results, quit, profile):
1994        self.jobs = jobs
1995        self.results = results
1996        self.quit = quit
1997        multiprocessing.Process.__init__(self)
1998        self.context = bb.utils.get_context().copy()
1999        self.handlers = bb.event.get_class_handlers().copy()
2000        self.profile = profile
2001        self.queue_signals = False
2002        self.signal_received = []
2003        self.signal_threadlock = threading.Lock()
2004
2005    def catch_sig(self, signum, frame):
2006        if self.queue_signals:
2007            self.signal_received.append(signum)
2008        else:
2009            self.handle_sig(signum, frame)
2010
2011    def handle_sig(self, signum, frame):
2012        if signum == signal.SIGTERM:
2013            signal.signal(signal.SIGTERM, signal.SIG_DFL)
2014            os.kill(os.getpid(), signal.SIGTERM)
2015        elif signum == signal.SIGINT:
2016            signal.default_int_handler(signum, frame)
2017
2018    def run(self):
2019
2020        if not self.profile:
2021            self.realrun()
2022            return
2023
2024        try:
2025            import cProfile as profile
        except ImportError:
2027            import profile
2028        prof = profile.Profile()
2029        try:
2030            profile.Profile.runcall(prof, self.realrun)
2031        finally:
2032            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2033            prof.dump_stats(logfile)
2034
2035    def realrun(self):
2036        # Signal handling here is hard. We must not terminate any process or thread holding the write
2037        # lock for the event stream as it will not be released, ever, and things will hang.
2038        # Python handles signals in the main thread/process but they can be raised from any thread and
2039        # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2040        # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2041        # new thread should also do so) and we defer handling but we handle with the local thread lock
2042        # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2043        # can be in the critical section.
2044        signal.signal(signal.SIGTERM, self.catch_sig)
2045        signal.signal(signal.SIGHUP, signal.SIG_DFL)
2046        signal.signal(signal.SIGINT, self.catch_sig)
2047        bb.utils.set_process_name(multiprocessing.current_process().name)
2048        multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2049        multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2050
2051        pending = []
2052        havejobs = True
2053        try:
2054            while havejobs or pending:
2055                if self.quit.is_set():
2056                    break
2057
2058                job = None
2059                try:
2060                    job = self.jobs.pop()
2061                except IndexError:
2062                    havejobs = False
2063                if job:
2064                    result = self.parse(*job)
                    # Clear the siggen cache after parsing to control memory usage; it's huge
2066                    bb.parse.siggen.postparsing_clean_cache()
2067                    pending.append(result)
2068
2069                if pending:
2070                    try:
2071                        result = pending.pop()
2072                        self.results.put(result, timeout=0.05)
2073                    except queue.Full:
2074                        pending.append(result)
2075        finally:
2076            self.results.close()
2077            self.results.join_thread()
2078
2079    def parse(self, mc, cache, filename, appends, layername):
2080        try:
2081            origfilter = bb.event.LogHandler.filter
2082            # Record the filename we're parsing into any events generated
2083            def parse_filter(self, record):
2084                record.taskpid = bb.event.worker_pid
2085                record.fn = filename
2086                return True
2087
2088            # Reset our environment and handlers to the original settings
2089            bb.utils.set_context(self.context.copy())
2090            bb.event.set_class_handlers(self.handlers.copy())
2091            bb.event.LogHandler.filter = parse_filter
2092
2093            return True, mc, cache.parse(filename, appends, layername)
2094        except Exception as exc:
            exc.recipe = filename
2097            return True, None, exc
        # Turn BaseExceptions into Exceptions here so that we shut down gracefully
        # and a worker thread doesn't just exit on its own in response to, for
        # example, a SystemExit event.
2101        except BaseException as exc:
2102            return True, None, ParsingFailure(exc, filename)
2103        finally:
2104            bb.event.LogHandler.filter = origfilter
2105
2106class CookerParser(object):
2107    def __init__(self, cooker, mcfilelist, masked):
2108        self.mcfilelist = mcfilelist
2109        self.cooker = cooker
2110        self.cfgdata = cooker.data
2111        self.cfghash = cooker.databuilder.data_hash
2112        self.cfgbuilder = cooker.databuilder
2113
2114        # Accounting statistics
2115        self.parsed = 0
2116        self.cached = 0
2117        self.error = 0
2118        self.masked = masked
2119
2120        self.skipped = 0
2121        self.virtuals = 0
2122
2123        self.current = 0
2124        self.process_names = []
2125
2126        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2127        self.fromcache = set()
2128        self.willparse = set()
2129        for mc in self.cooker.multiconfigs:
2130            for filename in self.mcfilelist[mc]:
2131                appends = self.cooker.collections[mc].get_file_appends(filename)
2132                layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2133                if not self.bb_caches[mc].cacheValid(filename, appends):
2134                    self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
2135                else:
2136                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2137
2138        self.total = len(self.fromcache) + len(self.willparse)
2139        self.toparse = len(self.willparse)
2140        self.progress_chunk = int(max(self.toparse / 100, 1))
2141
2142        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2143                                 multiprocessing.cpu_count()), self.toparse)
2144
2145        bb.cache.SiggenRecipeInfo.reset()
2146        self.start()
2147        self.haveshutdown = False
2148        self.syncthread = None
2149
2150    def start(self):
2151        self.results = self.load_cached()
2152        self.processes = []
2153        if self.toparse:
2154            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2155
2156            self.parser_quit = multiprocessing.Event()
2157            self.result_queue = multiprocessing.Queue()
2158
2159            def chunkify(lst,n):
2160                return [lst[i::n] for i in range(n)]
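            # chunkify stripes the job list round-robin across n sublists, e.g.
            # (illustrative): chunkify([1, 2, 3, 4, 5, 6, 7], 3) -> [[1, 4, 7], [2, 5], [3, 6]]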
2161            self.jobs = chunkify(list(self.willparse), self.num_processes)
2162
2163            for i in range(0, self.num_processes):
2164                parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2165                parser.start()
2166                self.process_names.append(parser.name)
2167                self.processes.append(parser)
2168
2169            self.results = itertools.chain(self.results, self.parse_generator())
2170
2171    def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
2172        if not self.toparse:
2173            return
2174        if self.haveshutdown:
2175            return
2176        self.haveshutdown = True
2177
2178        if clean:
2179            event = bb.event.ParseCompleted(self.cached, self.parsed,
2180                                            self.skipped, self.masked,
2181                                            self.virtuals, self.error,
2182                                            self.total)
2183
2184            bb.event.fire(event, self.cfgdata)
2185        else:
2186            bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
2187            bb.error("Parsing halted due to errors, see error messages above")
2188
        # Clean up the queue before calling process.join(), otherwise there might
        # be deadlocks.
2191        while True:
2192            try:
                self.result_queue.get(timeout=0.25)
2194            except queue.Empty:
2195                break
2196
2197        def sync_caches():
2198            for c in self.bb_caches.values():
2199                bb.cache.SiggenRecipeInfo.reset()
2200                c.sync()
2201
2202        self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
2203        self.syncthread.start()
2204
2205        self.parser_quit.set()
2206
2207        for process in self.processes:
2208            process.join(0.5)
2209
2210        for process in self.processes:
2211            if process.exitcode is None:
2212                os.kill(process.pid, signal.SIGINT)
2213
2214        for process in self.processes:
2215            process.join(0.5)
2216
2217        for process in self.processes:
2218            if process.exitcode is None:
2219                process.terminate()
2220
2221        for process in self.processes:
2222            process.join()
2223            # clean up zombies
2224            process.close()
2225
2226        bb.codeparser.parser_cache_save()
2227        bb.codeparser.parser_cache_savemerge()
2228        bb.cache.SiggenRecipeInfo.reset()
2229        bb.fetch.fetcher_parse_done()
2230        if self.cooker.configuration.profile:
2231            profiles = []
2232            for i in self.process_names:
2233                logfile = "profile-parse-%s.log" % i
2234                if os.path.exists(logfile) and os.path.getsize(logfile):
2235                    profiles.append(logfile)
2236
2237            if profiles:
2238                pout = "profile-parse.log.processed"
                bb.utils.process_profilelog(profiles, pout=pout)
2240                print("Processed parsing statistics saved to %s" % (pout))
2241
2242    def final_cleanup(self):
2243        if self.syncthread:
2244            self.syncthread.join()
2245
2246    def load_cached(self):
2247        for mc, cache, filename, appends, layername in self.fromcache:
2248            infos = cache.loadCached(filename, appends)
2249            yield False, mc, infos
2250
2251    def parse_generator(self):
2252        empty = False
2253        while self.processes or not empty:
2254            for process in self.processes.copy():
2255                if not process.is_alive():
2256                    process.join()
2257                    self.processes.remove(process)
2258
2259            if self.parsed >= self.toparse:
2260                break
2261
2262            try:
2263                result = self.result_queue.get(timeout=0.25)
2264            except queue.Empty:
2265                empty = True
2266                yield None, None, None
2267            else:
2268                empty = False
2269                yield result
2270
        if self.parsed < self.toparse:
2272            raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2273
2274
2275    def parse_next(self):
2276        result = []
2277        parsed = None
2278        try:
2279            parsed, mc, result = next(self.results)
2280            if isinstance(result, BaseException):
                # Re-raise exceptions that were captured in a parser process
2282                raise result
2283            if parsed is None:
2284                # Timeout, loop back through the main loop
2285                return True
2286
2287        except StopIteration:
2288            self.shutdown()
2289            return False
2290        except bb.BBHandledException as exc:
2291            self.error += 1
2292            logger.debug('Failed to parse recipe: %s' % exc.recipe)
2293            self.shutdown(clean=False)
2294            return False
2295        except ParsingFailure as exc:
2296            self.error += 1
2297
            exc_desc = str(exc)
            # exc is a ParsingFailure wrapper; examine the real exception it carries
            real = getattr(exc, "realexception", exc)
            if isinstance(real, SystemExit) and not isinstance(real.code, str):
                exc_desc = 'Exited with "%d"' % real.code
2301
2302            logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
2303            self.shutdown(clean=False)
2304            return False
2305        except bb.parse.ParseError as exc:
2306            self.error += 1
2307            logger.error(str(exc))
2308            self.shutdown(clean=False, eventmsg=str(exc))
2309            return False
2310        except bb.data_smart.ExpansionError as exc:
2311            def skip_frames(f, fn_prefix):
2312                while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
2313                    f = f.tb_next
2314                return f
2315
2316            self.error += 1
2317            bbdir = os.path.dirname(__file__) + os.sep
2318            etype, value, tb = sys.exc_info()
2319
2320            # Remove any frames where the code comes from bitbake. This
2321            # prevents deep (and pretty useless) backtraces for expansion error
2322            tb = skip_frames(tb, bbdir)
2323            cur = tb
2324            while cur:
2325                cur.tb_next = skip_frames(cur.tb_next, bbdir)
2326                cur = cur.tb_next
2327
2328            logger.error('ExpansionError during parsing %s', value.recipe,
2329                         exc_info=(etype, value, tb))
2330            self.shutdown(clean=False)
2331            return False
2332        except Exception as exc:
2333            self.error += 1
2334            _, value, _ = sys.exc_info()
2335            if hasattr(value, "recipe"):
2336                logger.error('Unable to parse %s' % value.recipe,
2337                            exc_info=sys.exc_info())
2338            else:
2339                # Most likely, an exception occurred during raising an exception
2340                import traceback
2341                logger.error('Exception during parse: %s' % traceback.format_exc())
2342            self.shutdown(clean=False)
2343            return False
2344
2345        self.current += 1
2346        self.virtuals += len(result)
2347        if parsed:
2348            self.parsed += 1
2349            if self.parsed % self.progress_chunk == 0:
2350                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2351                              self.cfgdata)
2352        else:
2353            self.cached += 1
2354
2355        for virtualfn, info_array in result:
2356            if info_array[0].skipped:
2357                self.skipped += 1
2358                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
2359            self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                        parsed=parsed, watcher=self.cooker.add_filewatch)
2361        return True
2362
2363    def reparse(self, filename):
2364        bb.cache.SiggenRecipeInfo.reset()
2365        to_reparse = set()
2366        for mc in self.cooker.multiconfigs:
2367            layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2368            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
2369
2370        for mc, filename, appends, layername in to_reparse:
2371            infos = self.bb_caches[mc].parse(filename, appends, layername)
2372            for vfn, info_array in infos:
2373                self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
2374