1#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006        Richard Purdie
7# Copyright (C) 2012        Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004  Chris Larson
11# Copyright (C) 2003, 2004  Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005        Holger Hans Peter Freyther
14# Copyright (C) 2005        ROAD GmbH
15#
16# SPDX-License-Identifier: GPL-2.0-only
17#
18
19import os
20import logging
21import pickle
22from collections import defaultdict
23from collections.abc import Mapping
24import bb.utils
25from bb import PrefixLoggerAdapter
26import re
27
28logger = logging.getLogger("BitBake.Cache")
29
30__cache_version__ = "154"
31
32def getCacheFile(path, filename, mc, data_hash):
33    mcspec = ''
34    if mc:
35        mcspec = ".%s" % mc
36    return os.path.join(path, filename + mcspec + "." + data_hash)
37
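# Illustrative sketch of getCacheFile() (comment only, not executed): it joins the
# cache directory, base file name, optional multiconfig suffix and data hash. The
# directory and hash values below are hypothetical.
#
#   getCacheFile("/build/cache", "bb_cache.dat", "", "0123abcd")
#   -> "/build/cache/bb_cache.dat.0123abcd"
#   getCacheFile("/build/cache", "bb_cache.dat", "mymc", "0123abcd")
#   -> "/build/cache/bb_cache.dat.mymc.0123abcd"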
38# RecipeInfoCommon defines the common methods used to retrieve data
39# from metadata for the caches. CoreRecipeInfo, as well as any extra
40# RecipeInfo classes, must inherit from this class.
41class RecipeInfoCommon(object):
42
43    @classmethod
44    def listvar(cls, var, metadata):
45        return cls.getvar(var, metadata).split()
46
47    @classmethod
48    def intvar(cls, var, metadata):
49        return int(cls.getvar(var, metadata) or 0)
50
51    @classmethod
52    def depvar(cls, var, metadata):
53        return bb.utils.explode_deps(cls.getvar(var, metadata))
54
55    @classmethod
56    def pkgvar(cls, var, packages, metadata):
57        return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
58                    for pkg in packages)
59
60    @classmethod
61    def taskvar(cls, var, tasks, metadata):
62        return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
63                    for task in tasks)
64
65    @classmethod
66    def flaglist(cls, flag, varlist, metadata, squash=False):
67        out_dict = dict((var, metadata.getVarFlag(var, flag))
68                    for var in varlist)
69        if squash:
70            return dict((k,v) for (k,v) in out_dict.items() if v)
71        else:
72            return out_dict
73
74    @classmethod
75    def getvar(cls, var, metadata, expand = True):
76        return metadata.getVar(var, expand) or ''
77
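# Illustrative sketch of the RecipeInfoCommon helpers (comment only, not executed).
# The datastore contents below are hypothetical; getvar() falls back to '' when a
# variable is unset.
#
#   metadata.getVar("DEPENDS", True)                  -> "zlib openssl (>= 1.1)"
#   RecipeInfoCommon.listvar("PACKAGES", metadata)    -> ["foo", "foo-dev"]
#   RecipeInfoCommon.depvar("DEPENDS", metadata)      -> ["zlib", "openssl"]
#   RecipeInfoCommon.intvar("DEFAULT_PREFERENCE", metadata)  -> 0 when unset
#   RecipeInfoCommon.pkgvar("RDEPENDS", ["foo"], metadata)
#   -> {"foo": bb.utils.explode_deps(metadata.getVar("RDEPENDS:foo", True) or '')}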
78
79class CoreRecipeInfo(RecipeInfoCommon):
80    __slots__ = ()
81
82    cachefile = "bb_cache.dat"
83
84    def __init__(self, filename, metadata):
85        self.file_depends = metadata.getVar('__depends', False)
86        self.timestamp = bb.parse.cached_mtime(filename)
87        self.variants = self.listvar('__VARIANTS', metadata) + ['']
88        self.appends = self.listvar('__BBAPPEND', metadata)
89        self.nocache = self.getvar('BB_DONT_CACHE', metadata)
90
91        self.provides  = self.depvar('PROVIDES', metadata)
92        self.rprovides = self.depvar('RPROVIDES', metadata)
93        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
94        self.packages = self.listvar('PACKAGES', metadata)
95        if not self.packages:
96            self.packages.append(self.pn)
97        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
98        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
99
100        self.skipreason = self.getvar('__SKIPPED', metadata)
101        if self.skipreason:
102            self.skipped = True
103            return
104
105        self.tasks = metadata.getVar('__BBTASKS', False)
106
107        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
108        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
109
110        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
111
112        self.skipped = False
113        self.pe = self.getvar('PE', metadata)
114        self.pv = self.getvar('PV', metadata)
115        self.pr = self.getvar('PR', metadata)
116        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
117        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
118        self.stamp = self.getvar('STAMP', metadata)
119        self.stampclean = self.getvar('STAMPCLEAN', metadata)
120        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
121        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
122        self.depends          = self.depvar('DEPENDS', metadata)
123        self.rdepends         = self.depvar('RDEPENDS', metadata)
124        self.rrecommends      = self.depvar('RRECOMMENDS', metadata)
125        self.rdepends_pkg     = self.pkgvar('RDEPENDS', self.packages, metadata)
126        self.rrecommends_pkg  = self.pkgvar('RRECOMMENDS', self.packages, metadata)
127        self.inherits         = self.getvar('__inherit_cache', metadata, expand=False)
128        self.fakerootenv      = self.getvar('FAKEROOTENV', metadata)
129        self.fakerootdirs     = self.getvar('FAKEROOTDIRS', metadata)
130        self.fakerootlogs     = self.getvar('FAKEROOTLOGS', metadata)
131        self.fakerootnoenv    = self.getvar('FAKEROOTNOENV', metadata)
132        self.extradepsfunc    = self.getvar('calculate_extra_depends', metadata)
133
134    @classmethod
135    def init_cacheData(cls, cachedata):
136        # Initialise the CacheData fields populated by CoreRecipeInfo
137        cachedata.task_deps = {}
138        cachedata.pkg_fn = {}
139        cachedata.pkg_pn = defaultdict(list)
140        cachedata.pkg_pepvpr = {}
141        cachedata.pkg_dp = {}
142
143        cachedata.stamp = {}
144        cachedata.stampclean = {}
145        cachedata.stamp_extrainfo = {}
146        cachedata.file_checksums = {}
147        cachedata.fn_provides = {}
148        cachedata.pn_provides = defaultdict(list)
149        cachedata.all_depends = []
150
151        cachedata.deps = defaultdict(list)
152        cachedata.packages = defaultdict(list)
153        cachedata.providers = defaultdict(list)
154        cachedata.rproviders = defaultdict(list)
155        cachedata.packages_dynamic = defaultdict(list)
156
157        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
158        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
159        cachedata.possible_world = []
160        cachedata.universe_target = []
161        cachedata.hashfn = {}
162
163        cachedata.basetaskhash = {}
164        cachedata.inherits = {}
165        cachedata.fakerootenv = {}
166        cachedata.fakerootnoenv = {}
167        cachedata.fakerootdirs = {}
168        cachedata.fakerootlogs = {}
169        cachedata.extradepsfunc = {}
170
171    def add_cacheData(self, cachedata, fn):
172        cachedata.task_deps[fn] = self.task_deps
173        cachedata.pkg_fn[fn] = self.pn
174        cachedata.pkg_pn[self.pn].append(fn)
175        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
176        cachedata.pkg_dp[fn] = self.defaultpref
177        cachedata.stamp[fn] = self.stamp
178        cachedata.stampclean[fn] = self.stampclean
179        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
180        cachedata.file_checksums[fn] = self.file_checksums
181
182        provides = [self.pn]
183        for provide in self.provides:
184            if provide not in provides:
185                provides.append(provide)
186        cachedata.fn_provides[fn] = provides
187
188        for provide in provides:
189            cachedata.providers[provide].append(fn)
190            if provide not in cachedata.pn_provides[self.pn]:
191                cachedata.pn_provides[self.pn].append(provide)
192
193        for dep in self.depends:
194            if dep not in cachedata.deps[fn]:
195                cachedata.deps[fn].append(dep)
196            if dep not in cachedata.all_depends:
197                cachedata.all_depends.append(dep)
198
199        rprovides = self.rprovides
200        for package in self.packages:
201            cachedata.packages[package].append(fn)
202            rprovides += self.rprovides_pkg[package]
203
204        for rprovide in rprovides:
205            if fn not in cachedata.rproviders[rprovide]:
206                cachedata.rproviders[rprovide].append(fn)
207
208        for package in self.packages_dynamic:
209            cachedata.packages_dynamic[package].append(fn)
210
211        # Build hash of runtime depends and recommends
212        for package in self.packages:
213            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
214            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]
215
216        # Collect files we may need for possible world-dep
217        # calculations
218        if not self.not_world:
219            cachedata.possible_world.append(fn)
220        #else:
221        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)
222
223        # Create a collection of all targets for sanity-checking tasks,
224        # such as checking upstream versions, licenses, and the tools
225        # needed for task and image creation.
226        cachedata.universe_target.append(self.pn)
227
228        cachedata.hashfn[fn] = self.hashfilename
229        for task, taskhash in self.basetaskhashes.items():
230            identifier = '%s:%s' % (fn, task)
231            cachedata.basetaskhash[identifier] = taskhash
232
233        cachedata.inherits[fn] = self.inherits
234        cachedata.fakerootenv[fn] = self.fakerootenv
235        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
236        cachedata.fakerootdirs[fn] = self.fakerootdirs
237        cachedata.fakerootlogs[fn] = self.fakerootlogs
238        cachedata.extradepsfunc[fn] = self.extradepsfunc
239
240def virtualfn2realfn(virtualfn):
241    """
242    Convert a virtual file name to a real one, plus the associated subclass keyword and multiconfig name
243    """
244    mc = ""
245    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
246        elems = virtualfn.split(':')
247        mc = elems[1]
248        virtualfn = ":".join(elems[2:])
249
250    fn = virtualfn
251    cls = ""
252    if virtualfn.startswith('virtual:'):
253        elems = virtualfn.split(':')
254        cls = ":".join(elems[1:-1])
255        fn = elems[-1]
256
257    return (fn, cls, mc)
258
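# Illustrative sketch of virtualfn2realfn() (comment only, not executed); the
# recipe path is hypothetical:
#
#   virtualfn2realfn("/r/foo_1.0.bb")
#   -> ("/r/foo_1.0.bb", "", "")
#   virtualfn2realfn("virtual:native:/r/foo_1.0.bb")
#   -> ("/r/foo_1.0.bb", "native", "")
#   virtualfn2realfn("mc:mymc:virtual:native:/r/foo_1.0.bb")
#   -> ("/r/foo_1.0.bb", "native", "mymc")
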
259def realfn2virtual(realfn, cls, mc):
260    """
261    Convert a real filename + the associated subclass keyword to a virtual filename
262    """
263    if cls:
264        realfn = "virtual:" + cls + ":" + realfn
265    if mc:
266        realfn = "mc:" + mc + ":" + realfn
267    return realfn
268
269def variant2virtual(realfn, variant):
270    """
271    Convert a real filename + a variant (optionally carrying an "mc:<name>:" prefix) to a virtual filename
272    """
273    if variant == "":
274        return realfn
275    if variant.startswith("mc:") and variant.count(':') >= 2:
276        elems = variant.split(":")
277        if elems[2]:
278            return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
279        return "mc:" + elems[1] + ":" + realfn
280    return "virtual:" + variant + ":" + realfn
281
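# Illustrative sketch of variant2virtual() (comment only, not executed), using the
# same hypothetical recipe path:
#
#   variant2virtual("/r/foo_1.0.bb", "")             -> "/r/foo_1.0.bb"
#   variant2virtual("/r/foo_1.0.bb", "native")       -> "virtual:native:/r/foo_1.0.bb"
#   variant2virtual("/r/foo_1.0.bb", "mc:mymc:")     -> "mc:mymc:/r/foo_1.0.bb"
#   variant2virtual("/r/foo_1.0.bb", "mc:mymc:native")
#   -> "mc:mymc:virtual:native:/r/foo_1.0.bb"
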
282def parse_recipe(bb_data, bbfile, appends, mc=''):
283    """
284    Parse a recipe
285    """
286
287    bb_data.setVar("__BBMULTICONFIG", mc)
288
289    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
290    bb.parse.cached_mtime_noerror(bbfile_loc)
291
292    if appends:
293        bb_data.setVar('__BBAPPEND', " ".join(appends))
294    bb_data = bb.parse.handle(bbfile, bb_data)
295    return bb_data
296
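# Illustrative sketch of what parse_recipe() yields (comment only, not executed):
# bb.parse.handle() returns a dict of datastores keyed by variant name, with ''
# for the base recipe; the keys below are hypothetical, assuming
# BBCLASSEXTEND = "native" in the recipe:
#
#   datastores = parse_recipe(bb_data, "/r/foo_1.0.bb", appends=[])
#   sorted(datastores.keys())   # -> ['', 'native']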
297
298class NoCache(object):
299
300    def __init__(self, databuilder):
301        self.databuilder = databuilder
302        self.data = databuilder.data
303
304    def loadDataFull(self, virtualfn, appends):
305        """
306        Return a complete set of data for fn.
307        To do this, we need to parse the file.
308        """
309        logger.debug("Parsing %s (full)" % virtualfn)
310        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
311        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
312        return bb_data[virtual]
313
314    def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
315        """
316        Load and parse one .bb build file
317        Return a dict of datastores keyed by variant name ('' for the base recipe)
318        """
319
320        if virtonly:
321            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
322            bb_data = self.databuilder.mcdata[mc].createCopy()
323            bb_data.setVar("__ONLYFINALISE", virtual or "default")
324            datastores = parse_recipe(bb_data, bbfile, appends, mc)
325            return datastores
326
327        if mc is not None:
328            bb_data = self.databuilder.mcdata[mc].createCopy()
329            return parse_recipe(bb_data, bbfile, appends, mc)
330
331        bb_data = self.data.createCopy()
332        datastores = parse_recipe(bb_data, bbfile, appends)
333
334        for mc in self.databuilder.mcdata:
335            if not mc:
336                continue
337            bb_data = self.databuilder.mcdata[mc].createCopy()
338            newstores = parse_recipe(bb_data, bbfile, appends, mc)
339            for ns in newstores:
340                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
341
342        return datastores
343
344class Cache(NoCache):
345    """
346    BitBake Cache implementation
347    """
348    def __init__(self, databuilder, mc, data_hash, caches_array):
349        super().__init__(databuilder)
350        data = databuilder.data
351
352        # The caches_array passed into the Cache constructor is used
353        # later to decide whether extra cache files need dump/load
354        # support.
355        self.mc = mc
356        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
357        self.caches_array = caches_array
358        self.cachedir = data.getVar("CACHE")
359        self.clean = set()
360        self.checked = set()
361        self.depends_cache = {}
362        self.data_fn = None
363        self.cacheclean = True
364        self.data_hash = data_hash
365        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
366
367        if self.cachedir in [None, '']:
368            self.has_cache = False
369            self.logger.info("Not using a cache. "
370                             "Set CACHE = <directory> to enable.")
371            return
372
373        self.has_cache = True
374
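    # Illustrative sketch of filelist_regex above (comment only, not executed):
    # it splits a 'file-checksums' flag value on whitespace only when it is
    # preceded by ':True' or ':False', so paths containing spaces survive:
    #
    #   self.filelist_regex.split("/srv/my file:True /srv/other:False")
    #   -> ["/srv/my file:True", "/srv/other:False"]
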
375    def getCacheFile(self, cachefile):
376        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
377
378    def prepare_cache(self, progress):
379        if not self.has_cache:
380            return 0
381
382        loaded = 0
383
384        self.cachefile = self.getCacheFile("bb_cache.dat")
385
386        self.logger.debug("Cache dir: %s", self.cachedir)
387        bb.utils.mkdirhier(self.cachedir)
388
389        cache_ok = True
390        if self.caches_array:
391            for cache_class in self.caches_array:
392                cachefile = self.getCacheFile(cache_class.cachefile)
393                cache_exists = os.path.exists(cachefile)
394                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
395                cache_ok = cache_ok and cache_exists
396                cache_class.init_cacheData(self)
397        if cache_ok:
398            loaded = self.load_cachefile(progress)
399        elif os.path.isfile(self.cachefile):
400            self.logger.info("Out of date cache found, rebuilding...")
401        else:
402            self.logger.debug("Cache file %s not found, building..." % self.cachefile)
403
404        # We don't use the symlink, it's just for debugging convenience
405        if self.mc:
406            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
407        else:
408            symlink = os.path.join(self.cachedir, "bb_cache.dat")
409
410        if os.path.exists(symlink):
411            bb.utils.remove(symlink)
412        try:
413            os.symlink(os.path.basename(self.cachefile), symlink)
414        except OSError:
415            pass
416
417        return loaded
418
419    def cachesize(self):
420        if not self.has_cache:
421            return 0
422
423        cachesize = 0
424        for cache_class in self.caches_array:
425            cachefile = self.getCacheFile(cache_class.cachefile)
426            try:
427                with open(cachefile, "rb") as cachefile:
428                    cachesize += os.fstat(cachefile.fileno()).st_size
429            except FileNotFoundError:
430                pass
431
432        return cachesize
433
434    def load_cachefile(self, progress):
435        previous_progress = 0
436
437        for cache_class in self.caches_array:
438            cachefile = self.getCacheFile(cache_class.cachefile)
439            self.logger.debug('Loading cache file: %s' % cachefile)
440            with open(cachefile, "rb") as cachefile:
441                pickled = pickle.Unpickler(cachefile)
442                # Check cache version information
443                try:
444                    cache_ver = pickled.load()
445                    bitbake_ver = pickled.load()
446                except Exception:
447                    self.logger.info('Invalid cache, rebuilding...')
448                    return 0
449
450                if cache_ver != __cache_version__:
451                    self.logger.info('Cache version mismatch, rebuilding...')
452                    return 0
453                elif bitbake_ver != bb.__version__:
454                    self.logger.info('Bitbake version mismatch, rebuilding...')
455                    return 0
456
457                # Load the rest of the cache file
458                current_progress = 0
459                while cachefile:
460                    try:
461                        key = pickled.load()
462                        value = pickled.load()
463                    except Exception:
464                        break
465                    if not isinstance(key, str):
466                        bb.warn("%s from extras cache is not a string?" % key)
467                        break
468                    if not isinstance(value, RecipeInfoCommon):
469                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
470                        break
471
472                    if key in self.depends_cache:
473                        self.depends_cache[key].append(value)
474                    else:
475                        self.depends_cache[key] = [value]
476                    # only fire events on even percentage boundaries
477                    current_progress = cachefile.tell() + previous_progress
478                    progress(cachefile.tell() + previous_progress)
479
480                previous_progress += current_progress
481
482        return len(self.depends_cache)
483
484    def parse(self, filename, appends):
485        """Parse the specified filename, returning the recipe information"""
486        self.logger.debug("Parsing %s", filename)
487        infos = []
488        datastores = self.load_bbfile(filename, appends, mc=self.mc)
489        depends = []
490        variants = []
491        # Process the "real" fn last so we can store variants list
492        for variant, data in sorted(datastores.items(),
493                                    key=lambda i: i[0],
494                                    reverse=True):
495            virtualfn = variant2virtual(filename, variant)
496            variants.append(variant)
497            depends = depends + (data.getVar("__depends", False) or [])
498            if depends and not variant:
499                data.setVar("__depends", depends)
500            if virtualfn == filename:
501                data.setVar("__VARIANTS", " ".join(variants))
502            info_array = []
503            for cache_class in self.caches_array:
504                info = cache_class(filename, data)
505                info_array.append(info)
506            infos.append((virtualfn, info_array))
507
508        return infos
509
510    def load(self, filename, appends):
511        """Obtain the recipe information for the specified filename,
512        using cached values if available, otherwise parsing.
513
514        Note that if it does parse to obtain the info, it will not
515        automatically add the information to the cache or to your
516        CacheData.  Use the add or add_info method to do so after
517        running this, or use loadData instead."""
518        cached = self.cacheValid(filename, appends)
519        if cached:
520            infos = []
521            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
522            info_array = self.depends_cache[filename]
523            for variant in info_array[0].variants:
524                virtualfn = variant2virtual(filename, variant)
525                infos.append((virtualfn, self.depends_cache[virtualfn]))
526        else:
527            return cached, self.parse(filename, appends)
528
529        return cached, infos
530
531    def loadData(self, fn, appends, cacheData):
532        """Load the recipe info for the specified filename,
533        parsing and adding to the cache if necessary, and adding
534        the recipe information to the supplied CacheData instance."""
535        skipped, virtuals = 0, 0
536
537        cached, infos = self.load(fn, appends)
538        for virtualfn, info_array in infos:
539            if info_array[0].skipped:
540                self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
541                skipped += 1
542            else:
543                self.add_info(virtualfn, info_array, cacheData, not cached)
544                virtuals += 1
545
546        return cached, skipped, virtuals
547
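    # Illustrative usage sketch of the parse/load flow (comment only, not
    # executed); the cooker-side names (databuilder, cachedata, recipe path,
    # appends, progress_cb) are hypothetical:
    #
    #   cache = Cache(databuilder, mc='', data_hash=databuilder.data_hash,
    #                 caches_array=[CoreRecipeInfo])
    #   cache.prepare_cache(progress_cb)
    #   cached, skipped, virtuals = cache.loadData("/r/foo_1.0.bb", [], cachedata)
    #   cache.sync()   # write out any newly parsed entries
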
548    def cacheValid(self, fn, appends):
549        """
550        Is the cache valid for fn?
551        Fast version, no timestamps checked.
552        """
553        if fn not in self.checked:
554            self.cacheValidUpdate(fn, appends)
555
556        # Is cache enabled?
557        if not self.has_cache:
558            return False
559        if fn in self.clean:
560            return True
561        return False
562
563    def cacheValidUpdate(self, fn, appends):
564        """
565        Is the cache valid for fn?
566        Make thorough (slower) checks including timestamps.
567        """
568        # Is cache enabled?
569        if not self.has_cache:
570            return False
571
572        self.checked.add(fn)
573
574        # File isn't in depends_cache
575        if not fn in self.depends_cache:
576            self.logger.debug2("%s is not cached", fn)
577            return False
578
579        mtime = bb.parse.cached_mtime_noerror(fn)
580
581        # Check file still exists
582        if mtime == 0:
583            self.logger.debug2("%s no longer exists", fn)
584            self.remove(fn)
585            return False
586
587        info_array = self.depends_cache[fn]
588        # Check the file's timestamp
589        if mtime != info_array[0].timestamp:
590            self.logger.debug2("%s changed", fn)
591            self.remove(fn)
592            return False
593
594        # Check dependencies are still valid
595        depends = info_array[0].file_depends
596        if depends:
597            for f, old_mtime in depends:
598                fmtime = bb.parse.cached_mtime_noerror(f)
599                # Check if file still exists
600                if old_mtime != 0 and fmtime == 0:
601                    self.logger.debug2("%s's dependency %s was removed",
602                                         fn, f)
603                    self.remove(fn)
604                    return False
605
606                if (fmtime != old_mtime):
607                    self.logger.debug2("%s's dependency %s changed",
608                                         fn, f)
609                    self.remove(fn)
610                    return False
611
612        if hasattr(info_array[0], 'file_checksums'):
613            for _, fl in info_array[0].file_checksums.items():
614                fl = fl.strip()
615                if not fl:
616                    continue
617                # Have to be careful about spaces and colons in filenames
618                flist = self.filelist_regex.split(fl)
619                for f in flist:
620                    if not f:
621                        continue
622                    f, exist = f.split(":")
623                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
624                        self.logger.debug2("%s's file checksum list file %s changed",
625                                             fn, f)
626                        self.remove(fn)
627                        return False
628
629        if tuple(appends) != tuple(info_array[0].appends):
630            self.logger.debug2("appends for %s changed", fn)
631            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
632            self.remove(fn)
633            return False
634
635        invalid = False
636        for cls in info_array[0].variants:
637            virtualfn = variant2virtual(fn, cls)
638            self.clean.add(virtualfn)
639            if virtualfn not in self.depends_cache:
640                self.logger.debug2("%s is not cached", virtualfn)
641                invalid = True
642            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
643                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
644                invalid = True
645
646        # If any one of the variants is not present, mark as invalid for all
647        if invalid:
648            for cls in info_array[0].variants:
649                virtualfn = variant2virtual(fn, cls)
650                if virtualfn in self.clean:
651                    self.logger.debug2("Removing %s from cache", virtualfn)
652                    self.clean.remove(virtualfn)
653            if fn in self.clean:
654                self.logger.debug2("Marking %s as not clean", fn)
655                self.clean.remove(fn)
656            return False
657
658        self.clean.add(fn)
659        return True
660
661    def remove(self, fn):
662        """
663        Remove a fn from the cache
664        Called from the parser in error cases
665        """
666        if fn in self.depends_cache:
667            self.logger.debug("Removing %s from cache", fn)
668            del self.depends_cache[fn]
669        if fn in self.clean:
670            self.logger.debug("Marking %s as unclean", fn)
671            self.clean.remove(fn)
672
673    def sync(self):
674        """
675        Save the cache
676        Called from the parser when complete (or exiting)
677        """
678
679        if not self.has_cache:
680            return
681
682        if self.cacheclean:
683            self.logger.debug2("Cache is clean, not saving.")
684            return
685
686        for cache_class in self.caches_array:
687            cache_class_name = cache_class.__name__
688            cachefile = self.getCacheFile(cache_class.cachefile)
689            self.logger.debug2("Writing %s", cachefile)
690            with open(cachefile, "wb") as f:
691                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
692                p.dump(__cache_version__)
693                p.dump(bb.__version__)
694
695                for key, info_array in self.depends_cache.items():
696                    for info in info_array:
697                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
698                            p.dump(key)
699                            p.dump(info)
700
701        del self.depends_cache
702
703    @staticmethod
704    def mtime(cachefile):
705        return bb.parse.cached_mtime_noerror(cachefile)
706
707    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
708        if self.mc is not None:
709            (fn, cls, mc) = virtualfn2realfn(filename)
710            if mc:
711                self.logger.error("Unexpected multiconfig %s", filename)
712                return
713
714            vfn = realfn2virtual(fn, cls, self.mc)
715        else:
716            vfn = filename
717
718        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
719            cacheData.add_from_recipeinfo(vfn, info_array)
720
721            if watcher:
722                watcher(info_array[0].file_depends)
723
724        if not self.has_cache:
725            return
726
727        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
728            if parsed:
729                self.cacheclean = False
730            self.depends_cache[filename] = info_array
731
732    def add(self, file_name, data, cacheData, parsed=None):
733        """
734        Save data we need into the cache
735        """
736
737        realfn = virtualfn2realfn(file_name)[0]
738
739        info_array = []
740        for cache_class in self.caches_array:
741            info_array.append(cache_class(realfn, data))
742        self.add_info(file_name, info_array, cacheData, parsed)
743
744class MulticonfigCache(Mapping):
745    def __init__(self, databuilder, data_hash, caches_array):
746        def progress(p):
747            nonlocal current_progress
748            nonlocal previous_progress
749            nonlocal previous_percent
750            nonlocal cachesize
751
752            current_progress = previous_progress + p
753
754            if current_progress > cachesize:
755                # we might have calculated incorrect total size because a file
756                # might've been written out just after we checked its size
757                cachesize = current_progress
758            current_percent = 100 * current_progress / cachesize
759            if current_percent > previous_percent:
760                previous_percent = current_percent
761                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
762                                databuilder.data)
763
764
765        cachesize = 0
766        current_progress = 0
767        previous_progress = 0
768        previous_percent = 0
769        self.__caches = {}
770
771        for mc, mcdata in databuilder.mcdata.items():
772            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
773
774            cachesize += self.__caches[mc].cachesize()
775
776        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
777        loaded = 0
778
779        for c in self.__caches.values():
780            loaded += c.prepare_cache(progress)
781            previous_progress = current_progress
782
783        # Note: the depends cache count corresponds to the number of parsed files.
784        # A single file may have several caches, but it still counts as one item in the cache.
785        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
786
787    def __len__(self):
788        return len(self.__caches)
789
790    def __getitem__(self, key):
791        return self.__caches[key]
792
793    def __contains__(self, key):
794        return key in self.__caches
795
796    def __iter__(self):
797        for k in self.__caches:
798            yield k
799
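# Illustrative usage sketch of MulticonfigCache (comment only, not executed);
# databuilder, data_hash and caches_array are hypothetical cooker-side objects:
#
#   caches = MulticonfigCache(databuilder, data_hash, [CoreRecipeInfo])
#   default_cache = caches['']      # Mapping interface: one Cache per multiconfig
#   for mc in caches:
#       caches[mc].sync()
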
800def init(cooker):
801    """
802    The Objective: Cache the minimum amount of data possible yet get to the
803    stage of building packages (i.e. tryBuild) without reparsing any .bb files.
804
805    To do this, we intercept getVar calls and only cache the variables we see
806    being accessed. We rely on the cache getVar calls being made for all
807    variables bitbake might need to use to reach this stage. For each cached
808    file we need to track:
809
810    * Its mtime
811    * The mtimes of all its dependencies
812    * Whether it caused a parse.SkipRecipe exception
813
814    Files causing parsing errors are evicted from the cache.
815
816    """
817    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
818
819
820class CacheData(object):
821    """
822    The data structures we compile from the cached data
823    """
824
825    def __init__(self, caches_array):
826        self.caches_array = caches_array
827        for cache_class in self.caches_array:
828            if not issubclass(cache_class, RecipeInfoCommon):
829                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
830            cache_class.init_cacheData(self)
831
832        # Direct cache variables
833        self.task_queues = {}
834        self.preferred = {}
835        self.tasks = {}
836        # Indirect Cache variables (set elsewhere)
837        self.ignored_dependencies = []
838        self.world_target = set()
839        self.bbfile_priority = {}
840
841    def add_from_recipeinfo(self, fn, info_array):
842        for info in info_array:
843            info.add_cacheData(self, fn)
844
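# Illustrative usage sketch of CacheData (comment only, not executed); the
# info_array would come from Cache.parse()/load() and the virtual filename
# is hypothetical:
#
#   cachedata = CacheData([CoreRecipeInfo])
#   cachedata.add_from_recipeinfo("virtual:native:/r/foo_1.0.bb", info_array)
#   cachedata.pkg_fn["virtual:native:/r/foo_1.0.bb"]   # -> the recipe's PN
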
845class MultiProcessCache(object):
846    """
847    BitBake multi-process cache implementation
848
849    Used by the codeparser & file checksum caches
850    """
851
852    def __init__(self):
853        self.cachefile = None
854        self.cachedata = self.create_cachedata()
855        self.cachedata_extras = self.create_cachedata()
856
857    def init_cache(self, d, cache_file_name=None):
858        cachedir = (d.getVar("PERSISTENT_DIR") or
859                    d.getVar("CACHE"))
860        if cachedir in [None, '']:
861            return
862        bb.utils.mkdirhier(cachedir)
863        self.cachefile = os.path.join(cachedir,
864                                      cache_file_name or self.__class__.cache_file_name)
865        logger.debug("Using cache in '%s'", self.cachefile)
866
867        glf = bb.utils.lockfile(self.cachefile + ".lock")
868
869        try:
870            with open(self.cachefile, "rb") as f:
871                p = pickle.Unpickler(f)
872                data, version = p.load()
873        except:
874            bb.utils.unlockfile(glf)
875            return
876
877        bb.utils.unlockfile(glf)
878
879        if version != self.__class__.CACHE_VERSION:
880            return
881
882        self.cachedata = data
883
884    def create_cachedata(self):
885        data = [{}]
886        return data
887
888    def save_extras(self):
889        if not self.cachefile:
890            return
891
892        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
893
894        i = os.getpid()
895        lf = None
896        while not lf:
897            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
898            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
899                if lf:
900                    bb.utils.unlockfile(lf)
901                    lf = None
902                i = i + 1
903                continue
904
905            with open(self.cachefile + "-" + str(i), "wb") as f:
906                p = pickle.Pickler(f, -1)
907                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
908
909        bb.utils.unlockfile(lf)
910        bb.utils.unlockfile(glf)
911
912    def merge_data(self, source, dest):
913        for j in range(0,len(dest)):
914            for h in source[j]:
915                if h not in dest[j]:
916                    dest[j][h] = source[j][h]
917
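    # Illustrative sketch of merge_data() (comment only, not executed): entries
    # from the per-process extras are copied in only where the destination has
    # no value yet, so existing entries always win:
    #
    #   source = [{"a": 1, "b": 2}]
    #   dest   = [{"b": 99}]
    #   self.merge_data(source, dest)   # dest becomes [{"b": 99, "a": 1}]
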
918    def save_merge(self):
919        if not self.cachefile:
920            return
921
922        glf = bb.utils.lockfile(self.cachefile + ".lock")
923
924        data = self.cachedata
925
926        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
927            f = os.path.join(os.path.dirname(self.cachefile), f)
928            try:
929                with open(f, "rb") as fd:
930                    p = pickle.Unpickler(fd)
931                    extradata, version = p.load()
932            except (IOError, EOFError):
933                os.unlink(f)
934                continue
935
936            if version != self.__class__.CACHE_VERSION:
937                os.unlink(f)
938                continue
939
940            self.merge_data(extradata, data)
941            os.unlink(f)
942
943        with open(self.cachefile, "wb") as f:
944            p = pickle.Pickler(f, -1)
945            p.dump([data, self.__class__.CACHE_VERSION])
946
947        bb.utils.unlockfile(glf)
948
949
950class SimpleCache(object):
951    """
952    BitBake simple persistent cache implementation
953
954    Stores a single versioned, pickled data object in one lock-protected cache file
955    """
956
957    def __init__(self, version):
958        self.cachefile = None
959        self.cachedata = None
960        self.cacheversion = version
961
962    def init_cache(self, d, cache_file_name=None, defaultdata=None):
963        cachedir = (d.getVar("PERSISTENT_DIR") or
964                    d.getVar("CACHE"))
965        if not cachedir:
966            return defaultdata
967
968        bb.utils.mkdirhier(cachedir)
969        self.cachefile = os.path.join(cachedir,
970                                      cache_file_name or self.__class__.cache_file_name)
971        logger.debug("Using cache in '%s'", self.cachefile)
972
973        glf = bb.utils.lockfile(self.cachefile + ".lock")
974
975        try:
976            with open(self.cachefile, "rb") as f:
977                p = pickle.Unpickler(f)
978                data, version = p.load()
979        except:
980            bb.utils.unlockfile(glf)
981            return defaultdata
982
983        bb.utils.unlockfile(glf)
984
985        if version != self.cacheversion:
986            return defaultdata
987
988        return data
989
990    def save(self, data):
991        if not self.cachefile:
992            return
993
994        glf = bb.utils.lockfile(self.cachefile + ".lock")
995
996        with open(self.cachefile, "wb") as f:
997            p = pickle.Pickler(f, -1)
998            p.dump([data, self.cacheversion])
999
1000        bb.utils.unlockfile(glf)
1001
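# Illustrative usage sketch of SimpleCache (comment only, not executed); the
# datastore 'd', cache file name and payload below are hypothetical:
#
#   sc = SimpleCache("1")                               # cache format version
#   data = sc.init_cache(d, "my_tool_cache.dat", defaultdata={})
#   data["some-key"] = "some-value"
#   sc.save(data)                                       # writes [data, version]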