xref: /openbmc/openbmc/poky/bitbake/lib/bb/cache.py (revision bccaff34)
1#
2# BitBake Cache implementation
3#
4# Caching of bitbake variables before task execution
5
6# Copyright (C) 2006        Richard Purdie
7# Copyright (C) 2012        Intel Corporation
8
9# but small sections based on code from bin/bitbake:
10# Copyright (C) 2003, 2004  Chris Larson
11# Copyright (C) 2003, 2004  Phil Blundell
12# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
13# Copyright (C) 2005        Holger Hans Peter Freyther
14# Copyright (C) 2005        ROAD GmbH
15#
16# SPDX-License-Identifier: GPL-2.0-only
17#
18
19import os
20import logging
21import pickle
22from collections import defaultdict
23from collections.abc import Mapping
24import bb.utils
25from bb import PrefixLoggerAdapter
26import re
27import shutil
28
logger = logging.getLogger("BitBake.Cache")

# Bump this whenever the on-disk cache format changes incompatibly; a
# mismatch makes load_cachefile() discard the cache and trigger a rebuild.
__cache_version__ = "155"
32
def getCacheFile(path, filename, mc, data_hash):
    """Return the full path of a cache file for the given multiconfig/hash."""
    suffix = ".%s" % mc if mc else ''
    return os.path.join(path, filename + suffix + "." + data_hash)
38
# RecipeInfoCommon defines common data-retrieval methods used by the
# caches to extract information from recipe metadata. CoreRecipeInfo, as
# well as any extra RecipeInfo classes, need to inherit from this class.
class RecipeInfoCommon(object):
    """Shared metadata-extraction helpers for recipe info cache classes."""

    @classmethod
    def listvar(cls, var, metadata):
        """Return the variable's value split on whitespace."""
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        """Return the variable's value as an int (0 when unset or empty)."""
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        """Return the variable's value exploded into a dependency list."""
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        """Map each package to the exploded dependencies of VAR:<pkg>."""
        return {pkg: cls.depvar("%s:%s" % (var, pkg), metadata) for pkg in packages}

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        """Map each task to the value of VAR:task-<task>."""
        return {task: cls.getvar("%s:task-%s" % (var, task), metadata) for task in tasks}

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        """Map each variable in varlist to its value for the given flag.

        With squash=True, entries whose flag value is falsy are dropped.
        """
        flags = {var: metadata.getVarFlag(var, flag) for var in varlist}
        if squash:
            return {key: val for key, val in flags.items() if val}
        return flags

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        """Return the (expanded) value of the variable, or '' when unset."""
        return metadata.getVar(var, expand) or ''
78
79
class CoreRecipeInfo(RecipeInfoCommon):
    """
    The core per-recipe data cached for every parsed file: provider and
    dependency information, versions, stamps and task metadata.
    """
    __slots__ = ()

    # On-disk cache file this class's instances are written to
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        """Capture all cacheable data for filename from its parsed metadata."""
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        # '' represents the base (non-virtual) variant
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides  = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename,metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            # With no PACKAGES set, the recipe still provides its own name
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes only carry the minimal data captured above;
            # the attributes below (pv, tasks, ...) are never set for them.
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends          = self.depvar('DEPENDS', metadata)
        self.rdepends         = self.depvar('RDEPENDS', metadata)
        self.rrecommends      = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg     = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg  = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits         = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv      = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs     = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs     = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv    = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc    = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Create the empty aggregate structures this class populates."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's data into the aggregate structures under fn."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        # A recipe always provides its own PN in addition to PROVIDES
        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Runtime provides: global RPROVIDES plus the per-package entries
        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not bb.utils.to_boolean(self.not_world):
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc
240
241
class SiggenRecipeInfo(RecipeInfoCommon):
    """
    Signature-generation data for each recipe, cached in its own file and
    pickled through a deduplicating encoding (see _save/_restore below).
    """
    __slots__ = ()

    classname = "SiggenRecipeInfo"
    cachefile = "bb_cache_" + classname +".dat"
    # we don't want to show this information in graph files so don't set cachefields
    #cachefields = []

    def __init__(self, filename, metadata):
        """Capture the raw siggen data recorded during parsing."""
        self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
        self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
        self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Create the empty per-file dictionaries this class populates."""
        cachedata.siggen_taskdeps = {}
        cachedata.siggen_gendeps = {}
        cachedata.siggen_varvals = {}

    def add_cacheData(self, cachedata, fn):
        """Record this recipe's siggen data under its (virtual) filename."""
        cachedata.siggen_gendeps[fn] = self.siggen_gendeps
        cachedata.siggen_varvals[fn] = self.siggen_varvals
        cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps

    # The siggen variable data is large and impacts:
    #  - bitbake's overall memory usage
    #  - the amount of data sent over IPC between parsing processes and the server
    #  - the size of the cache files on disk
    #  - the size of "sigdata" hash information files on disk
    # The data consists of strings (some large) or frozenset lists of variables
    # As such, we a) deduplicate the data here and b) pass references to the object
    # at second access (e.g. over IPC or saving into pickle).

    store = {}          # canonical value objects shared across all instances
    save_map = {}       # value -> integer id already emitted by _save()
    save_count = 1      # next id to hand out
    restore_map = {}    # sender pid -> {id -> value} seen so far by _restore()
    restore_count = {}

    @classmethod
    def reset(cls):
        # Needs to be called before starting new streamed data in a given process
        # (e.g. writing out the cache again)
        cls.save_map = {}
        cls.save_count = 1
        cls.restore_map = {}

    @classmethod
    def _save(cls, deps):
        """
        Encode a {dep: value} dict as a list of (dep, value, id) tuples,
        transmitting each distinct value only once; later repeats are sent
        as (dep, None, id) back-references to the earlier id.
        """
        ret = []
        if not deps:
            return deps
        for dep in deps:
            fs = deps[dep]
            if fs is None:
                ret.append((dep, None, None))
            elif fs in cls.save_map:
                ret.append((dep, None, cls.save_map[fs]))
            else:
                cls.save_map[fs] = cls.save_count
                ret.append((dep, fs, cls.save_count))
                cls.save_count = cls.save_count + 1
        return ret

    @classmethod
    def _restore(cls, deps, pid):
        """
        Inverse of _save(): rebuild the {dep: value} dict, resolving id-only
        back-references via the per-sender (pid) map and sharing equal values
        through the process-wide cls.store.
        """
        ret = {}
        if not deps:
            return deps
        if pid not in cls.restore_map:
            cls.restore_map[pid] = {}
        map = cls.restore_map[pid]
        for dep, fs, mapnum in deps:
            if fs is None and mapnum is None:
                ret[dep] = None
            elif fs is None:
                # Back-reference to a value this sender already transmitted
                ret[dep] = map[mapnum]
            else:
                # Canonicalise through the shared store so equal values
                # become one object in memory
                try:
                    fs = cls.store[fs]
                except KeyError:
                    cls.store[fs] = fs
                map[mapnum] = fs
                ret[dep] = fs
        return ret

    def __getstate__(self):
        # Pickle via the deduplicating encoding, tagged with our pid so the
        # receiving process can keep per-sender reference maps separate.
        ret = {}
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            ret[key] = self._save(self.__dict__[key])
        ret['pid'] = os.getpid()
        return ret

    def __setstate__(self, state):
        pid = state['pid']
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            setattr(self, key, self._restore(state[key], pid))
339
340
def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
        # Strip the "mc:<name>:" prefix, keeping any colons in the remainder
        _, mc, virtualfn = virtualfn.split(':', 2)

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        # Everything between "virtual:" and the last colon is the class
        prefix, _, fn = virtualfn.rpartition(':')
        cls = prefix.partition(':')[2]

    return (fn, cls, mc)
359
def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    virtfn = realfn
    if cls:
        virtfn = "virtual:%s:%s" % (cls, virtfn)
    if mc:
        virtfn = "mc:%s:%s" % (mc, virtfn)
    return virtfn
369
def variant2virtual(realfn, variant):
    """
    Convert a real filename + a variant name to the corresponding virtual
    filename ('' means the base recipe itself).
    """
    if not variant:
        return realfn
    if variant.startswith("mc:") and variant.count(':') >= 2:
        fields = variant.split(":")
        mcname = fields[1]
        # Note: only the first component after the mc name decides whether
        # a "virtual:" section is emitted (preserves historic behaviour)
        if fields[2]:
            return "mc:%s:virtual:%s:%s" % (mcname, ":".join(fields[2:]), realfn)
        return "mc:%s:%s" % (mcname, realfn)
    return "virtual:%s:%s" % (variant, realfn)
382
383#
384# Cooker calls cacheValid on its recipe list, then either calls loadCached
385# from it's main thread or parse from separate processes to generate an up to
386# date cache
387#
class Cache(object):
    """
    BitBake Cache implementation

    Loads and saves the per-multiconfig recipe parse cache (bb_cache.dat
    plus one file per extra cache class) and answers whether a cached
    entry is still valid for a given recipe file.
    """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        """
        databuilder: provides the configuration datastore (databuilder.data)
        mc: multiconfig name ('' for the default configuration)
        data_hash: hash of the base configuration, used in cache file names
        caches_array: list of RecipeInfoCommon subclasses to load/save
        """
        self.databuilder = databuilder
        self.data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.mc = mc
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
        self.caches_array = caches_array
        self.cachedir = self.data.getVar("CACHE")
        self.clean = set()        # filenames known to have valid cache entries
        self.checked = set()      # filenames already examined by cacheValidUpdate()
        self.depends_cache = {}   # filename -> list of RecipeInfoCommon instances
        self.data_fn = None
        self.cacheclean = True    # True while nothing new has been parsed
        self.data_hash = data_hash
        # Splits "file:True file2:False" style checksum lists on the spaces
        # following the :True/:False markers (filenames may contain spaces)
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')

        if self.cachedir in [None, '']:
            bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")

    def getCacheFile(self, cachefile):
        """Return the full path of the named cache file for this mc/hash."""
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)

    def prepare_cache(self, progress):
        """
        Load the cache files if they all exist, returning the number of
        entries loaded (0 when the cache needs rebuilding). progress is a
        callable taking a byte offset, used for load-progress reporting.
        """
        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                # All cache files must be present; a partial set is treated
                # as no usable cache at all
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building..." % self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            # Symlinks may be unsupported on this filesystem; not fatal
            pass

        return loaded

    def cachesize(self):
        """Return the combined size in bytes of all existing cache files."""
        cachesize = 0
        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            try:
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size
            except FileNotFoundError:
                pass

        return cachesize

    def load_cachefile(self, progress):
        """
        Unpickle every cache file into self.depends_cache, reporting byte
        progress through the progress callback. Returns the number of
        cached filenames, or 0 if any version check fails.
        """
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s' % cachefile)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file
                current_progress = 0
                # The file object is always truthy; the loop exits via the
                # break when load() raises at end-of-file
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    progress(cachefile.tell() + previous_progress)

                previous_progress += current_progress

        return len(self.depends_cache)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            # Accumulate dependencies across all variants onto the base recipe
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            # One info object per registered cache class
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def loadCached(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values.
        """

        infos = []
        # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
        info_array = self.depends_cache[filename]
        for variant in info_array[0].variants:
            virtualfn = variant2virtual(filename, variant)
            infos.append((virtualfn, self.depends_cache[virtualfn]))

        return infos

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed",
                                         fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    self.logger.debug2("%s's dependency %s changed",
                                         fn, f)
                    self.remove(fn)
                    return False

        # Check recorded file-existence checksums ("path:True"/"path:False")
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    f, exist = f.rsplit(":", 1)
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list file %s changed",
                                             fn, f)
                        self.remove(fn)
                        return False

        # A different set of bbappends invalidates the entry
        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        # Every variant of the recipe must be cached too
        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            self.logger.debug("Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            self.logger.debug("Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """
        if self.cacheclean:
            self.logger.debug2("Cache is clean, not saving.")
            return

        # Each cache class gets its own file containing only its own
        # info objects, prefixed with the version stamps
        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug2("Writing %s", cachefile)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache
        SiggenRecipeInfo.reset()

    @staticmethod
    def mtime(cachefile):
        """Return the cached mtime of cachefile (0 on error)."""
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        """
        Record freshly parsed/loaded recipe info both into the aggregate
        cacheData and into this cache's own depends_cache for saving.
        """
        if self.mc is not None:
            (fn, cls, mc) = virtualfn2realfn(filename)
            if mc:
                self.logger.error("Unexpected multiconfig %s", filename)
                return

            vfn = realfn2virtual(fn, cls, self.mc)
        else:
            vfn = filename

        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(vfn, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        # Skipped recipes never set .pv, hence the short-circuit; the
        # SRCREVINACTION marker in PV presumably denotes an unresolved
        # srcrev that must not be cached -- TODO confirm against fetcher
        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array
726
class MulticonfigCache(Mapping):
    """
    A read-only mapping of multiconfig name -> Cache which loads every
    per-multiconfig cache up front, firing aggregated progress events.
    """
    def __init__(self, databuilder, data_hash, caches_array):
        def progress(p):
            # Fold per-cache byte progress into whole-load percentage events
            nonlocal current_progress
            nonlocal previous_progress
            nonlocal previous_percent
            nonlocal cachesize

            current_progress = previous_progress + p

            if current_progress > cachesize:
                # we might have calculated incorrect total size because a file
                # might've been written out just after we checked its size
                cachesize = current_progress
            current_percent = 100 * current_progress / cachesize
            if current_percent > previous_percent:
                previous_percent = current_percent
                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                databuilder.data)


        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0
        self.__caches = {}

        # One Cache per multiconfig; total the on-disk sizes first so
        # progress can be reported as a fraction of the whole
        for mc, mcdata in databuilder.mcdata.items():
            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)

            cachesize += self.__caches[mc].cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
        loaded = 0

        for c in self.__caches.values():
            SiggenRecipeInfo.reset()
            loaded += c.prepare_cache(progress)
            previous_progress = current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        for k in self.__caches:
            yield k
783
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    # NOTE(review): Cache.__init__ in this file takes (databuilder, mc,
    # data_hash, caches_array); this two-argument call looks stale and would
    # raise TypeError if reached -- confirm whether this entry point is still
    # used anywhere before relying on it.
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
802
803
class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        # Let every registered extra-cache class attach its own storage to
        # this object; complain (but continue) if it has the wrong base class.
        for extra_cls in self.caches_array:
            if not issubclass(extra_cls, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % extra_cls)
            extra_cls.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        """Fold each per-recipe info object for file *fn* into this store."""
        for recipe_info in info_array:
            recipe_info.add_cacheData(self, fn)
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Each process appends its new entries ("extras") to a private side file
    written under a per-index lock; save_merge() later folds every side file
    back into the main cache file. The main ".lock" file serializes readers
    and the merger against the writers.
    """

    def __init__(self):
        # Path of the main cache file; None until init_cache() is called.
        self.cachefile = None
        # Data loaded from the main cache file.
        self.cachedata = self.create_cachedata()
        # Entries added by this process, persisted separately by save_extras().
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, cachedir, cache_file_name=None):
        """Load the cache from *cachedir*, creating the directory if needed.

        Keeps the empty default data (and returns silently) when no cachedir
        is given, the cache file is absent/unreadable, or its stored version
        differs from CACHE_VERSION.
        """
        if not cachedir:
            return

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # A missing or corrupt cache is not an error: start empty.
            # (Was a bare "except:", which also swallowed KeyboardInterrupt
            # and SystemExit.)
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        """Return the empty cache structure: a list with one dict per table."""
        data = [{}]
        return data

    def save_extras(self):
        """Write this process's new entries to a side file "<cachefile>-<n>".

        A shared lock on the main ".lock" file keeps save_merge() out while
        any writer is active; an exclusive ".lock.<n>" lock claims one side
        file per writer, starting at this process's pid and probing upwards.
        """
        if not self.cachefile:
            return

        have_data = any(self.cachedata_extras)
        if not have_data:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            # Claim index i; skip it if the lock is taken or its side file
            # already exists (left over from a previous writer).
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                    lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Copy entries from *source* into *dest* table-by-table, never
        overwriting a key already present in *dest* (first writer wins)."""
        for j in range(len(dest)):
            for h, value in source[j].items():
                dest[j].setdefault(h, value)

    def save_merge(self):
        """Fold every side file written by save_extras() into the main cache
        file, deleting side files as they are consumed.

        Side files that are unreadable, truncated, corrupt, or carry a stale
        CACHE_VERSION are discarded.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        have_data = False

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError, pickle.UnpicklingError):
                # UnpicklingError added: a corrupt side file previously
                # crashed the merge and was never cleaned up.
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            have_data = True
            self.merge_data(extradata, data)
            os.unlink(f)

        if have_data:
            with open(self.cachefile, "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
939
940
class SimpleCache(object):
    """
    BitBake simple cache implementation: one pickled (data, version) pair in
    a single file, guarded by a lock file.

    (Docstring fixed: this class was previously described as the
    "multi-process cache", a copy-paste from MultiProcessCache; it has no
    side-file/merge machinery.)
    """

    def __init__(self, version):
        # Path of the cache file; None until init_cache() locates it.
        self.cachefile = None
        self.cachedata = None
        # Opaque version token; stored data with a different token is ignored.
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        """Locate and load the cache under PERSISTENT_DIR (or CACHE).

        Returns the cached data, or *defaultdata* when no cache directory is
        configured, the file cannot be read, or the stored version differs
        from self.cacheversion.
        """
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except Exception:
            # Missing or unreadable cache: fall back to the default.
            # (Was a bare "except:", which also swallowed KeyboardInterrupt
            # and SystemExit.)
            bb.utils.unlockfile(glf)
            return defaultdata

        bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        """Pickle *data* together with the cache version, under the lock.

        No-op until init_cache() has set self.cachefile.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)

    def copyfile(self, target):
        """Copy the cache file to *target* while holding the lock.

        No-op until init_cache() has set self.cachefile.
        """
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        shutil.copy(self.cachefile, target)
        bb.utils.unlockfile(glf)
1000