#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006        Richard Purdie
# Copyright (C) 2012        Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import logging
import pickle
from collections import defaultdict
from collections.abc import Mapping
import bb.utils
from bb import PrefixLoggerAdapter
import re
import shutil

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "156"

def getCacheFile(path, filename, mc, data_hash):
    mcspec = ''
    if mc:
        mcspec = ".%s" % mc
    return os.path.join(path, filename + mcspec + "." + data_hash)
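
# For illustration (hypothetical values): getCacheFile("/tmp/cache",
# "bb_cache.dat", "mc1", "abc123") returns
# "/tmp/cache/bb_cache.dat.mc1.abc123"; with mc="" the ".mc1" part is omitted.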

# RecipeInfoCommon defines common methods for retrieving data from
# metadata for the caches. CoreRecipeInfo, as well as any extra
# RecipeInfo classes, needs to inherit from this class.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
                    for pkg in packages)
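
    # For illustration (hypothetical values): pkgvar('RDEPENDS',
    # ['foo', 'foo-dev'], d) returns {'foo': [...], 'foo-dev': [...]},
    # where each value is the exploded RDEPENDS:<pkg> dependency list.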

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag))
                    for var in varlist)
        if squash:
            return dict((k,v) for (k,v) in out_dict.items() if v)
        else:
            return out_dict
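
    # For illustration (hypothetical values): with squash=True, entries whose
    # flag value is unset or empty are dropped, e.g.
    # {'do_fetch': '1', 'do_build': None} squashes to {'do_fetch': '1'}.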

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''

class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.provides  = self.depvar('PROVIDES', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename, metadata)[0]
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.skipped = True
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.depends          = self.depvar('DEPENDS', metadata)
        self.rdepends         = self.depvar('RDEPENDS', metadata)
        self.rrecommends      = self.depvar('RRECOMMENDS', metadata)
        self.rdepends_pkg     = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg  = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits         = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv      = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs     = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootlogs     = self.getvar('FAKEROOTLOGS', metadata)
        self.fakerootnoenv    = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc    = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData fields owned by CoreRecipeInfo
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.fakerootlogs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not bb.utils.to_boolean(self.not_world):
            cachedata.possible_world.append(fn)
        #else:
        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)

        # Create a collection of all targets, used by sanity-checking
        # tasks (e.g. checking upstream versions and licenses) and for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s:%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.fakerootlogs[fn] = self.fakerootlogs
        cachedata.extradepsfunc[fn] = self.extradepsfunc


class SiggenRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    classname = "SiggenRecipeInfo"
    cachefile = "bb_cache_" + classname + ".dat"
    # we don't want to show this information in graph files so don't set cachefields
    #cachefields = []

    def __init__(self, filename, metadata):
        self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
        self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
        self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)

    @classmethod
    def init_cacheData(cls, cachedata):
        cachedata.siggen_taskdeps = {}
        cachedata.siggen_gendeps = {}
        cachedata.siggen_varvals = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.siggen_gendeps[fn] = self.siggen_gendeps
        cachedata.siggen_varvals[fn] = self.siggen_varvals
        cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps

    # The siggen variable data is large and impacts:
    #  - bitbake's overall memory usage
    #  - the amount of data sent over IPC between parsing processes and the server
    #  - the size of the cache files on disk
    #  - the size of "sigdata" hash information files on disk
    # The data consists of strings (some large) or frozensets of variable names.
    # As such, we (a) deduplicate the data here and (b) pass references to the
    # object on second access (e.g. over IPC or when saving into a pickle).
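    #
    # For illustration (hypothetical data): the first _save() of
    # {"do_fetch": frozenset({"PV"})} emits ("do_fetch", frozenset({"PV"}), 1);
    # a later save of an equal frozenset emits (dep, None, 1) instead, and
    # _restore() resolves such back-references through a per-pid index map.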

    store = {}
    save_map = {}
    save_count = 1
    restore_map = {}
    restore_count = {}

    @classmethod
    def reset(cls):
        # Needs to be called before starting new streamed data in a given process
        # (e.g. writing out the cache again)
        cls.save_map = {}
        cls.save_count = 1
        cls.restore_map = {}

    @classmethod
    def _save(cls, deps):
        ret = []
        if not deps:
            return deps
        for dep in deps:
            fs = deps[dep]
            if fs is None:
                ret.append((dep, None, None))
            elif fs in cls.save_map:
                ret.append((dep, None, cls.save_map[fs]))
            else:
                cls.save_map[fs] = cls.save_count
                ret.append((dep, fs, cls.save_count))
                cls.save_count = cls.save_count + 1
        return ret

    @classmethod
    def _restore(cls, deps, pid):
        ret = {}
        if not deps:
            return deps
        if pid not in cls.restore_map:
            cls.restore_map[pid] = {}
        pid_map = cls.restore_map[pid]
        for dep, fs, mapnum in deps:
            if fs is None and mapnum is None:
                ret[dep] = None
            elif fs is None:
                ret[dep] = pid_map[mapnum]
            else:
                try:
                    fs = cls.store[fs]
                except KeyError:
                    cls.store[fs] = fs
                pid_map[mapnum] = fs
                ret[dep] = fs
        return ret

    def __getstate__(self):
        ret = {}
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            ret[key] = self._save(self.__dict__[key])
        ret['pid'] = os.getpid()
        return ret

    def __setstate__(self, state):
        pid = state['pid']
        for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
            setattr(self, key, self._restore(state[key], pid))

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
        (_, mc, virtualfn) = virtualfn.split(':', 2)

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        elems = virtualfn.split(':')
        cls = ":".join(elems[1:-1])
        fn = elems[-1]

    return (fn, cls, mc)
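
# For illustration (hypothetical path):
#   virtualfn2realfn("mc:mymc:virtual:native:/r/foo.bb")
#   -> ("/r/foo.bb", "native", "mymc")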

def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if cls:
        realfn = "virtual:" + cls + ":" + realfn
    if mc:
        realfn = "mc:" + mc + ":" + realfn
    return realfn
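
# For illustration: realfn2virtual("/r/foo.bb", "native", "mymc") returns
# "mc:mymc:virtual:native:/r/foo.bb", the inverse of virtualfn2realfn() above.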

def variant2virtual(realfn, variant):
    """
    Convert a real filename + a variant to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("mc:") and variant.count(':') >= 2:
        elems = variant.split(":")
        if elems[2]:
            return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
        return "mc:" + elems[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn
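
# For illustration: variant2virtual("/r/foo.bb", "native") returns
# "virtual:native:/r/foo.bb", while the multiconfig variant "mc:mymc:native"
# returns "mc:mymc:virtual:native:/r/foo.bb".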

#
# Cooker calls cacheValid on its recipe list, then either calls loadCached
# from its main thread or parse from separate processes to generate an
# up-to-date cache
#
class Cache(object):
    """
    BitBake Cache implementation
    """
    def __init__(self, databuilder, mc, data_hash, caches_array):
        self.databuilder = databuilder
        self.data = databuilder.data

        # caches_array is stored so we can decide later whether extra
        # cache files need to be dumped/loaded
        self.mc = mc
        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else ''), logger)
        self.caches_array = caches_array
        self.cachedir = self.data.getVar("CACHE")
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash
        self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
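        # For illustration (hypothetical paths): this regex splits a flattened
        # file-checksums string such as "/a/b:True  /c d:False /e:True" into
        # ["/a/b:True", "/c d:False", "/e:True"], splitting only on whitespace
        # that follows ":True" or ":False" so paths containing spaces survive
        # intact (see cacheValidUpdate() below).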

        if self.cachedir in [None, '']:
            bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")

    def getCacheFile(self, cachefile):
        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)

    def prepare_cache(self, progress):
        loaded = 0

        self.cachefile = self.getCacheFile("bb_cache.dat")

        self.logger.debug("Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = self.getCacheFile(cache_class.cachefile)
                cache_exists = os.path.exists(cachefile)
                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                cache_ok = cache_ok and cache_exists
                cache_class.init_cacheData(self)
        if cache_ok:
            loaded = self.load_cachefile(progress)
        elif os.path.isfile(self.cachefile):
            self.logger.info("Out of date cache found, rebuilding...")
        else:
            self.logger.debug("Cache file %s not found, building...", self.cachefile)

        # We don't use the symlink, it's just for debugging convenience
        if self.mc:
            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
        else:
            symlink = os.path.join(self.cachedir, "bb_cache.dat")

        if os.path.exists(symlink) or os.path.islink(symlink):
            bb.utils.remove(symlink)
        try:
            os.symlink(os.path.basename(self.cachefile), symlink)
        except OSError:
            pass

        return loaded

    def cachesize(self):
        cachesize = 0
        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            try:
                with open(cachefile, "rb") as f:
                    cachesize += os.fstat(f.fileno()).st_size
            except FileNotFoundError:
                pass

        return cachesize

    def load_cachefile(self, progress):
        previous_progress = 0

        for cache_class in self.caches_array:
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug('Loading cache file: %s', cachefile)
            with open(cachefile, "rb") as f:
                pickled = pickle.Unpickler(f)
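                # Cache file layout (written by sync() below): two header
                # pickles, __cache_version__ and bb.__version__, followed by
                # an alternating stream of key/value pickles until EOF.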
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    self.logger.info('Invalid cache, rebuilding...')
                    return 0

                if cache_ver != __cache_version__:
                    self.logger.info('Cache version mismatch, rebuilding...')
                    return 0
                elif bitbake_ver != bb.__version__:
                    self.logger.info('Bitbake version mismatch, rebuilding...')
                    return 0

                # Load the rest of the cache file
                current_progress = 0
                while True:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire progress events when the percentage increases
                    # (see MulticonfigCache.progress)
                    current_progress = f.tell() + previous_progress
                    progress(current_progress)

                previous_progress += current_progress

        return len(self.depends_cache)

    def parse(self, filename, appends, layername):
        """Parse the specified filename, returning the recipe information"""
        self.logger.debug("Parsing %s", filename)
        infos = []
        datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def loadCached(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values.
        """

        infos = []
        # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
        info_array = self.depends_cache[filename]
        for variant in info_array[0].variants:
            virtualfn = variant2virtual(filename, variant)
            infos.append((virtualfn, self.depends_cache[virtualfn]))

        return infos

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)
        return fn in self.clean

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            self.logger.debug2("%s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            self.logger.debug2("%s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            self.logger.debug2("%s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.logger.debug2("%s's dependency %s was removed", fn, f)
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    self.logger.debug2("%s's dependency %s changed", fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                if not fl:
                    continue
                # Have to be careful about spaces and colons in filenames
                flist = self.filelist_regex.split(fl)
                for f in flist:
                    if not f:
                        continue
                    f, exist = f.rsplit(":", 1)
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        self.logger.debug2("%s's file checksum list entry %s changed", fn, f)
                        self.remove(fn)
                        return False

        if tuple(appends) != tuple(info_array[0].appends):
            self.logger.debug2("appends for %s changed", fn)
            self.logger.debug2("%s to %s", str(appends), str(info_array[0].appends))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                self.logger.debug2("%s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                self.logger.debug2("Extra caches missing for %s?", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    self.logger.debug2("Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                self.logger.debug2("Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            self.logger.debug("Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            self.logger.debug("Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """
        if self.cacheclean:
            self.logger.debug2("Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = self.getCacheFile(cache_class.cachefile)
            self.logger.debug2("Writing %s", cachefile)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache
        SiggenRecipeInfo.reset()

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if self.mc is not None:
            (fn, cls, mc) = virtualfn2realfn(filename)
            if mc:
                self.logger.error("Unexpected multiconfig %s", filename)
                return

            vfn = realfn2virtual(fn, cls, self.mc)
        else:
            vfn = filename

        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(vfn, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

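        # Cache the result unless the recipe opted out via BB_DONT_CACHE;
        # a recipe whose PV still contains the 'SRCREVINACTION' placeholder
        # (an unresolved SRCREV) is only cached if it was skipped.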
        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

class MulticonfigCache(Mapping):
    def __init__(self, databuilder, data_hash, caches_array):
        def progress(p):
            nonlocal current_progress
            nonlocal previous_progress
            nonlocal previous_percent
            nonlocal cachesize

            current_progress = previous_progress + p

            if current_progress > cachesize:
                # we might have calculated an incorrect total size because a
                # file might have been written out just after we checked its
                # size
                cachesize = current_progress
            current_percent = 100 * current_progress / cachesize
            if current_percent > previous_percent:
                previous_percent = current_percent
                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                databuilder.data)


        cachesize = 0
        current_progress = 0
        previous_progress = 0
        previous_percent = 0
        self.__caches = {}

        for mc, mcdata in databuilder.mcdata.items():
            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)

            cachesize += self.__caches[mc].cachesize()

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
        loaded = 0

        for c in self.__caches.values():
            SiggenRecipeInfo.reset()
            loaded += c.prepare_cache(progress)
            previous_progress = current_progress

        # Note: the loaded count corresponds to the number of parsed files;
        # a file may have several caches but is still counted as one item
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)

    def __len__(self):
        return len(self.__caches)

    def __getitem__(self, key):
        return self.__caches[key]

    def __contains__(self, key):
        return key in self.__caches

    def __iter__(self):
        for k in self.__caches:
            yield k
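
    # For illustration: the mapping is keyed by multiconfig name, e.g.
    # multicache[""] for the default configuration or, assuming a multiconfig
    # named "mymc" is configured, multicache["mymc"].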


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """
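
    # Write protocol (a summary of the code below): each process dumps its
    # cachedata_extras to "<cachefile>-<n>" under a per-file lock in
    # save_extras(); save_merge() later folds every "<cachefile>-<n>" file
    # back into the main cache file and deletes it.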

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, cachedir, cache_file_name=None):
        if not cachedir:
            return

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def clear_cache(self):
        if not self.cachefile:
            bb.fatal("Can't clear invalid cachefile")

        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()
        with bb.utils.fileslocked([self.cachefile + ".lock"]):
            bb.utils.remove(self.cachefile)
            bb.utils.remove(self.cachefile + "-*")

    def save_extras(self):
        if not self.cachefile:
            return

        have_data = any(self.cachedata_extras)
        if not have_data:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                    lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
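
    # For illustration (hypothetical data): merge_data([{'a': 1, 'b': 2}],
    # [{'b': 9}]) updates dest in place to [{'b': 9, 'a': 1}]; entries already
    # present in dest always win over the incoming extras.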

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        have_data = False

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            have_data = True
            self.merge_data(extradata, data)
            os.unlink(f)

        if have_data:
            with open(self.cachefile, "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)


class SimpleCache(object):
    """
    BitBake simple cache implementation

    A single pickled value (plus a version), guarded by a lock file
    """

    def __init__(self, version):
        self.cachefile = None
        self.cachedata = None
        self.cacheversion = version

    def init_cache(self, d, cache_file_name=None, defaultdata=None):
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if not cachedir:
            return defaultdata

        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug("Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return defaultdata

        bb.utils.unlockfile(glf)

        if version != self.cacheversion:
            return defaultdata

        return data

    def save(self, data):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.cacheversion])

        bb.utils.unlockfile(glf)

    def copyfile(self, target):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")
        shutil.copy(self.cachefile, target)
        bb.utils.unlockfile(glf)
989