# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006        Richard Purdie
# Copyright (C) 2012        Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import pickle
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "152"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
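
# For example (hypothetical values), with CACHE = "/build/cache" and a
# data hash of "abc123", the main cache file would be:
#   getCacheFile("/build/cache", "bb_cache.dat", "abc123")
#       -> "/build/cache/bb_cache.dat.abc123"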

# RecipeInfoCommon defines the common methods for retrieving data from
# recipe metadata for the caches. CoreRecipeInfo and any extra RecipeInfo
# classes must inherit from it.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag))
                    for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand=True):
        return metadata.getVar(var, expand) or ''
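
# An illustrative sketch of what the helpers above return, assuming a
# hypothetical datastore d with PACKAGES = "foo foo-dev" and
# RDEPENDS_foo = "bar (>= 1.0)":
#
#   RecipeInfoCommon.listvar('PACKAGES', d)          -> ['foo', 'foo-dev']
#   RecipeInfoCommon.depvar('RDEPENDS_foo', d)       -> ['bar']
#   RecipeInfoCommon.pkgvar('RDEPENDS', ['foo'], d)  -> {'foo': ['bar']}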


class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides  = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends          = self.depvar('DEPENDS', metadata)
        self.provides         = self.depvar('PROVIDES', metadata)
        self.rdepends         = self.depvar('RDEPENDS', metadata)
        self.rprovides        = self.depvar('RPROVIDES', metadata)
        self.rrecommends      = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg    = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg     = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg  = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits         = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv      = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs     = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv    = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc    = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # Set up the CacheData fields that CoreRecipeInfo manages
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        # Take a copy so the += below doesn't mutate the cached info object
        rprovides = list(self.rprovides)
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)
        # Create a collection of all targets for use by sanity-checking
        # tasks, such as those covering upstream versions, licenses, and
        # the tools needed for task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual filename to a real one, plus the associated subclass
    keyword and multiconfig name
    """
    mc = ""
    if virtualfn.startswith('multiconfig:'):
        elems = virtualfn.split(':')
        mc = elems[1]
        virtualfn = ":".join(elems[2:])

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        elems = virtualfn.split(':')
        cls = ":".join(elems[1:-1])
        fn = elems[-1]

    return (fn, cls, mc)
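
# Illustrative mappings (hypothetical recipe path):
#
#   virtualfn2realfn("/r/foo.bb")
#       -> ("/r/foo.bb", "", "")
#   virtualfn2realfn("virtual:native:/r/foo.bb")
#       -> ("/r/foo.bb", "native", "")
#   virtualfn2realfn("multiconfig:mc1:virtual:native:/r/foo.bb")
#       -> ("/r/foo.bb", "native", "mc1")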

def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword and multiconfig
    name to a virtual filename
    """
    if cls:
        realfn = "virtual:" + cls + ":" + realfn
    if mc:
        realfn = "multiconfig:" + mc + ":" + realfn
    return realfn
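
# realfn2virtual is the inverse of virtualfn2realfn, e.g. (hypothetical path):
#
#   realfn2virtual("/r/foo.bb", "native", "mc1")
#       -> "multiconfig:mc1:virtual:native:/r/foo.bb"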

def variant2virtual(realfn, variant):
    """
    Convert a real filename + a variant to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("multiconfig:"):
        elems = variant.split(":")
        if elems[2]:
            return "multiconfig:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
        return "multiconfig:" + elems[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn

def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first checks if there is a TOPDIR and, if not,
    # it will call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        if chdir_back:
            os.chdir(oldpath)
        raise


class NoCache(object):

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)", virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly=False):
        """
        Load and parse one .bb build file
        Return a dict of the parsed datastores, keyed by variant name
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["multiconfig:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores
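
    # The returned dict is keyed by variant name, e.g. (hypothetical):
    #   {'': <base datastore>,
    #    'native': <native variant datastore>,
    #    'multiconfig:mc1:': <mc1 base datastore>}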

class Cache(NoCache):
    """
    BitBake Cache implementation
    """

    def __init__(self, databuilder, data_hash, caches_array):
        super().__init__(databuilder)
        data = databuilder.data

        # caches_array is used later to decide whether we need the extra
        # cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Cache dir: %s", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                cache_ok = cache_ok and os.path.exists(cachefile)
                cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")
        else:
            logger.debug(1, "Cache file %s not found, building...", self.cachefile)

    def load_cachefile(self):
        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the total size of all the cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as f:
                cachesize += os.fstat(f.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            logger.debug(1, 'Loading cache file: %s', cachefile)
            with open(cachefile, "rb") as f:
                pickled = pickle.Unpickler(f)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

                # Load the rest of the cache file
                current_progress = 0
                while True:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on whole percentage boundaries
                    current_progress = f.tell() + previous_progress
                    if current_progress > cachesize:
                        # we might have calculated incorrect total size because a file
                        # might've been written out just after we checked its size
                        cachesize = current_progress
                    current_percent = 100 * current_progress // cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

                previous_progress += current_progress

        # Note: the number of entries in depends_cache corresponds to the number
        # of parsed files; one file may contribute several cache classes but is
        # still counted as a single cache item.
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)

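    # The on-disk format written by sync() and read above is a plain pickle
    # stream: the cache version, the bitbake version, then alternating
    # key/info pairs.  A minimal standalone reader could look like this
    # (sketch only; hypothetical path):
    #
    #   with open("/build/cache/bb_cache.dat.abc123", "rb") as f:
    #       u = pickle.Unpickler(f)
    #       cache_ver = u.load()
    #       bitbake_ver = u.load()
    #       while True:
    #           try:
    #               key, info = u.load(), u.load()
    #           except EOFError:
    #               break
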
    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData.  Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            infos = self.parse(filename, appends)

        return cached, infos
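
    # load() returns (cached, infos), where infos is a list of
    # (virtualfn, info_array) tuples, e.g. (hypothetical):
    #
    #   (True, [("/r/foo.bb", [<CoreRecipeInfo>, ...]),
    #           ("virtual:native:/r/foo.bb", [<CoreRecipeInfo>, ...])])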

    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if fn not in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                    fn, f)
                    self.remove(fn)
                    return False

                if fmtime != old_mtime:
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                    fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
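                    # The flag value is "path:exists" pairs, e.g. (hypothetical):
                    #   "/p/a.inc:True /p/b file:False"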
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                        fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s", str(appends), str(info_array[0].appends))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)


def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker.databuilder, cooker.data_hash, cooker.caches_array)


class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                    lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]
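
    # merge_data never overwrites: entries already in dest win. For example
    # (hypothetical data):
    #   dest   = [{'a': 1}]
    #   source = [{'a': 2, 'b': 3}]
    #   merge_data(source, dest)  ->  dest == [{'a': 1, 'b': 3}]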

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
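

# A minimal sketch of the subclass shape MultiProcessCache expects
# (hypothetical names; real users include the codeparser and file
# checksum caches):
#
#   class ExampleCache(MultiProcessCache):
#       cache_file_name = "bb_example_cache.dat"
#       CACHE_VERSION = 1
#
#   cache = ExampleCache()
#   cache.init_cache(d)  # d is a datastore providing PERSISTENT_DIR or CACHE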