# xref: /openbmc/openbmc/poky/bitbake/lib/bb/siggen.py (revision edff4923)
#
# Copyright BitBake Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import hashlib
import logging
import os
import re
import tempfile
import pickle
import bb.data
import difflib
import simplediff
import json
import types
from contextlib import contextmanager
import bb.compress.zstd
from bb.checksum import FileChecksumCache
from bb import runqueue
import hashserv
import hashserv.client

logger = logging.getLogger('BitBake.SigGen')
hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')

# find_siginfo and find_siginfo_version are set by the metadata siggen
# The minimum version of the find_siginfo function we need
find_siginfo_minversion = 2

HASHSERV_ENVVARS = [
    "SSL_CERT_DIR",
    "SSL_CERT_FILE",
    "NO_PROXY",
    "HTTPS_PROXY",
    "HTTP_PROXY"
]

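# These variables are forwarded from the BitBake environment into the hash
# equivalence client's environment (see SignatureGeneratorUniHashMixIn below),
# so that e.g. a proxy or custom CA certificate configured in the original
# environment is honoured when contacting the server.
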
def check_siggen_version(siggen):
    if not hasattr(siggen, "find_siginfo_version"):
        bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
    if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
        bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))

class SetEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, (set, frozenset)):
            return dict(_set_object=list(sorted(obj)))
        return json.JSONEncoder.default(self, obj)

def SetDecoder(dct):
    if '_set_object' in dct:
        return frozenset(dct['_set_object'])
    return dct

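# Illustrative round-trip through SetEncoder/SetDecoder (a minimal sketch;
# values chosen for demonstration only):
#
#   >>> s = json.dumps({"b", "a"}, cls=SetEncoder)
#   >>> s
#   '{"_set_object": ["a", "b"]}'
#   >>> json.loads(s, object_hook=SetDecoder)
#   frozenset({'a', 'b'})
#
# Sets are serialised as sorted lists so the resulting sigdata files are
# deterministic, and they deserialise as frozensets.
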
def init(d):
    siggens = [obj for obj in globals().values()
                      if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
    for sg in siggens:
        if desired == sg.name:
            return sg(d)
    else:
        logger.error("Invalid signature generator '%s', using default 'noop'\n"
                     "Available generators: %s", desired,
                     ', '.join(obj.name for obj in siggens))
        return SignatureGenerator(d)

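# For example, with BB_SIGNATURE_HANDLER = "basichash" in the configuration,
# init() returns a SignatureGeneratorBasicHash instance; an unrecognised name
# logs an error and falls back to the noop SignatureGenerator.
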
class SignatureGenerator(object):
    """
    A noop signature generator, used as the default fallback handler.
    """
    name = "noop"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.unihash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.unitaskhashes = {}
        self.tidtopn = {}
        self.setscenetasks = set()

    def finalise(self, fn, d, variant):
        return

    def postparsing_clean_cache(self):
        return

    def setup_datacache(self, datacaches):
        self.datacaches = datacaches

    def setup_datacache_from_datastore(self, mcfn, d):
        # In task context we have no cache so setup internal data structures
        # from the fully parsed data store provided

        mc = d.getVar("__BBMULTICONFIG", False) or ""
        tasks = d.getVar('__BBTASKS', False)

        self.datacaches = {}
        self.datacaches[mc] = types.SimpleNamespace()
        setattr(self.datacaches[mc], "stamp", {})
        self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
        setattr(self.datacaches[mc], "stamp_extrainfo", {})
        self.datacaches[mc].stamp_extrainfo[mcfn] = {}
        for t in tasks:
            flag = d.getVarFlag(t, "stamp-extra-info")
            if flag:
                self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag

    def get_cached_unihash(self, tid):
        return None

    def get_unihash(self, tid):
        unihash = self.get_cached_unihash(tid)
        if unihash:
            return unihash
        return self.taskhash[tid]

    def get_unihashes(self, tids):
        return {tid: self.get_unihash(tid) for tid in tids}

    def prep_taskhash(self, tid, deps, dataCaches):
        return

    def get_taskhash(self, tid, deps, dataCaches):
        self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
        return self.taskhash[tid]

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        return

    def stampfile_base(self, mcfn):
        mc = bb.runqueue.mc_from_tid(mcfn)
        return self.datacaches[mc].stamp[mcfn]

    def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
        mc = bb.runqueue.mc_from_tid(mcfn)
        stamp = self.datacaches[mc].stamp[mcfn]
        if not stamp:
            return

        stamp_extrainfo = ""
        if extrainfo:
            taskflagname = taskname
            if taskname.endswith("_setscene"):
                taskflagname = taskname.replace("_setscene", "")
            stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""

        return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

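    # Illustrative naming for the base stampfile() (a sketch; paths are
    # hypothetical):
    #
    #   stampfile("/stamps/recipe", mcfn, "do_compile", "")     -> "/stamps/recipe.do_compile"
    #   stampfile("/stamps/recipe", mcfn, "do_compile", "info") -> "/stamps/recipe.do_compile.info"
    #
    # The rstrip('.') drops the trailing separator when extrainfo is empty.
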
    def stampcleanmask_mcfn(self, taskname, mcfn):
        mc = bb.runqueue.mc_from_tid(mcfn)
        stamp = self.datacaches[mc].stamp[mcfn]
        if not stamp:
            return []

        taskflagname = taskname
        if taskname.endswith("_setscene"):
            taskflagname = taskname.replace("_setscene", "")
        stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""

        return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)

    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    def dump_sigtask(self, mcfn, task, stampbase, runtime):
        return

    def invalidate_task(self, task, mcfn):
        mc = bb.runqueue.mc_from_tid(mcfn)
        stamp = self.datacaches[mc].stamp[mcfn]
        bb.utils.remove(stamp)

    def dump_sigs(self, dataCache, options):
        return

    def get_taskdata(self):
        return (self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks)

    def set_taskdata(self, data):
        self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks = data

    def reset(self, data):
        self.__init__(data)

    def get_taskhashes(self):
        return self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn

    def set_taskhashes(self, hashes):
        self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn = hashes

    def save_unitaskhashes(self):
        return

    def set_setscene_tasks(self, setscene_tasks):
        return

    def exit(self):
        return

def build_pnid(mc, pn, taskname):
    if mc:
        return "mc:" + mc + ":" + pn + ":" + taskname
    return pn + ":" + taskname

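# Illustrative build_pnid() results (a sketch; names are hypothetical):
#
#   build_pnid("", "busybox", "do_compile")     -> "busybox:do_compile"
#   build_pnid("mymc", "busybox", "do_compile") -> "mc:mymc:busybox:do_compile"
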
class SignatureGeneratorBasic(SignatureGenerator):
    """
    A signature generator which computes base hashes from each task's code
    and variable dependencies.
    """
    name = "basic"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.unihash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.setscenetasks = set()
        self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
        self.taskhash_ignore_tasks = None
        self.init_rundepcheck(data)
        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
        if checksum_cache_file:
            self.checksum_cache = FileChecksumCache()
            self.checksum_cache.init_cache(data, checksum_cache_file)
        else:
            self.checksum_cache = None

        self.unihash_cache = bb.cache.SimpleCache("3")
        self.unitaskhashes = self.unihash_cache.init_cache(data, "bb_unihashes.dat", {})
        self.localdirsexclude = (data.getVar("BB_SIGNATURE_LOCAL_DIRS_EXCLUDE") or "CVS .bzr .git .hg .osc .p4 .repo .svn").split()
        self.tidtopn = {}

    def init_rundepcheck(self, data):
        self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
        if self.taskhash_ignore_tasks:
            self.twl = re.compile(self.taskhash_ignore_tasks)
        else:
            self.twl = None

    def _build_data(self, mcfn, d):

        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)

        taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)

        for task in tasklist:
            tid = mcfn + ":" + task
            if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
                bb.error("The following commands may help:")
                cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task)
                # Make sure sigdata is dumped before running printdiff
                bb.error("%s -Snone" % cmd)
                bb.error("Then:")
                bb.error("%s -Sprintdiff\n" % cmd)
            self.basehash[tid] = basehash[tid]

        return taskdeps, gendeps, lookupcache

    def set_setscene_tasks(self, setscene_tasks):
        self.setscenetasks = set(setscene_tasks)

    def finalise(self, fn, d, variant):

        mc = d.getVar("__BBMULTICONFIG", False) or ""
        mcfn = fn
        if variant or mc:
            mcfn = bb.cache.realfn2virtual(fn, variant, mc)

        try:
            taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
        except bb.parse.SkipRecipe:
            raise
        except:
            bb.warn("Error during finalise of %s" % mcfn)
            raise

        basehashes = {}
        for task in taskdeps:
            basehashes[task] = self.basehash[mcfn + ":" + task]

        d.setVar("__siggen_basehashes", basehashes)
        d.setVar("__siggen_gendeps", gendeps)
        d.setVar("__siggen_varvals", lookupcache)
        d.setVar("__siggen_taskdeps", taskdeps)

        # Slow but can be useful for debugging mismatched basehashes
        #self.setup_datacache_from_datastore(mcfn, d)
        #for task in taskdeps:
        #    self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)

    def setup_datacache_from_datastore(self, mcfn, d):
        super().setup_datacache_from_datastore(mcfn, d)

        mc = bb.runqueue.mc_from_tid(mcfn)
        for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
            if not hasattr(self.datacaches[mc], attr):
                setattr(self.datacaches[mc], attr, {})
        self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
        self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
        self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")

    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
        # Return True if we should keep the dependency, False to drop it
        # We only manipulate the dependencies for packages not in the ignore
        # list
        if self.twl and not self.twl.search(recipename):
            # then process the actual dependencies
            if self.twl.search(depname):
                return False
        return True

    def read_taint(self, fn, task, stampbase):
        taint = None
        try:
            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
                taint = taintf.read()
        except IOError:
            pass
        return taint

    def prep_taskhash(self, tid, deps, dataCaches):

        (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)

        self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
        self.runtaskdeps[tid] = []
        self.file_checksum_values[tid] = []
        recipename = dataCaches[mc].pkg_fn[mcfn]

        self.tidtopn[tid] = recipename
        # save hashfn for deps into siginfo?
        for dep in deps:
            (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
            dep_pn = dataCaches[depmc].pkg_fn[depmcfn]

            if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
                continue

            if dep not in self.taskhash:
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)

            dep_pnid = build_pnid(depmc, dep_pn, deptask)
            self.runtaskdeps[tid].append((dep_pnid, dep))

        if task in dataCaches[mc].file_checksums[mcfn]:
            if self.checksum_cache:
                checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
            else:
                checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
            for (f, cs) in checksums:
                self.file_checksum_values[tid].append((f, cs))

        taskdep = dataCaches[mc].task_deps[mcfn]
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
            if tid in self.taints and self.taints[tid].startswith("nostamp:"):
                # Don't reset taint value upon every call
                pass
            else:
                import uuid
                taint = str(uuid.uuid4())
                self.taints[tid] = "nostamp:" + taint

        taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
        if taint:
            self.taints[tid] = taint
            logger.warning("%s is tainted from a forced run" % tid)

        return set(dep for _, dep in self.runtaskdeps[tid])

    def get_taskhash(self, tid, deps, dataCaches):

        data = self.basehash[tid]
        for dep in sorted(self.runtaskdeps[tid]):
            data += self.get_unihash(dep[1])

        for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
            if cs:
                if "/./" in f:
                    data += "./" + f.split("/./")[1]
                data += cs

        if tid in self.taints:
            if self.taints[tid].startswith("nostamp:"):
                data += self.taints[tid][8:]
            else:
                data += self.taints[tid]

        h = hashlib.sha256(data.encode("utf-8")).hexdigest()
        self.taskhash[tid] = h
        #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
        return h

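    # The task hash is therefore a sha256 over, in order: the task's basehash,
    # the unihash of each runtime dependency (sorted by pnid), the checksum of
    # each watched file (prefixed by its "./"-relative path when the path
    # contains "/./"), and any taint value. Roughly (a sketch):
    #
    #   sha256(basehash + dep1_unihash + ... + [path +] checksum + ... + taint)
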
    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        if self.checksum_cache:
            self.checksum_cache.save_extras()
            self.checksum_cache.save_merge()
        else:
            bb.fetch2.fetcher_parse_save()
            bb.fetch2.fetcher_parse_done()

    def save_unitaskhashes(self):
        self.unihash_cache.save(self.unitaskhashes)

    def dump_sigtask(self, mcfn, task, stampbase, runtime):
        tid = mcfn + ":" + task
        mc = bb.runqueue.mc_from_tid(mcfn)
        referencestamp = stampbase
        if isinstance(runtime, str) and runtime.startswith("customfile"):
            sigfile = stampbase
            referencestamp = runtime[11:]
        elif runtime and tid in self.taskhash:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid)
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]

        with bb.utils.umask(0o002):
            bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['task'] = task
        data['basehash_ignore_vars'] = self.basehash_ignore_vars
        data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
        data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
        data['basehash'] = self.basehash[tid]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
        for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
            if dep in self.basehash_ignore_vars:
                continue
            data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
            data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]

        if runtime and tid in self.taskhash:
            data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
            data['file_checksum_values'] = []
            for f, cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
                if "/./" in f:
                    data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
                else:
                    data['file_checksum_values'].append((os.path.basename(f), cs))
            data['runtaskhashes'] = {}
            for dep in self.runtaskdeps[tid]:
                data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
            data['taskhash'] = self.taskhash[tid]
            data['unihash'] = self.get_unihash(tid)

        taint = self.read_taint(mcfn, task, referencestamp)
        if taint:
            data['taint'] = taint

        if runtime and tid in self.taints:
            if 'nostamp:' in self.taints[tid]:
                data['taint'] = self.taints[tid]

        computed_basehash = calc_basehash(data)
        if computed_basehash != self.basehash[tid]:
            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid))
        if runtime and tid in self.taskhash:
            computed_taskhash = calc_taskhash(data)
            if computed_taskhash != self.taskhash[tid]:
                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
                sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)

        fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
        try:
            with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
                json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
                f.flush()
            os.chmod(tmpfile, 0o664)
            bb.utils.rename(tmpfile, sigfile)
        except (OSError, IOError) as err:
            try:
                os.unlink(tmpfile)
            except OSError:
                pass
            raise err

class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    name = "basichash"

    def get_stampfile_hash(self, tid):
        if tid in self.taskhash:
            return self.taskhash[tid]

        # If the task is not in basehash either, the resulting KeyError is a
        # genuine error
        return self.basehash[tid]

    def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
        if taskname.endswith("_setscene"):
            tid = mcfn + ":" + taskname[:-9]
        else:
            tid = mcfn + ":" + taskname
        if clean:
            h = "*"
        else:
            h = self.get_stampfile_hash(tid)

        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

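    # With the "basichash" generator a stamp therefore embeds the task (or
    # base) hash, e.g. (illustrative):
    #
    #   /stamps/recipe.do_compile.<sha256 hex digest>
    #
    # and stampcleanmask() below substitutes "*" for the hash so that stamps
    # from any previous hash can be matched for cleaning.
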
    def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
        return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)

    def invalidate_task(self, task, mcfn):
        bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))

        mc = bb.runqueue.mc_from_tid(mcfn)
        stamp = self.datacaches[mc].stamp[mcfn]

        taintfn = stamp + '.' + task + '.taint'

        import uuid
        bb.utils.mkdirhier(os.path.dirname(taintfn))
        # The specific content of the taint file is not really important,
        # we just need it to be random, so a random UUID is used
        with open(taintfn, 'w') as taintf:
            taintf.write(str(uuid.uuid4()))

class SignatureGeneratorUniHashMixIn(object):
    def __init__(self, data):
        self.extramethod = {}
        # NOTE: The cache only tracks hashes that exist. Hashes that don't
        # exist are always queried from the server since it is possible for
        # hashes to appear over time, but much less likely for them to
        # disappear
        self.unihash_exists_cache = set()
        self.username = None
        self.password = None
        self.env = {}

        origenv = data.getVar("BB_ORIGENV")
        for e in HASHSERV_ENVVARS:
            value = data.getVar(e)
            if not value and origenv:
                value = origenv.getVar(e)
            if value:
                self.env[e] = value
        super().__init__(data)

    def get_taskdata(self):
        return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()

    def set_taskdata(self, data):
        self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
        super().set_taskdata(data[6:])

    def get_hashserv_creds(self):
        if self.username and self.password:
            return {
                "username": self.username,
                "password": self.password,
            }

        return {}

    @contextmanager
    def _client_env(self):
        orig_env = os.environ.copy()
        try:
            for k, v in self.env.items():
                os.environ[k] = v

            yield
        finally:
            for k, v in self.env.items():
                if k in orig_env:
                    os.environ[k] = orig_env[k]
                else:
                    del os.environ[k]

    @contextmanager
    def client(self):
        with self._client_env():
            if getattr(self, '_client', None) is None:
                self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
            yield self._client

    def reset(self, data):
        self.__close_clients()
        return super().reset(data)

    def exit(self):
        self.__close_clients()
        return super().exit()

    def __close_clients(self):
        with self._client_env():
            if getattr(self, '_client', None) is not None:
                self._client.close()
                self._client = None
            if getattr(self, '_client_pool', None) is not None:
                self._client_pool.close()
                self._client_pool = None

    def get_stampfile_hash(self, tid):
        if tid in self.taskhash:
            # If a unique hash is reported, use it as the stampfile hash. This
            # ensures that a task won't be re-run if its taskhash changes but
            # would still produce the same output hash
            unihash = self._get_unihash(tid)
            if unihash is not None:
                return unihash

        return super().get_stampfile_hash(tid)

    def set_unihash(self, tid, unihash):
        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
        self.unitaskhashes[key] = (self.taskhash[tid], unihash)
        self.unihash[tid] = unihash

    def _get_unihash(self, tid, checkkey=None):
        if tid not in self.tidtopn:
            return None
        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
        if key not in self.unitaskhashes:
            return None
        if not checkkey:
            checkkey = self.taskhash[tid]
        (key, unihash) = self.unitaskhashes[key]
        if key != checkkey:
            return None
        return unihash

    def get_cached_unihash(self, tid):
        taskhash = self.taskhash[tid]

        # If it's not a setscene task, we can just return the taskhash
        if self.setscenetasks and tid not in self.setscenetasks:
            self.unihash[tid] = None
            return taskhash

        # TODO: This cache can grow unbounded. It probably only needs to keep
        # the most recent entry for each task
        unihash = self._get_unihash(tid)
        if unihash is not None:
            self.unihash[tid] = unihash
            return unihash

        return None

    def _get_method(self, tid):
        method = self.method
        if tid in self.extramethod:
            method = method + self.extramethod[tid]

        return method

    def unihashes_exist(self, query):
        if len(query) == 0:
            return {}

        query_keys = []
        result = {}
        for key, unihash in query.items():
            if unihash in self.unihash_exists_cache:
                result[key] = True
            else:
                query_keys.append(key)

        if query_keys:
            with self.client() as client:
                query_result = client.unihash_exists_batch(query[k] for k in query_keys)

        for idx, key in enumerate(query_keys):
            exists = query_result[idx]
            if exists:
                self.unihash_exists_cache.add(query[key])
            result[key] = exists

        return result

    def get_unihash(self, tid):
        return self.get_unihashes([tid])[tid]

    def get_unihashes(self, tids):
        """
        For an iterable of tids, returns a dictionary that maps each tid to a
        unihash
        """
        result = {}
        query_tids = []

        for tid in tids:
            unihash = self.get_cached_unihash(tid)
            if unihash:
                result[tid] = unihash
            else:
                query_tids.append(tid)

        if query_tids:
            unihashes = []
            try:
                with self.client() as client:
                    unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
            except (ConnectionError, FileNotFoundError) as e:
                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))

        for idx, tid in enumerate(query_tids):
            # In the absence of being able to discover a unique hash from the
            # server, make it equivalent to the taskhash. The unique "hash" only
            # really needs to be a unique string (not even necessarily a hash), but
            # making it match the taskhash has a few advantages:
            #
            # 1) All of the sstate code that assumes hashes can be the same
            #    continues to work
            # 2) It provides maximal compatibility with builders that don't use
            #    an equivalency server
            # 3) It is easy for multiple independent builders to derive the
            #    same unique hash from the same input. This means that if
            #    independent builders find the same taskhash, but it isn't reported
            #    to the server, there is a better chance that they will agree on
            #    the unique hash.
            taskhash = self.taskhash[tid]

            if unihashes and unihashes[idx]:
                unihash = unihashes[idx]
                # A unique hash equal to the taskhash is not very interesting,
                # so it is reported at debug level 2. If they differ, that
                # is much more interesting, so it is reported at debug level 1
                hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
            else:
                hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
                unihash = taskhash

            self.set_unihash(tid, unihash)
            self.unihash[tid] = unihash
            result[tid] = unihash

        return result

    def report_unihash(self, path, task, d):
        import importlib

        taskhash = d.getVar('BB_TASKHASH')
        unihash = d.getVar('BB_UNIHASH')
        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
        tempdir = d.getVar('T')
        mcfn = d.getVar('BB_FILENAME')
        tid = mcfn + ':do_' + task
        key = tid + ':' + taskhash

        if self.setscenetasks and tid not in self.setscenetasks:
            return

        # This can happen if locked sigs are in action. Detect and just exit
        if taskhash != self.taskhash[tid]:
            return

        # Sanity checks
        cache_unihash = self._get_unihash(tid, checkkey=taskhash)
        if cache_unihash is None:
            bb.fatal('%s not in unihash cache. Please report this error' % key)

        if cache_unihash != unihash:
            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))

        sigfile = None
        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
        sigfile_link = "depsig.do_%s" % task

        try:
            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')

            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}

            if "." in self.method:
                (module, method) = self.method.rsplit('.', 1)
                locs['method'] = getattr(importlib.import_module(module), method)
                outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)
            else:
                outhash = bb.utils.better_eval(self.method + '(path, sigfile, task, d)', locs)

            try:
                extra_data = {}

                owner = d.getVar('SSTATE_HASHEQUIV_OWNER')
                if owner:
                    extra_data['owner'] = owner

                if report_taskdata:
                    sigfile.seek(0)

                    extra_data['PN'] = d.getVar('PN')
                    extra_data['PV'] = d.getVar('PV')
                    extra_data['PR'] = d.getVar('PR')
                    extra_data['task'] = task
                    extra_data['outhash_siginfo'] = sigfile.read().decode('utf-8')

                method = self.method
                if tid in self.extramethod:
                    method = method + self.extramethod[tid]

                with self.client() as client:
                    data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)

                new_unihash = data['unihash']

                if new_unihash != unihash:
                    hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
                    bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
                    self.set_unihash(tid, new_unihash)
                    d.setVar('BB_UNIHASH', new_unihash)
                else:
                    hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
            except (ConnectionError, FileNotFoundError) as e:
                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
        finally:
            if sigfile:
                sigfile.close()

                sigfile_link_path = os.path.join(tempdir, sigfile_link)
                bb.utils.remove(sigfile_link_path)

                try:
                    os.symlink(sigfile_name, sigfile_link_path)
                except OSError:
                    pass

    def report_unihash_equiv(self, tid, taskhash, wanted_unihash, current_unihash, datacaches):
        try:
            extra_data = {}
            method = self.method
            if tid in self.extramethod:
                method = method + self.extramethod[tid]

            with self.client() as client:
                data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)

            hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))

            if data is None:
                bb.warn("Server unable to handle unihash report")
                return False

            finalunihash = data['unihash']

            if finalunihash == current_unihash:
                hashequiv_logger.verbose('Task %s unihash %s unchanged by server' % (tid, finalunihash))
            elif finalunihash == wanted_unihash:
                hashequiv_logger.verbose('Task %s unihash changed %s -> %s as wanted' % (tid, current_unihash, finalunihash))
                self.set_unihash(tid, finalunihash)
                return True
            else:
                # TODO: What to do here?
                hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))

        except (ConnectionError, FileNotFoundError) as e:
            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))

        return False

#
# Dummy class used for bitbake-selftest
#
class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash):
    name = "TestEquivHash"
    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        self.server = data.getVar('BB_HASHSERVE')
        self.method = "sstate_output_hash"

def clean_checksum_file_path(file_checksum_tuple):
    f, cs = file_checksum_tuple
    if "/./" in f:
        return "./" + f.split("/./")[1]
    return f

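# Illustrative clean_checksum_file_path() behaviour (a sketch; paths are
# hypothetical):
#
#   ("/work/foo/./src/main.c", "abc...") -> "./src/main.c"
#   ("/work/foo/main.c", "abc...")       -> "/work/foo/main.c"
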
def dump_this_task(outfile, d):
    import bb.parse
    mcfn = d.getVar("BB_FILENAME")
    task = "do_" + d.getVar("BB_CURRENTTASK")
    referencestamp = bb.parse.siggen.stampfile_base(mcfn)
    bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)

def init_colors(enable_color):
    """Initialise colour dict for passing to compare_sigfiles()"""
    # First set up the colours
    colors = {'color_title':   '\033[1m',
              'color_default': '\033[0m',
              'color_add':     '\033[0;32m',
              'color_remove':  '\033[0;31m',
             }
    # Leave all keys present but clear the values
    if not enable_color:
        for k in colors.keys():
            colors[k] = ''
    return colors

def worddiff_str(oldstr, newstr, colors=None):
    if not colors:
        colors = init_colors(False)
    diff = simplediff.diff(oldstr.split(' '), newstr.split(' '))
    ret = []
    for change, value in diff:
        value = ' '.join(value)
        if change == '=':
            ret.append(value)
        elif change == '+':
            item = '{color_add}{{+{value}+}}{color_default}'.format(value=value, **colors)
            ret.append(item)
        elif change == '-':
            item = '{color_remove}[-{value}-]{color_default}'.format(value=value, **colors)
            ret.append(item)
    whitespace_note = ''
    if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()):
        whitespace_note = ' (whitespace changed)'
    return '"%s"%s' % (' '.join(ret), whitespace_note)

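# Illustrative worddiff_str() output with colours disabled (a sketch;
# simplediff typically emits the deletion before the insertion for a
# changed word):
#
#   >>> worddiff_str("a b c", "a x c")
#   '"a [-b-] {+x+} c"'
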
def list_inline_diff(oldlist, newlist, colors=None):
    if not colors:
        colors = init_colors(False)
    diff = simplediff.diff(oldlist, newlist)
    ret = []
    for change, value in diff:
        value = ' '.join(value)
        if change == '=':
            ret.append("'%s'" % value)
        elif change == '+':
            item = '{color_add}+{value}{color_default}'.format(value=value, **colors)
            ret.append(item)
        elif change == '-':
            item = '{color_remove}-{value}{color_default}'.format(value=value, **colors)
            ret.append(item)
    return '[%s]' % (', '.join(ret))

# Handle renamed fields
def handle_renames(data):
    if 'basewhitelist' in data:
        data['basehash_ignore_vars'] = data['basewhitelist']
        del data['basewhitelist']
    if 'taskwhitelist' in data:
        data['taskhash_ignore_tasks'] = data['taskwhitelist']
        del data['taskwhitelist']

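# Example (illustrative): a sigdata dictionary written before the rename,
# such as
#
#   {"basewhitelist": {"DATE", "TIME"}, "taskwhitelist": None, ...}
#
# is rewritten in place to use the current key names:
#
#   {"basehash_ignore_vars": {"DATE", "TIME"}, "taskhash_ignore_tasks": None, ...}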

def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
    output = []

    colors = init_colors(color)
    def color_format(formatstr, **values):
        """
        Return colour formatted string.
        NOTE: call with the format string, not an already formatted string
        containing values (otherwise you could have trouble with { and }
        characters)
        """
        if not formatstr.endswith('{color_default}'):
            formatstr += '{color_default}'
        # In newer python 3 versions you can pass both of these directly,
        # but we only require 3.4 at the moment
        formatparams = {}
        formatparams.update(colors)
        formatparams.update(values)
        return formatstr.format(**formatparams)

    try:
        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
            a_data = json.load(f, object_hook=SetDecoder)
    except (TypeError, OSError) as err:
        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
        raise err
    try:
        with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
            b_data = json.load(f, object_hook=SetDecoder)
    except (TypeError, OSError) as err:
        bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
        raise err

    for data in [a_data, b_data]:
        handle_renames(data)

    def dict_diff(a, b, ignored_vars=set()):
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i] and i not in ignored_vars:
                changed.add(i)
        added = sb - sa
        removed = sa - sb
        return changed, added, removed

    def file_checksums_diff(a, b):
        from collections import Counter

        # Convert lists back to tuples
        a = [(f[0], f[1]) for f in a]
        b = [(f[0], f[1]) for f in b]

        # Compare lists, ensuring we can handle duplicate filenames if they exist
        removedcount = Counter(a)
        removedcount.subtract(b)
        addedcount = Counter(b)
        addedcount.subtract(a)
        added = []
        for x in b:
            if addedcount[x] > 0:
                addedcount[x] -= 1
                added.append(x)
        removed = []
        changed = []
        for x in a:
            if removedcount[x] > 0:
                removedcount[x] -= 1
                for y in added:
                    if y[0] == x[0]:
                        changed.append((x[0], x[1], y[1]))
                        added.remove(y)
                        break
                else:
                    removed.append(x)
        added = [x[0] for x in added]
        removed = [x[0] for x in removed]
        return changed, added, removed

    if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
        output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
        if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
            output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))

    if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
        output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
        if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
            output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))

    if a_data['taskdeps'] != b_data['taskdeps']:
        output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))

    if a_data['basehash'] != b_data['basehash'] and not collapsed:
        output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))

    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
    if changed:
        for dep in sorted(changed):
            output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
    if added:
        for dep in sorted(added):
            output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
    if removed:
        for dep in sorted(removed):
            output.append(color_format("{color_title}Dependency on variable %s was removed") % (dep))


    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in sorted(changed):
            oldval = a_data['varvals'][dep]
            newval = b_data['varvals'][dep]
            if newval and oldval and ('\n' in oldval or '\n' in newval):
                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
                # Cut off the first two lines, since we aren't interested in
                # the old/new filename (they are blank anyway in this case)
                difflines = list(diff)[2:]
                if color:
                    # Add colour to diff output
                    for i, line in enumerate(difflines):
                        if line.startswith('+'):
                            line = color_format('{color_add}{line}', line=line)
                            difflines[i] = line
                        elif line.startswith('-'):
                            line = color_format('{color_remove}{line}', line=line)
                            difflines[i] = line
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines)))
            elif newval and oldval and (' ' in oldval or ' ' in newval):
                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors)))
            else:
                output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))

    if 'file_checksum_values' not in a_data:
        a_data['file_checksum_values'] = []
    if 'file_checksum_values' not in b_data:
        b_data['file_checksum_values'] = []

    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
            output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new))
    if added:
        for f in added:
            output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f))
    if removed:
        for f in removed:
            output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f))

    if 'runtaskdeps' not in a_data:
        a_data['runtaskdeps'] = {}
    if 'runtaskdeps' not in b_data:
        b_data['runtaskdeps'] = {}

    if not collapsed:
        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
            changed = ["Number of task dependencies changed"]
        else:
            changed = []
            for idx, task in enumerate(a_data['runtaskdeps']):
                a = a_data['runtaskdeps'][idx]
                b = b_data['runtaskdeps'][idx]
                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

        if changed:
            clean_a = a_data['runtaskdeps']
            clean_b = b_data['runtaskdeps']
            if clean_a != clean_b:
                output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
            else:
                output.append(color_format("{color_title}runtaskdeps changed:"))
            output.append("\n".join(changed))


    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = a_data['runtaskhashes']
        b = b_data['runtaskhashes']
        changed, added, removed = dict_diff(a, b)
        if added:
            for dep in sorted(added):
                bdep_found = False
                if removed:
                    for bdep in removed:
                        if b[dep] == a[bdep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                            bdep_found = True
                if not bdep_found:
                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
        if removed:
            for dep in sorted(removed):
                adep_found = False
                if added:
                    for adep in added:
                        if b[adep] == a[dep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                            adep_found = True
                if not adep_found:
                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
        if changed:
            for dep in sorted(changed):
                if not collapsed:
                    output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
                if callable(recursecb):
                    recout = recursecb(dep, a[dep], b[dep])
                    if recout:
                        if collapsed:
                            output.extend(recout)
                        else:
                            # If a dependent hash changed, we might as well print the line above and then defer to the changes in
                            # that hash since in all likelihood, they're the same changes this task also saw.
                            output = [output[-1]] + recout
                            break

    a_taint = a_data.get('taint', None)
    b_taint = b_data.get('taint', None)
    if a_taint != b_taint:
        if a_taint and a_taint.startswith('nostamp:'):
            a_taint = a_taint.replace('nostamp:', 'nostamp(uuid4):')
        if b_taint and b_taint.startswith('nostamp:'):
            b_taint = b_taint.replace('nostamp:', 'nostamp(uuid4):')
        output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))

    return output


def calc_basehash(sigdata):
    task = sigdata['task']
    basedata = sigdata['varvals'][task]

    if basedata is None:
        basedata = ''

    alldeps = sigdata['taskdeps']
    for dep in sorted(alldeps):
        basedata = basedata + dep
        val = sigdata['varvals'][dep]
        if val is not None:
            basedata = basedata + str(val)

    return hashlib.sha256(basedata.encode("utf-8")).hexdigest()

def calc_taskhash(sigdata):
    data = sigdata['basehash']

    for dep in sigdata['runtaskdeps']:
        data = data + sigdata['runtaskhashes'][dep]

    for c in sigdata['file_checksum_values']:
        if c[1]:
            if "./" in c[0]:
                data = data + c[0]
            data = data + c[1]

    if 'taint' in sigdata:
        if 'nostamp:' in sigdata['taint']:
            data = data + sigdata['taint'][8:]
        else:
            data = data + sigdata['taint']

    return hashlib.sha256(data.encode("utf-8")).hexdigest()


def dump_sigfile(a):
    output = []

    try:
        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
            a_data = json.load(f, object_hook=SetDecoder)
    except (TypeError, OSError) as err:
        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
        raise err

    handle_renames(a_data)

    output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))

    output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))

    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))

    output.append("basehash: %s" % (a_data['basehash']))

    for dep in sorted(a_data['gendeps']):
        output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))

    for dep in sorted(a_data['varvals']):
        output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))

    if 'runtaskdeps' in a_data:
        output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))

    if 'file_checksum_values' in a_data:
        output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))

    if 'runtaskhashes' in a_data:
        for dep in sorted(a_data['runtaskhashes']):
            output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))

    if 'taint' in a_data:
        if a_data['taint'].startswith('nostamp:'):
            msg = a_data['taint'].replace('nostamp:', 'nostamp(uuid4):')
        else:
            msg = a_data['taint']
        output.append("Tainted (by forced/invalidated task): %s" % msg)

    if 'task' in a_data:
        computed_basehash = calc_basehash(a_data)
        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
    else:
        output.append("Unable to compute base hash")

    computed_taskhash = calc_taskhash(a_data)
    output.append("Computed task hash is %s" % computed_taskhash)

    return output
