xref: /openbmc/openbmc/poky/bitbake/lib/bb/siggen.py (revision 39653566)
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import hashlib
8import logging
9import os
10import re
11import tempfile
12import pickle
13import bb.data
14import difflib
15import simplediff
16import json
17import types
18import bb.compress.zstd
19from bb.checksum import FileChecksumCache
20from bb import runqueue
21import hashserv
22import hashserv.client
23
24logger = logging.getLogger('BitBake.SigGen')
25hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
26
27# find_siginfo and find_siginfo_version are set by the metadata siggen
28# The minimum version of the find_siginfo function we need
29find_siginfo_minversion = 2
30
31def check_siggen_version(siggen):
32    if not hasattr(siggen, "find_siginfo_version"):
33        bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
34    if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
35        bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))
36
37class SetEncoder(json.JSONEncoder):
38    def default(self, obj):
39        if isinstance(obj, set) or isinstance(obj, frozenset):
40            return dict(_set_object=list(sorted(obj)))
41        return json.JSONEncoder.default(self, obj)
42
43def SetDecoder(dct):
44    if '_set_object' in dct:
45        return frozenset(dct['_set_object'])
46    return dct
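# Illustrative sketch (not part of the original source): SetEncoder/SetDecoder
# round-trip set values through JSON, which is how sigdata files serialise
# set-typed data such as variable dependency sets. Assuming a simple dict:
#
#   encoded = json.dumps({"deps": {"B", "A"}}, cls=SetEncoder)
#   # encoded == '{"deps": {"_set_object": ["A", "B"]}}'
#   decoded = json.loads(encoded, object_hook=SetDecoder)
#   # decoded["deps"] == frozenset({"A", "B"})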
47
48def init(d):
49    siggens = [obj for obj in globals().values()
50                      if type(obj) is type and issubclass(obj, SignatureGenerator)]
51
52    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
53    for sg in siggens:
54        if desired == sg.name:
55            return sg(d)
56    else:
57        logger.error("Invalid signature generator '%s', using default 'noop'\n"
58                     "Available generators: %s", desired,
59                     ', '.join(obj.name for obj in siggens))
60        return SignatureGenerator(d)
61
62class SignatureGenerator(object):
63    """
64    """
65    name = "noop"
66
67    def __init__(self, data):
68        self.basehash = {}
69        self.taskhash = {}
70        self.unihash = {}
71        self.runtaskdeps = {}
72        self.file_checksum_values = {}
73        self.taints = {}
74        self.unitaskhashes = {}
75        self.tidtopn = {}
76        self.setscenetasks = set()
77
78    def finalise(self, fn, d, variant):
79        return
80
81    def postparsing_clean_cache(self):
82        return
83
84    def setup_datacache(self, datacaches):
85        self.datacaches = datacaches
86
87    def setup_datacache_from_datastore(self, mcfn, d):
88        # In task context we have no cache, so set up internal data structures
89        # from the fully parsed data store provided
90
91        mc = d.getVar("__BBMULTICONFIG", False) or ""
92        tasks = d.getVar('__BBTASKS', False)
93
94        self.datacaches = {}
95        self.datacaches[mc] = types.SimpleNamespace()
96        setattr(self.datacaches[mc], "stamp", {})
97        self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
98        setattr(self.datacaches[mc], "stamp_extrainfo", {})
99        self.datacaches[mc].stamp_extrainfo[mcfn] = {}
100        for t in tasks:
101            flag = d.getVarFlag(t, "stamp-extra-info")
102            if flag:
103                self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
104
105    def get_cached_unihash(self, tid):
106        return None
107
108    def get_unihash(self, tid):
109        unihash = self.get_cached_unihash(tid)
110        if unihash:
111            return unihash
112        return self.taskhash[tid]
113
114    def get_unihashes(self, tids):
115        return {tid: self.get_unihash(tid) for tid in tids}
116
117    def prep_taskhash(self, tid, deps, dataCaches):
118        return
119
120    def get_taskhash(self, tid, deps, dataCaches):
121        self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
122        return self.taskhash[tid]
123
124    def writeout_file_checksum_cache(self):
125        """Write/update the file checksum cache onto disk"""
126        return
127
128    def stampfile_base(self, mcfn):
129        mc = bb.runqueue.mc_from_tid(mcfn)
130        return self.datacaches[mc].stamp[mcfn]
131
132    def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
133        mc = bb.runqueue.mc_from_tid(mcfn)
134        stamp = self.datacaches[mc].stamp[mcfn]
135        if not stamp:
136            return
137
138        stamp_extrainfo = ""
139        if extrainfo:
140            taskflagname = taskname
141            if taskname.endswith("_setscene"):
142                taskflagname = taskname.replace("_setscene", "")
143            stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
144
145        return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)
146
147    def stampfile(self, stampbase, file_name, taskname, extrainfo):
148        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
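    # Illustrative sketch (hypothetical paths): for the noop generator a stamp
    # name is just <stampbase>.<taskname>[.<extrainfo>], e.g.
    #
    #   self.stampfile("/tmp/stamps/busybox", "busybox.bb", "do_compile", "")
    #   # -> "/tmp/stamps/busybox.do_compile"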
149
150    def stampcleanmask_mcfn(self, taskname, mcfn):
151        mc = bb.runqueue.mc_from_tid(mcfn)
152        stamp = self.datacaches[mc].stamp[mcfn]
153        if not stamp:
154            return []
155
156        taskflagname = taskname
157        if taskname.endswith("_setscene"):
158            taskflagname = taskname.replace("_setscene", "")
159        stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
160
161        return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)
162
163    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
164        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
165
166    def dump_sigtask(self, mcfn, task, stampbase, runtime):
167        return
168
169    def invalidate_task(self, task, mcfn):
170        mc = bb.runqueue.mc_from_tid(mcfn)
171        stamp = self.datacaches[mc].stamp[mcfn]
172        bb.utils.remove(stamp)
173
174    def dump_sigs(self, dataCache, options):
175        return
176
177    def get_taskdata(self):
178        return (self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks)
179
180    def set_taskdata(self, data):
181        self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks = data
182
183    def reset(self, data):
184        self.__init__(data)
185
186    def get_taskhashes(self):
187        return self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn
188
189    def set_taskhashes(self, hashes):
190        self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn = hashes
191
192    def save_unitaskhashes(self):
193        return
194
195    def copy_unitaskhashes(self, targetdir):
196        return
197
198    def set_setscene_tasks(self, setscene_tasks):
199        return
200
201    def exit(self):
202        return
203
204def build_pnid(mc, pn, taskname):
205    if mc:
206        return "mc:" + mc + ":" + pn + ":" + taskname
207    return pn + ":" + taskname
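# Illustrative sketch (hypothetical values): build_pnid() forms the identifier
# used for run-time dependencies in siginfo data, with an optional multiconfig
# prefix.
#
#   build_pnid("", "zlib", "do_configure")     # -> "zlib:do_configure"
#   build_pnid("mc1", "zlib", "do_configure")  # -> "mc:mc1:zlib:do_configure"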
208
209class SignatureGeneratorBasic(SignatureGenerator):
210    """
211    """
212    name = "basic"
213
214    def __init__(self, data):
215        self.basehash = {}
216        self.taskhash = {}
217        self.unihash = {}
218        self.runtaskdeps = {}
219        self.file_checksum_values = {}
220        self.taints = {}
221        self.setscenetasks = set()
222        self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
223        self.taskhash_ignore_tasks = None
224        self.init_rundepcheck(data)
225        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
226        if checksum_cache_file:
227            self.checksum_cache = FileChecksumCache()
228            self.checksum_cache.init_cache(data, checksum_cache_file)
229        else:
230            self.checksum_cache = None
231
232        self.unihash_cache = bb.cache.SimpleCache("3")
233        self.unitaskhashes = self.unihash_cache.init_cache(data, "bb_unihashes.dat", {})
234        self.localdirsexclude = (data.getVar("BB_SIGNATURE_LOCAL_DIRS_EXCLUDE") or "CVS .bzr .git .hg .osc .p4 .repo .svn").split()
235        self.tidtopn = {}
236
237    def init_rundepcheck(self, data):
238        self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
239        if self.taskhash_ignore_tasks:
240            self.twl = re.compile(self.taskhash_ignore_tasks)
241        else:
242            self.twl = None
243
244    def _build_data(self, mcfn, d):
245
246        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
247        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)
248
249        taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)
250
251        for task in tasklist:
252            tid = mcfn + ":" + task
253            if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
254                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
255                bb.error("The following commands may help:")
256                cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task)
257                # Make sure sigdata is dumped before running printdiff
258                bb.error("%s -Snone" % cmd)
259                bb.error("Then:")
260                bb.error("%s -Sprintdiff\n" % cmd)
261            self.basehash[tid] = basehash[tid]
262
263        return taskdeps, gendeps, lookupcache
264
265    def set_setscene_tasks(self, setscene_tasks):
266        self.setscenetasks = set(setscene_tasks)
267
268    def finalise(self, fn, d, variant):
269
270        mc = d.getVar("__BBMULTICONFIG", False) or ""
271        mcfn = fn
272        if variant or mc:
273            mcfn = bb.cache.realfn2virtual(fn, variant, mc)
274
275        try:
276            taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
277        except bb.parse.SkipRecipe:
278            raise
279        except:
280            bb.warn("Error during finalise of %s" % mcfn)
281            raise
282
283        basehashes = {}
284        for task in taskdeps:
285            basehashes[task] = self.basehash[mcfn + ":" + task]
286
287        d.setVar("__siggen_basehashes", basehashes)
288        d.setVar("__siggen_gendeps", gendeps)
289        d.setVar("__siggen_varvals", lookupcache)
290        d.setVar("__siggen_taskdeps", taskdeps)
291
292        # Slow, but can be useful for debugging mismatched basehashes
293        #self.setup_datacache_from_datastore(mcfn, d)
294        #for task in taskdeps:
295        #    self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
296
297    def setup_datacache_from_datastore(self, mcfn, d):
298        super().setup_datacache_from_datastore(mcfn, d)
299
300        mc = bb.runqueue.mc_from_tid(mcfn)
301        for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
302            if not hasattr(self.datacaches[mc], attr):
303                setattr(self.datacaches[mc], attr, {})
304        self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
305        self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
306        self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")
307
308    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
309        # Return True if we should keep the dependency, False to drop it
310        # We only manipulate the dependencies for packages not in the ignore
311        # list
312        if self.twl and not self.twl.search(recipename):
313            # then process the actual dependencies
314            if self.twl.search(depname):
315                return False
316        return True
317
318    def read_taint(self, fn, task, stampbase):
319        taint = None
320        try:
321            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
322                taint = taintf.read()
323        except IOError:
324            pass
325        return taint
326
327    def prep_taskhash(self, tid, deps, dataCaches):
328
329        (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)
330
331        self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
332        self.runtaskdeps[tid] = []
333        self.file_checksum_values[tid] = []
334        recipename = dataCaches[mc].pkg_fn[mcfn]
335
336        self.tidtopn[tid] = recipename
337        # save hashfn for deps into siginfo?
338        for dep in deps:
339            (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
340            dep_pn = dataCaches[depmc].pkg_fn[depmcfn]
341
342            if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
343                continue
344
345            if dep not in self.taskhash:
346                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
347
348            dep_pnid = build_pnid(depmc, dep_pn, deptask)
349            self.runtaskdeps[tid].append((dep_pnid, dep))
350
351        if task in dataCaches[mc].file_checksums[mcfn]:
352            if self.checksum_cache:
353                checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
354            else:
355                checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
356            for (f,cs) in checksums:
357                self.file_checksum_values[tid].append((f,cs))
358
359        taskdep = dataCaches[mc].task_deps[mcfn]
360        if 'nostamp' in taskdep and task in taskdep['nostamp']:
361            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
362            if tid in self.taints and self.taints[tid].startswith("nostamp:"):
363                # Don't reset taint value upon every call
364                pass
365            else:
366                import uuid
367                taint = str(uuid.uuid4())
368                self.taints[tid] = "nostamp:" + taint
369
370        taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
371        if taint:
372            self.taints[tid] = taint
373            logger.warning("%s is tainted from a forced run" % tid)
374
375        return
376
377    def get_taskhash(self, tid, deps, dataCaches):
378
379        data = self.basehash[tid]
380        for dep in sorted(self.runtaskdeps[tid]):
381            data += self.get_unihash(dep[1])
382
383        for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
384            if cs:
385                if "/./" in f:
386                    data += "./" + f.split("/./")[1]
387                data += cs
388
389        if tid in self.taints:
390            if self.taints[tid].startswith("nostamp:"):
391                data += self.taints[tid][8:]
392            else:
393                data += self.taints[tid]
394
395        h = hashlib.sha256(data.encode("utf-8")).hexdigest()
396        self.taskhash[tid] = h
397        #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
398        return h
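    # Illustrative note (not from the original source): the taskhash above is the
    # sha256 of a string assembled as
    #   basehash + unihash of each sorted runtime dep
    #            + ("./"-relative path where present + checksum) per tracked file
    #            + any taint value
    # so a change to any dependency's unihash, tracked file or forced-run taint
    # produces a different task hash.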
399
400    def writeout_file_checksum_cache(self):
401        """Write/update the file checksum cache onto disk"""
402        if self.checksum_cache:
403            self.checksum_cache.save_extras()
404            self.checksum_cache.save_merge()
405        else:
406            bb.fetch2.fetcher_parse_save()
407            bb.fetch2.fetcher_parse_done()
408
409    def save_unitaskhashes(self):
410        self.unihash_cache.save(self.unitaskhashes)
411
412    def copy_unitaskhashes(self, targetdir):
413        self.unihash_cache.copyfile(targetdir)
414
415    def dump_sigtask(self, mcfn, task, stampbase, runtime):
416        tid = mcfn + ":" + task
417        mc = bb.runqueue.mc_from_tid(mcfn)
418        referencestamp = stampbase
419        if isinstance(runtime, str) and runtime.startswith("customfile"):
420            sigfile = stampbase
421            referencestamp = runtime[11:]
422        elif runtime and tid in self.taskhash:
423            sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid)
424        else:
425            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
426
427        with bb.utils.umask(0o002):
428            bb.utils.mkdirhier(os.path.dirname(sigfile))
429
430        data = {}
431        data['task'] = task
432        data['basehash_ignore_vars'] = self.basehash_ignore_vars
433        data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
434        data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
435        data['basehash'] = self.basehash[tid]
436        data['gendeps'] = {}
437        data['varvals'] = {}
438        data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
439        for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
440            if dep in self.basehash_ignore_vars:
441                continue
442            data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
443            data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
444
445        if runtime and tid in self.taskhash:
446            data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
447            data['file_checksum_values'] = []
448            for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
449                if "/./" in f:
450                    data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
451                else:
452                    data['file_checksum_values'].append((os.path.basename(f), cs))
453            data['runtaskhashes'] = {}
454            for dep in self.runtaskdeps[tid]:
455                data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
456            data['taskhash'] = self.taskhash[tid]
457            data['unihash'] = self.get_unihash(tid)
458
459        taint = self.read_taint(mcfn, task, referencestamp)
460        if taint:
461            data['taint'] = taint
462
463        if runtime and tid in self.taints:
464            if 'nostamp:' in self.taints[tid]:
465                data['taint'] = self.taints[tid]
466
467        computed_basehash = calc_basehash(data)
468        if computed_basehash != self.basehash[tid]:
469            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid))
470        if runtime and tid in self.taskhash:
471            computed_taskhash = calc_taskhash(data)
472            if computed_taskhash != self.taskhash[tid]:
473                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
474                sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
475
476        fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
477        try:
478            with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
479                json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
480                f.flush()
481            os.chmod(tmpfile, 0o664)
482            bb.utils.rename(tmpfile, sigfile)
483        except (OSError, IOError) as err:
484            try:
485                os.unlink(tmpfile)
486            except OSError:
487                pass
488            raise err
489
490class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
491    name = "basichash"
492
493    def get_stampfile_hash(self, tid):
494        if tid in self.taskhash:
495            return self.taskhash[tid]
496
497        # If the task is not in basehash either, the KeyError raised here is the error we want
498        return self.basehash[tid]
499
500    def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
501        if taskname.endswith("_setscene"):
502            tid = mcfn + ":" + taskname[:-9]
503        else:
504            tid = mcfn + ":" + taskname
505        if clean:
506            h = "*"
507        else:
508            h = self.get_stampfile_hash(tid)
509
510        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
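    # Illustrative sketch (hypothetical paths): unlike the noop generator,
    # basichash embeds the task (or base) hash in the stamp name, so a hash
    # change invalidates the stamp, e.g.
    #
    #   self.stampfile("/tmp/stamps/busybox", "busybox.bb", "do_compile", "")
    #   # -> "/tmp/stamps/busybox.do_compile.<sha256 hex hash>"
    #
    # stampcleanmask() below returns the same name with "*" in place of the hash.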
511
512    def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
513        return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)
514
515    def invalidate_task(self, task, mcfn):
516        bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))
517
518        mc = bb.runqueue.mc_from_tid(mcfn)
519        stamp = self.datacaches[mc].stamp[mcfn]
520
521        taintfn = stamp + '.' + task + '.taint'
522
523        import uuid
524        bb.utils.mkdirhier(os.path.dirname(taintfn))
525        # The specific content of the taint file is not really important,
526        # we just need it to be random, so a random UUID is used
527        with open(taintfn, 'w') as taintf:
528            taintf.write(str(uuid.uuid4()))
529
530class SignatureGeneratorUniHashMixIn(object):
531    def __init__(self, data):
532        self.extramethod = {}
533        # NOTE: The cache only tracks hashes that exist. Hashes that don't
534        # exist are always queried from the server since it is possible for
535        # hashes to appear over time, but much less likely for them to
536        # disappear
537        self.unihash_exists_cache = set()
538        super().__init__(data)
539
540    def get_taskdata(self):
541        return (self.server, self.method, self.extramethod, self.max_parallel) + super().get_taskdata()
542
543    def set_taskdata(self, data):
544        self.server, self.method, self.extramethod, self.max_parallel = data[:4]
545        super().set_taskdata(data[4:])
546
547    def client(self):
548        if getattr(self, '_client', None) is None:
549            self._client = hashserv.create_client(self.server)
550        return self._client
551
552    def client_pool(self):
553        if getattr(self, '_client_pool', None) is None:
554            self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel)
555        return self._client_pool
556
557    def reset(self, data):
558        self.__close_clients()
559        return super().reset(data)
560
561    def exit(self):
562        self.__close_clients()
563        return super().exit()
564
565    def __close_clients(self):
566        if getattr(self, '_client', None) is not None:
567            self._client.close()
568            self._client = None
569        if getattr(self, '_client_pool', None) is not None:
570            self._client_pool.close()
571            self._client_pool = None
572
573    def get_stampfile_hash(self, tid):
574        if tid in self.taskhash:
575            # If a unique hash is reported, use it as the stampfile hash. This
576            # ensures that a task won't be re-run when the taskhash changes but
577            # would still result in the same output hash
578            unihash = self._get_unihash(tid)
579            if unihash is not None:
580                return unihash
581
582        return super().get_stampfile_hash(tid)
583
584    def set_unihash(self, tid, unihash):
585        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
586        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
587        self.unitaskhashes[key] = (self.taskhash[tid], unihash)
588        self.unihash[tid] = unihash
589
590    def _get_unihash(self, tid, checkkey=None):
591        if tid not in self.tidtopn:
592            return None
593        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
594        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
595        if key not in self.unitaskhashes:
596            return None
597        if not checkkey:
598            checkkey = self.taskhash[tid]
599        (key, unihash) = self.unitaskhashes[key]
600        if key != checkkey:
601            return None
602        return unihash
603
604    def get_cached_unihash(self, tid):
605        taskhash = self.taskhash[tid]
606
607        # If it's not a setscene task we can return the taskhash directly
608        if self.setscenetasks and tid not in self.setscenetasks:
609            self.unihash[tid] = None
610            return taskhash
611
612        # TODO: This cache can grow unbounded. It probably only needs to keep
613        # the most recent (taskhash, unihash) entry for each task
614        unihash =  self._get_unihash(tid)
615        if unihash is not None:
616            self.unihash[tid] = unihash
617            return unihash
618
619        return None
620
621    def _get_method(self, tid):
622        method = self.method
623        if tid in self.extramethod:
624            method = method + self.extramethod[tid]
625
626        return method
627
628    def unihashes_exist(self, query):
629        if len(query) == 0:
630            return {}
631
632        uncached_query = {}
633        result = {}
634        for key, unihash in query.items():
635            if unihash in self.unihash_exists_cache:
636                result[key] = True
637            else:
638                uncached_query[key] = unihash
639
640        if self.max_parallel <= 1 or len(uncached_query) <= 1:
641            # No parallelism required. Make the query serially with the single client
642            uncached_result = {
643                key: self.client().unihash_exists(value) for key, value in uncached_query.items()
644            }
645        else:
646            uncached_result = self.client_pool().unihashes_exist(uncached_query)
647
648        for key, exists in uncached_result.items():
649            if exists:
650                self.unihash_exists_cache.add(query[key])
651            result[key] = exists
652
653        return result
654
655    def get_unihash(self, tid):
656        return self.get_unihashes([tid])[tid]
657
658    def get_unihashes(self, tids):
659        """
660        For an iterable of tids, returns a dictionary that maps each tid to a
661        unihash
662        """
663        result = {}
664        queries = {}
665        query_result = {}
666
667        for tid in tids:
668            unihash = self.get_cached_unihash(tid)
669            if unihash:
670                result[tid] = unihash
671            else:
672                queries[tid] = (self._get_method(tid), self.taskhash[tid])
673
674        if len(queries) == 0:
675            return result
676
677        if self.max_parallel <= 1 or len(queries) <= 1:
678            # No parallelism required. Make the query serially with the single client
679            for tid, args in queries.items():
680                query_result[tid] = self.client().get_unihash(*args)
681        else:
682            query_result = self.client_pool().get_unihashes(queries)
683
684        for tid, unihash in query_result.items():
685            # In the absence of being able to discover a unique hash from the
686            # server, make it be equivalent to the taskhash. The unique "hash" only
687            # really needs to be a unique string (not even necessarily a hash), but
688            # making it match the taskhash has a few advantages:
689            #
690            # 1) All of the sstate code that assumes hashes can be the same keeps working
691            # 2) It provides maximal compatibility with builders that don't use
692            #    an equivalency server
693            # 3) Multiple independent builders can easily derive the same
694            #    unique hash from the same input. This means that if the
695            #    independent builders find the same taskhash, but it isn't
696            #    reported to the server, there is a better chance that they
697            #    will agree on the unique hash.
698            taskhash = self.taskhash[tid]
699            if unihash:
700                # A unique hash equal to the taskhash is not very interesting,
701                # so it is reported at debug level 2. If they differ, that
702                # is much more interesting, so it is reported at debug level 1
703                hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
704            else:
705                hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
706                unihash = taskhash
707
708
709            self.set_unihash(tid, unihash)
710            self.unihash[tid] = unihash
711            result[tid] = unihash
712
713        return result
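    # Illustrative sketch (hypothetical tids and server): callers usually resolve
    # many task ids at once so the query can be batched or parallelised:
    #
    #   unihashes = siggen.get_unihashes(["/path/recipe.bb:do_compile",
    #                                     "/path/recipe.bb:do_install"])
    #   # each value is the server-reported unihash, or the taskhash itself when
    #   # the server has no equivalent recorded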
714
715    def report_unihash(self, path, task, d):
716        import importlib
717
718        taskhash = d.getVar('BB_TASKHASH')
719        unihash = d.getVar('BB_UNIHASH')
720        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
721        tempdir = d.getVar('T')
722        mcfn = d.getVar('BB_FILENAME')
723        tid = mcfn + ':do_' + task
724        key = tid + ':' + taskhash
725
726        if self.setscenetasks and tid not in self.setscenetasks:
727            return
728
729        # This can happen if locked sigs are in action. Detect and just return
730        if taskhash != self.taskhash[tid]:
731            return
732
733        # Sanity checks
734        cache_unihash = self._get_unihash(tid, checkkey=taskhash)
735        if cache_unihash is None:
736            bb.fatal('%s not in unihash cache. Please report this error' % key)
737
738        if cache_unihash != unihash:
739            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))
740
741        sigfile = None
742        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
743        sigfile_link = "depsig.do_%s" % task
744
745        try:
746            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')
747
748            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}
749
750            if "." in self.method:
751                (module, method) = self.method.rsplit('.', 1)
752                locs['method'] = getattr(importlib.import_module(module), method)
753                outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)
754            else:
755                outhash = bb.utils.better_eval(self.method + '(path, sigfile, task, d)', locs)
756
757            try:
758                extra_data = {}
759
760                owner = d.getVar('SSTATE_HASHEQUIV_OWNER')
761                if owner:
762                    extra_data['owner'] = owner
763
764                if report_taskdata:
765                    sigfile.seek(0)
766
767                    extra_data['PN'] = d.getVar('PN')
768                    extra_data['PV'] = d.getVar('PV')
769                    extra_data['PR'] = d.getVar('PR')
770                    extra_data['task'] = task
771                    extra_data['outhash_siginfo'] = sigfile.read().decode('utf-8')
772
773                method = self.method
774                if tid in self.extramethod:
775                    method = method + self.extramethod[tid]
776
777                data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
778                new_unihash = data['unihash']
779
780                if new_unihash != unihash:
781                    hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
782                    bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
783                    self.set_unihash(tid, new_unihash)
784                    d.setVar('BB_UNIHASH', new_unihash)
785                else:
786                    hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
787            except ConnectionError as e:
788                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
789        finally:
790            if sigfile:
791                sigfile.close()
792
793                sigfile_link_path = os.path.join(tempdir, sigfile_link)
794                bb.utils.remove(sigfile_link_path)
795
796                try:
797                    os.symlink(sigfile_name, sigfile_link_path)
798                except OSError:
799                    pass
800
801    def report_unihash_equiv(self, tid, taskhash, wanted_unihash, current_unihash, datacaches):
802        try:
803            extra_data = {}
804            method = self.method
805            if tid in self.extramethod:
806                method = method + self.extramethod[tid]
807
808            data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
809            hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
810
811            if data is None:
812                bb.warn("Server unable to handle unihash report")
813                return False
814
815            finalunihash = data['unihash']
816
817            if finalunihash == current_unihash:
818                hashequiv_logger.verbose('Task %s unihash %s unchanged by server' % (tid, finalunihash))
819            elif finalunihash == wanted_unihash:
820                hashequiv_logger.verbose('Task %s unihash changed %s -> %s as wanted' % (tid, current_unihash, finalunihash))
821                self.set_unihash(tid, finalunihash)
822                return True
823            else:
824                # TODO: What to do here?
825                hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
826
827        except ConnectionError as e:
828            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
829
830        return False
831
832#
833# Dummy class used for bitbake-selftest
834#
835class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash):
836    name = "TestEquivHash"
837    def init_rundepcheck(self, data):
838        super().init_rundepcheck(data)
839        self.server = data.getVar('BB_HASHSERVE')
840        self.method = "sstate_output_hash"
841        self.max_parallel = 1
842
843def clean_checksum_file_path(file_checksum_tuple):
844    f, cs = file_checksum_tuple
845    if "/./" in f:
846        return "./" + f.split("/./")[1]
847    return f
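# Illustrative sketch (hypothetical paths): file checksum entries are keyed on a
# path normalised to be relative to any "/./" marker, so signatures do not depend
# on the absolute location of the source tree.
#
#   clean_checksum_file_path(("/abs/build/./files/fix.patch", "deadbeef"))
#   # -> "./files/fix.patch"
#   clean_checksum_file_path(("/abs/other.patch", "deadbeef"))
#   # -> "/abs/other.patch"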
848
849def dump_this_task(outfile, d):
850    import bb.parse
851    mcfn = d.getVar("BB_FILENAME")
852    task = "do_" + d.getVar("BB_CURRENTTASK")
853    referencestamp = bb.parse.siggen.stampfile_base(mcfn)
854    bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)
855
856def init_colors(enable_color):
857    """Initialise colour dict for passing to compare_sigfiles()"""
858    # First set up the colours
859    colors = {'color_title':   '\033[1m',
860              'color_default': '\033[0m',
861              'color_add':     '\033[0;32m',
862              'color_remove':  '\033[0;31m',
863             }
864    # Leave all keys present but clear the values
865    if not enable_color:
866        for k in colors.keys():
867            colors[k] = ''
868    return colors
869
870def worddiff_str(oldstr, newstr, colors=None):
871    if not colors:
872        colors = init_colors(False)
873    diff = simplediff.diff(oldstr.split(' '), newstr.split(' '))
874    ret = []
875    for change, value in diff:
876        value = ' '.join(value)
877        if change == '=':
878            ret.append(value)
879        elif change == '+':
880            item = '{color_add}{{+{value}+}}{color_default}'.format(value=value, **colors)
881            ret.append(item)
882        elif change == '-':
883            item = '{color_remove}[-{value}-]{color_default}'.format(value=value, **colors)
884            ret.append(item)
885    whitespace_note = ''
886    if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()):
887        whitespace_note = ' (whitespace changed)'
888    return '"%s"%s' % (' '.join(ret), whitespace_note)
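# Illustrative sketch (hypothetical strings, output shape approximate):
# worddiff_str() marks word-level additions with {+...+} and removals with
# [-...-] when colours are disabled:
#
#   worddiff_str("a b c", "a d c")
#   # -> '"a [-b-] {+d+} c"'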
889
890def list_inline_diff(oldlist, newlist, colors=None):
891    if not colors:
892        colors = init_colors(False)
893    diff = simplediff.diff(oldlist, newlist)
894    ret = []
895    for change, value in diff:
896        value = ' '.join(value)
897        if change == '=':
898            ret.append("'%s'" % value)
899        elif change == '+':
900            item = '{color_add}+{value}{color_default}'.format(value=value, **colors)
901            ret.append(item)
902        elif change == '-':
903            item = '{color_remove}-{value}{color_default}'.format(value=value, **colors)
904            ret.append(item)
905    return '[%s]' % (', '.join(ret))
906
907# Handle renamed fields
908def handle_renames(data):
909    if 'basewhitelist' in data:
910        data['basehash_ignore_vars'] = data['basewhitelist']
911        del data['basewhitelist']
912    if 'taskwhitelist' in data:
913        data['taskhash_ignore_tasks'] = data['taskwhitelist']
914        del data['taskwhitelist']
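# Illustrative sketch: older sigdata files used the pre-rename field names, and
# handle_renames() maps them to the current keys in place:
#
#   data = {'basewhitelist': {'DATE'}, 'taskwhitelist': None}
#   handle_renames(data)
#   # data == {'basehash_ignore_vars': {'DATE'}, 'taskhash_ignore_tasks': None}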
915
916
917def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
918    output = []
919
920    colors = init_colors(color)
921    def color_format(formatstr, **values):
922        """
923        Return colour formatted string.
924        NOTE: call with the format string, not an already formatted string
925        containing values (otherwise you could have trouble with { and }
926        characters)
927        """
928        if not formatstr.endswith('{color_default}'):
929            formatstr += '{color_default}'
930        # In newer python 3 versions you can pass both of these directly,
931        # but we only require 3.4 at the moment
932        formatparams = {}
933        formatparams.update(colors)
934        formatparams.update(values)
935        return formatstr.format(**formatparams)
936
937    try:
938        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
939            a_data = json.load(f, object_hook=SetDecoder)
940    except (TypeError, OSError) as err:
941        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
942        raise err
943    try:
944        with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
945            b_data = json.load(f, object_hook=SetDecoder)
946    except (TypeError, OSError) as err:
947        bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
948        raise err
949
950    for data in [a_data, b_data]:
951        handle_renames(data)
952
953    def dict_diff(a, b, ignored_vars=set()):
954        sa = set(a.keys())
955        sb = set(b.keys())
956        common = sa & sb
957        changed = set()
958        for i in common:
959            if a[i] != b[i] and i not in ignored_vars:
960                changed.add(i)
961        added = sb - sa
962        removed = sa - sb
963        return changed, added, removed
964
965    def file_checksums_diff(a, b):
966        from collections import Counter
967
968        # Convert lists back to tuples
969        a = [(f[0], f[1]) for f in a]
970        b = [(f[0], f[1]) for f in b]
971
972        # Compare lists, ensuring we can handle duplicate filenames if they exist
973        removedcount = Counter(a)
974        removedcount.subtract(b)
975        addedcount = Counter(b)
976        addedcount.subtract(a)
977        added = []
978        for x in b:
979            if addedcount[x] > 0:
980                addedcount[x] -= 1
981                added.append(x)
982        removed = []
983        changed = []
984        for x in a:
985            if removedcount[x] > 0:
986                removedcount[x] -= 1
987                for y in added:
988                    if y[0] == x[0]:
989                        changed.append((x[0], x[1], y[1]))
990                        added.remove(y)
991                        break
992                else:
993                    removed.append(x)
994        added = [x[0] for x in added]
995        removed = [x[0] for x in removed]
996        return changed, added, removed
997
998    if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
999        output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
1000        if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
1001            output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))
1002
1003    if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
1004        output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
1005        if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
1006            output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))
1007
1008    if a_data['taskdeps'] != b_data['taskdeps']:
1009        output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
1010
1011    if a_data['basehash'] != b_data['basehash'] and not collapsed:
1012        output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))
1013
1014    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
1015    if changed:
1016        for dep in sorted(changed):
1017            output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
1018            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
1019                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
1020    if added:
1021        for dep in sorted(added):
1022            output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
1023    if removed:
1024        for dep in sorted(removed):
1025            output.append(color_format("{color_title}Dependency on variable %s was removed") % (dep))
1026
1027
1028    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
1029    if changed:
1030        for dep in sorted(changed):
1031            oldval = a_data['varvals'][dep]
1032            newval = b_data['varvals'][dep]
1033            if newval and oldval and ('\n' in oldval or '\n' in newval):
1034                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
1035                # Cut off the first two lines, since we aren't interested in
1036                # the old/new filename (they are blank anyway in this case)
1037                difflines = list(diff)[2:]
1038                if color:
1039                    # Add colour to diff output
1040                    for i, line in enumerate(difflines):
1041                        if line.startswith('+'):
1042                            line = color_format('{color_add}{line}', line=line)
1043                            difflines[i] = line
1044                        elif line.startswith('-'):
1045                            line = color_format('{color_remove}{line}', line=line)
1046                            difflines[i] = line
1047                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines)))
1048            elif newval and oldval and (' ' in oldval or ' ' in newval):
1049                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors)))
1050            else:
1051                output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
1052
1053    if not 'file_checksum_values' in a_data:
1054         a_data['file_checksum_values'] = []
1055    if not 'file_checksum_values' in b_data:
1056         b_data['file_checksum_values'] = []
1057
1058    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
1059    if changed:
1060        for f, old, new in changed:
1061            output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new))
1062    if added:
1063        for f in added:
1064            output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f))
1065    if removed:
1066        for f in removed:
1067            output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f))
1068
1069    if not 'runtaskdeps' in a_data:
1070         a_data['runtaskdeps'] = {}
1071    if not 'runtaskdeps' in b_data:
1072         b_data['runtaskdeps'] = {}
1073
1074    if not collapsed:
1075        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
1076            changed = ["Number of task dependencies changed"]
1077        else:
1078            changed = []
1079            for idx, task in enumerate(a_data['runtaskdeps']):
1080                a = a_data['runtaskdeps'][idx]
1081                b = b_data['runtaskdeps'][idx]
1082                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
1083                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
1084
1085        if changed:
1086            clean_a = a_data['runtaskdeps']
1087            clean_b = b_data['runtaskdeps']
1088            if clean_a != clean_b:
1089                output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
1090            else:
1091                output.append(color_format("{color_title}runtaskdeps changed:"))
1092            output.append("\n".join(changed))
1093
1094
1095    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
1096        a = a_data['runtaskhashes']
1097        b = b_data['runtaskhashes']
1098        changed, added, removed = dict_diff(a, b)
1099        if added:
1100            for dep in sorted(added):
1101                bdep_found = False
1102                if removed:
1103                    for bdep in removed:
1104                        if b[dep] == a[bdep]:
1105                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
1106                            bdep_found = True
1107                if not bdep_found:
1108                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
1109        if removed:
1110            for dep in sorted(removed):
1111                adep_found = False
1112                if added:
1113                    for adep in added:
1114                        if b[adep] == a[dep]:
1115                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
1116                            adep_found = True
1117                if not adep_found:
1118                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
1119        if changed:
1120            for dep in sorted(changed):
1121                if not collapsed:
1122                    output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
1123                if callable(recursecb):
1124                    recout = recursecb(dep, a[dep], b[dep])
1125                    if recout:
1126                        if collapsed:
1127                            output.extend(recout)
1128                        else:
1129                            # If a dependent hash changed, might as well print the line above and then defer to the changes in
1130                            # that hash since in all likelihood, they're the same changes this task also saw.
1131                            output = [output[-1]] + recout
1132                            break
1133
1134    a_taint = a_data.get('taint', None)
1135    b_taint = b_data.get('taint', None)
1136    if a_taint != b_taint:
1137        if a_taint and a_taint.startswith('nostamp:'):
1138            a_taint = a_taint.replace('nostamp:', 'nostamp(uuid4):')
1139        if b_taint and b_taint.startswith('nostamp:'):
1140            b_taint = b_taint.replace('nostamp:', 'nostamp(uuid4):')
1141        output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))
1142
1143    return output
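# Illustrative sketch (hypothetical file names): bitbake-diffsigs style usage of
# compare_sigfiles(); each returned entry is one human-readable change line.
#
#   for line in compare_sigfiles("old.sigdata.zst", "new.sigdata.zst", color=True):
#       print(line)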
1144
1145
1146def calc_basehash(sigdata):
1147    task = sigdata['task']
1148    basedata = sigdata['varvals'][task]
1149
1150    if basedata is None:
1151        basedata = ''
1152
1153    alldeps = sigdata['taskdeps']
1154    for dep in sorted(alldeps):
1155        basedata = basedata + dep
1156        val = sigdata['varvals'][dep]
1157        if val is not None:
1158            basedata = basedata + str(val)
1159
1160    return hashlib.sha256(basedata.encode("utf-8")).hexdigest()
1161
1162def calc_taskhash(sigdata):
1163    data = sigdata['basehash']
1164
1165    for dep in sigdata['runtaskdeps']:
1166        data = data + sigdata['runtaskhashes'][dep]
1167
1168    for c in sigdata['file_checksum_values']:
1169        if c[1]:
1170            if "./" in c[0]:
1171                data = data + c[0]
1172            data = data + c[1]
1173
1174    if 'taint' in sigdata:
1175        if 'nostamp:' in sigdata['taint']:
1176            data = data + sigdata['taint'][8:]
1177        else:
1178            data = data + sigdata['taint']
1179
1180    return hashlib.sha256(data.encode("utf-8")).hexdigest()
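# Illustrative sketch (hypothetical sigdata): calc_basehash() rebuilds the base
# hash of a dumped signature from the task's own value plus each dependent
# variable name and value in sorted order; calc_taskhash() then layers the
# runtaskhashes, file checksums and any taint on top of that basehash.
#
#   sigdata = {'task': 'do_fetch',
#              'taskdeps': ['SRC_URI'],
#              'varvals': {'do_fetch': 'fetch body', 'SRC_URI': 'http://x'}}
#   calc_basehash(sigdata)
#   # == hashlib.sha256("fetch bodySRC_URIhttp://x".encode("utf-8")).hexdigest()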
1181
1182
1183def dump_sigfile(a):
1184    output = []
1185
1186    try:
1187        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
1188            a_data = json.load(f, object_hook=SetDecoder)
1189    except (TypeError, OSError) as err:
1190        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
1191        raise err
1192
1193    handle_renames(a_data)
1194
1195    output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))
1196
1197    output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))
1198
1199    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
1200
1201    output.append("basehash: %s" % (a_data['basehash']))
1202
1203    for dep in sorted(a_data['gendeps']):
1204        output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))
1205
1206    for dep in sorted(a_data['varvals']):
1207        output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
1208
1209    if 'runtaskdeps' in a_data:
1210        output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))
1211
1212    if 'file_checksum_values' in a_data:
1213        output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))
1214
1215    if 'runtaskhashes' in a_data:
1216        for dep in sorted(a_data['runtaskhashes']):
1217            output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
1218
1219    if 'taint' in a_data:
1220        if a_data['taint'].startswith('nostamp:'):
1221            msg = a_data['taint'].replace('nostamp:', 'nostamp(uuid4):')
1222        else:
1223            msg = a_data['taint']
1224        output.append("Tainted (by forced/invalidated task): %s" % msg)
1225
1226    if 'task' in a_data:
1227        computed_basehash = calc_basehash(a_data)
1228        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
1229    else:
1230        output.append("Unable to compute base hash")
1231
1232    computed_taskhash = calc_taskhash(a_data)
1233    output.append("Computed task hash is %s" % computed_taskhash)
1234
1235    return output
1236