xref: /openbmc/openbmc/poky/bitbake/lib/bb/siggen.py (revision 319b8b92)
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import hashlib
8import logging
9import os
10import re
11import tempfile
12import pickle
13import bb.data
14import difflib
15import simplediff
16import json
17import types
18from contextlib import contextmanager
19import bb.compress.zstd
20from bb.checksum import FileChecksumCache
21from bb import runqueue
22import hashserv
23import hashserv.client
24
25logger = logging.getLogger('BitBake.SigGen')
26hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
27
28# find_siginfo and find_siginfo_version are set by the metadata siggen
29# The minimum version of the find_siginfo function we need
30find_siginfo_minversion = 2
31
32HASHSERV_ENVVARS = [
33    "SSL_CERT_DIR",
34    "SSL_CERT_FILE",
35    "NO_PROXY",
36    "HTTPS_PROXY",
37    "HTTP_PROXY"
38]
39
40def check_siggen_version(siggen):
41    if not hasattr(siggen, "find_siginfo_version"):
42        bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
43    if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
44        bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))
45
46class SetEncoder(json.JSONEncoder):
47    def default(self, obj):
48        if isinstance(obj, set) or isinstance(obj, frozenset):
49            return dict(_set_object=list(sorted(obj)))
50        return json.JSONEncoder.default(self, obj)
51
52def SetDecoder(dct):
53    if '_set_object' in dct:
54        return frozenset(dct['_set_object'])
55    return dct
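
# A minimal round-trip illustration (documentation only, not part of the
# module API): sets and frozensets are serialised as {"_set_object": [...]}
# and decoded back to frozensets.
#
#   >>> blob = json.dumps({"deps": {"b", "a"}}, cls=SetEncoder)
#   >>> blob
#   '{"deps": {"_set_object": ["a", "b"]}}'
#   >>> json.loads(blob, object_hook=SetDecoder) == {"deps": frozenset({"a", "b"})}
#   True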
56
57def init(d):
58    siggens = [obj for obj in globals().values()
59                      if type(obj) is type and issubclass(obj, SignatureGenerator)]
60
61    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
62    for sg in siggens:
63        if desired == sg.name:
64            return sg(d)
65    else:
66        logger.error("Invalid signature generator '%s', using default 'noop'\n"
67                     "Available generators: %s", desired,
68                     ', '.join(obj.name for obj in siggens))
69        return SignatureGenerator(d)
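
# Selection example (configuration, not Python; value is illustrative):
#
#   BB_SIGNATURE_HANDLER = "basichash"
#
# would pick SignatureGeneratorBasicHash defined below. Metadata layers can
# provide additional handlers by subclassing SignatureGenerator; the exact
# registration mechanism lives in the metadata (e.g. OE-Core's sstatesig.py).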
70
71class SignatureGenerator(object):
72    """
73    """
74    name = "noop"
75
76    def __init__(self, data):
77        self.basehash = {}
78        self.taskhash = {}
79        self.unihash = {}
80        self.runtaskdeps = {}
81        self.file_checksum_values = {}
82        self.taints = {}
83        self.unitaskhashes = {}
84        self.tidtopn = {}
85        self.setscenetasks = set()
86
87    def finalise(self, fn, d, variant):
88        return
89
90    def postparsing_clean_cache(self):
91        return
92
93    def setup_datacache(self, datacaches):
94        self.datacaches = datacaches
95
96    def setup_datacache_from_datastore(self, mcfn, d):
97        # In task context we have no cache so set up internal data structures
98        # from the fully parsed data store provided
99
100        mc = d.getVar("__BBMULTICONFIG", False) or ""
101        tasks = d.getVar('__BBTASKS', False)
102
103        self.datacaches = {}
104        self.datacaches[mc] = types.SimpleNamespace()
105        setattr(self.datacaches[mc], "stamp", {})
106        self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
107        setattr(self.datacaches[mc], "stamp_extrainfo", {})
108        self.datacaches[mc].stamp_extrainfo[mcfn] = {}
109        for t in tasks:
110            flag = d.getVarFlag(t, "stamp-extra-info")
111            if flag:
112                self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
113
114    def get_cached_unihash(self, tid):
115        return None
116
117    def get_unihash(self, tid):
118        unihash = self.get_cached_unihash(tid)
119        if unihash:
120            return unihash
121        return self.taskhash[tid]
122
123    def get_unihashes(self, tids):
124        return {tid: self.get_unihash(tid) for tid in tids}
125
126    def prep_taskhash(self, tid, deps, dataCaches):
127        return
128
129    def get_taskhash(self, tid, deps, dataCaches):
130        self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
131        return self.taskhash[tid]
132
133    def writeout_file_checksum_cache(self):
134        """Write/update the file checksum cache onto disk"""
135        return
136
137    def stampfile_base(self, mcfn):
138        mc = bb.runqueue.mc_from_tid(mcfn)
139        return self.datacaches[mc].stamp[mcfn]
140
141    def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
142        mc = bb.runqueue.mc_from_tid(mcfn)
143        stamp = self.datacaches[mc].stamp[mcfn]
144        if not stamp:
145            return
146
147        stamp_extrainfo = ""
148        if extrainfo:
149            taskflagname = taskname
150            if taskname.endswith("_setscene"):
151                taskflagname = taskname.replace("_setscene", "")
152            stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
153
154        return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)
155
156    def stampfile(self, stampbase, file_name, taskname, extrainfo):
157        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
158
159    def stampcleanmask_mcfn(self, taskname, mcfn):
160        mc = bb.runqueue.mc_from_tid(mcfn)
161        stamp = self.datacaches[mc].stamp[mcfn]
162        if not stamp:
163            return []
164
165        taskflagname = taskname
166        if taskname.endswith("_setscene"):
167            taskflagname = taskname.replace("_setscene", "")
168        stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
169
170        return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)
171
172    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
173        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
174
175    def dump_sigtask(self, mcfn, task, stampbase, runtime):
176        return
177
178    def invalidate_task(self, task, mcfn):
179        mc = bb.runqueue.mc_from_tid(mcfn)
180        stamp = self.datacaches[mc].stamp[mcfn]
181        bb.utils.remove(stamp)
182
183    def dump_sigs(self, dataCache, options):
184        return
185
186    def get_taskdata(self):
187        return (self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks)
188
189    def set_taskdata(self, data):
190        self.runtaskdeps, self.taskhash, self.unihash, self.file_checksum_values, self.taints, self.basehash, self.unitaskhashes, self.tidtopn, self.setscenetasks = data
191
192    def reset(self, data):
193        self.__init__(data)
194
195    def get_taskhashes(self):
196        return self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn
197
198    def set_taskhashes(self, hashes):
199        self.taskhash, self.unihash, self.unitaskhashes, self.tidtopn = hashes
200
201    def save_unitaskhashes(self):
202        return
203
204    def copy_unitaskhashes(self, targetdir):
205        return
206
207    def set_setscene_tasks(self, setscene_tasks):
208        return
209
210    def exit(self):
211        return
212
213def build_pnid(mc, pn, taskname):
214    if mc:
215        return "mc:" + mc + ":" + pn + ":" + taskname
216    return pn + ":" + taskname
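
# For example (hypothetical recipe/task names):
#   build_pnid("", "zlib", "do_configure")    -> "zlib:do_configure"
#   build_pnid("mc1", "zlib", "do_configure") -> "mc:mc1:zlib:do_configure"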
217
218class SignatureGeneratorBasic(SignatureGenerator):
219    """
220    """
221    name = "basic"
222
223    def __init__(self, data):
224        self.basehash = {}
225        self.taskhash = {}
226        self.unihash = {}
227        self.runtaskdeps = {}
228        self.file_checksum_values = {}
229        self.taints = {}
230        self.setscenetasks = set()
231        self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
232        self.taskhash_ignore_tasks = None
233        self.init_rundepcheck(data)
234        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
235        if checksum_cache_file:
236            self.checksum_cache = FileChecksumCache()
237            self.checksum_cache.init_cache(data, checksum_cache_file)
238        else:
239            self.checksum_cache = None
240
241        self.unihash_cache = bb.cache.SimpleCache("3")
242        self.unitaskhashes = self.unihash_cache.init_cache(data, "bb_unihashes.dat", {})
243        self.localdirsexclude = (data.getVar("BB_SIGNATURE_LOCAL_DIRS_EXCLUDE") or "CVS .bzr .git .hg .osc .p4 .repo .svn").split()
244        self.tidtopn = {}
245
246    def init_rundepcheck(self, data):
247        self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
248        if self.taskhash_ignore_tasks:
249            self.twl = re.compile(self.taskhash_ignore_tasks)
250        else:
251            self.twl = None
252
253    def _build_data(self, mcfn, d):
254
255        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
256        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)
257
258        taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)
259
260        for task in tasklist:
261            tid = mcfn + ":" + task
262            if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
263                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
264                bb.error("The following commands may help:")
265                cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task)
266                # Make sure sigdata is dumped before running printdiff
267                bb.error("%s -Snone" % cmd)
268                bb.error("Then:")
269                bb.error("%s -Sprintdiff\n" % cmd)
270            self.basehash[tid] = basehash[tid]
271
272        return taskdeps, gendeps, lookupcache
273
274    def set_setscene_tasks(self, setscene_tasks):
275        self.setscenetasks = set(setscene_tasks)
276
277    def finalise(self, fn, d, variant):
278
279        mc = d.getVar("__BBMULTICONFIG", False) or ""
280        mcfn = fn
281        if variant or mc:
282            mcfn = bb.cache.realfn2virtual(fn, variant, mc)
283
284        try:
285            taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
286        except bb.parse.SkipRecipe:
287            raise
288        except:
289            bb.warn("Error during finalise of %s" % mcfn)
290            raise
291
292        basehashes = {}
293        for task in taskdeps:
294            basehashes[task] = self.basehash[mcfn + ":" + task]
295
296        d.setVar("__siggen_basehashes", basehashes)
297        d.setVar("__siggen_gendeps", gendeps)
298        d.setVar("__siggen_varvals", lookupcache)
299        d.setVar("__siggen_taskdeps", taskdeps)
300
301        #Slow but can be useful for debugging mismatched basehashes
302        #self.setup_datacache_from_datastore(mcfn, d)
303        #for task in taskdeps:
304        #    self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
305
306    def setup_datacache_from_datastore(self, mcfn, d):
307        super().setup_datacache_from_datastore(mcfn, d)
308
309        mc = bb.runqueue.mc_from_tid(mcfn)
310        for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
311            if not hasattr(self.datacaches[mc], attr):
312                setattr(self.datacaches[mc], attr, {})
313        self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
314        self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
315        self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")
316
317    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
318        # Return True if we should keep the dependency, False to drop it
319        # We only manipulate the dependencies for packages not in the ignore
320        # list
321        if self.twl and not self.twl.search(recipename):
322            # then process the actual dependencies
323            if self.twl.search(depname):
324                return False
325        return True
326
327    def read_taint(self, fn, task, stampbase):
328        taint = None
329        try:
330            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
331                taint = taintf.read()
332        except IOError:
333            pass
334        return taint
335
336    def prep_taskhash(self, tid, deps, dataCaches):
337
338        (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)
339
340        self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
341        self.runtaskdeps[tid] = []
342        self.file_checksum_values[tid] = []
343        recipename = dataCaches[mc].pkg_fn[mcfn]
344
345        self.tidtopn[tid] = recipename
346        # save hashfn for deps into siginfo?
347        for dep in deps:
348            (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
349            dep_pn = dataCaches[depmc].pkg_fn[depmcfn]
350
351            if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
352                continue
353
354            if dep not in self.taskhash:
355                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
356
357            dep_pnid = build_pnid(depmc, dep_pn, deptask)
358            self.runtaskdeps[tid].append((dep_pnid, dep))
359
360        if task in dataCaches[mc].file_checksums[mcfn]:
361            if self.checksum_cache:
362                checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
363            else:
364                checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
365            for (f,cs) in checksums:
366                self.file_checksum_values[tid].append((f,cs))
367
368        taskdep = dataCaches[mc].task_deps[mcfn]
369        if 'nostamp' in taskdep and task in taskdep['nostamp']:
370            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
371            if tid in self.taints and self.taints[tid].startswith("nostamp:"):
372                # Don't reset taint value upon every call
373                pass
374            else:
375                import uuid
376                taint = str(uuid.uuid4())
377                self.taints[tid] = "nostamp:" + taint
378
379        taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
380        if taint:
381            self.taints[tid] = taint
382            logger.warning("%s is tainted from a forced run" % tid)
383
384        return
385
386    def get_taskhash(self, tid, deps, dataCaches):
387
388        data = self.basehash[tid]
389        for dep in sorted(self.runtaskdeps[tid]):
390            data += self.get_unihash(dep[1])
391
392        for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
393            if cs:
394                if "/./" in f:
395                    data += "./" + f.split("/./")[1]
396                data += cs
397
398        if tid in self.taints:
399            if self.taints[tid].startswith("nostamp:"):
400                data += self.taints[tid][8:]
401            else:
402                data += self.taints[tid]
403
404        h = hashlib.sha256(data.encode("utf-8")).hexdigest()
405        self.taskhash[tid] = h
406        #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
407        return h
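
    # Conceptual sketch of the composition performed by get_taskhash() above
    # (pseudocode only, hypothetical helper names):
    #
    #   data = basehash[tid]
    #          + concat(unihash(dep) for dep in sorted runtime dependencies)
    #          + concat(build-relative path (if any) + checksum for tracked files)
    #          + taint value (if the task is tainted or nostamp)
    #   taskhash[tid] = sha256(data)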
408
409    def writeout_file_checksum_cache(self):
410        """Write/update the file checksum cache onto disk"""
411        if self.checksum_cache:
412            self.checksum_cache.save_extras()
413            self.checksum_cache.save_merge()
414        else:
415            bb.fetch2.fetcher_parse_save()
416            bb.fetch2.fetcher_parse_done()
417
418    def save_unitaskhashes(self):
419        self.unihash_cache.save(self.unitaskhashes)
420
421    def copy_unitaskhashes(self, targetdir):
422        self.unihash_cache.copyfile(targetdir)
423
424    def dump_sigtask(self, mcfn, task, stampbase, runtime):
425        tid = mcfn + ":" + task
426        mc = bb.runqueue.mc_from_tid(mcfn)
427        referencestamp = stampbase
428        if isinstance(runtime, str) and runtime.startswith("customfile"):
429            sigfile = stampbase
430            referencestamp = runtime[11:]
431        elif runtime and tid in self.taskhash:
432            sigfile = stampbase + "." + task + ".sigdata" + "." + self.get_unihash(tid)
433        else:
434            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid]
435
436        with bb.utils.umask(0o002):
437            bb.utils.mkdirhier(os.path.dirname(sigfile))
438
439        data = {}
440        data['task'] = task
441        data['basehash_ignore_vars'] = self.basehash_ignore_vars
442        data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
443        data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
444        data['basehash'] = self.basehash[tid]
445        data['gendeps'] = {}
446        data['varvals'] = {}
447        data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
448        for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
449            if dep in self.basehash_ignore_vars:
450                continue
451            data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
452            data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
453
454        if runtime and tid in self.taskhash:
455            data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
456            data['file_checksum_values'] = []
457            for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
458                if "/./" in f:
459                    data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
460                else:
461                    data['file_checksum_values'].append((os.path.basename(f), cs))
462            data['runtaskhashes'] = {}
463            for dep in self.runtaskdeps[tid]:
464                data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
465            data['taskhash'] = self.taskhash[tid]
466            data['unihash'] = self.get_unihash(tid)
467
468        taint = self.read_taint(mcfn, task, referencestamp)
469        if taint:
470            data['taint'] = taint
471
472        if runtime and tid in self.taints:
473            if 'nostamp:' in self.taints[tid]:
474                data['taint'] = self.taints[tid]
475
476        computed_basehash = calc_basehash(data)
477        if computed_basehash != self.basehash[tid]:
478            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid))
479        if runtime and tid in self.taskhash:
480            computed_taskhash = calc_taskhash(data)
481            if computed_taskhash != self.taskhash[tid]:
482                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
483                sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
484
485        fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
486        try:
487            with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
488                json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
489                f.flush()
490            os.chmod(tmpfile, 0o664)
491            bb.utils.rename(tmpfile, sigfile)
492        except (OSError, IOError) as err:
493            try:
494                os.unlink(tmpfile)
495            except OSError:
496                pass
497            raise err
498
499class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
500    name = "basichash"
501
502    def get_stampfile_hash(self, tid):
503        if tid in self.taskhash:
504            return self.taskhash[tid]
505
506        # If task is not in basehash, then error
507        return self.basehash[tid]
508
509    def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
510        if taskname.endswith("_setscene"):
511            tid = mcfn + ":" + taskname[:-9]
512        else:
513            tid = mcfn + ":" + taskname
514        if clean:
515            h = "*"
516        else:
517            h = self.get_stampfile_hash(tid)
518
519        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
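
    # Illustrative result (hypothetical values): with stampbase
    # "/stamps/foo-1.0-r0", taskname "do_compile", a hash of "abc123" and no
    # extrainfo, this returns "/stamps/foo-1.0-r0.do_compile.abc123".
    # With clean=True the hash field becomes "*", so the returned string can
    # be used as a glob matching the stamps for any hash of that task.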
520
521    def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
522        return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)
523
524    def invalidate_task(self, task, mcfn):
525        bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))
526
527        mc = bb.runqueue.mc_from_tid(mcfn)
528        stamp = self.datacaches[mc].stamp[mcfn]
529
530        taintfn = stamp + '.' + task + '.taint'
531
532        import uuid
533        bb.utils.mkdirhier(os.path.dirname(taintfn))
534        # The specific content of the taint file is not really important,
535        # we just need it to be random, so a random UUID is used
536        with open(taintfn, 'w') as taintf:
537            taintf.write(str(uuid.uuid4()))
538
539class SignatureGeneratorUniHashMixIn(object):
540    def __init__(self, data):
541        self.extramethod = {}
542        # NOTE: The cache only tracks hashes that exist. Hashes that don't
543        # exist are always queried from the server since it is possible for
544        # hashes to appear over time, but much less likely for them to
545        # disappear
546        self.unihash_exists_cache = set()
547        self.username = None
548        self.password = None
549        self.env = {}
550
551        origenv = data.getVar("BB_ORIGENV")
552        for e in HASHSERV_ENVVARS:
553            value = data.getVar(e)
554            if not value and origenv:
555                value = origenv.getVar(e)
556            if value:
557                self.env[e] = value
558        super().__init__(data)
559
560    def get_taskdata(self):
561        return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata()
562
563    def set_taskdata(self, data):
564        self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7]
565        super().set_taskdata(data[7:])
566
567    def get_hashserv_creds(self):
568        if self.username and self.password:
569            return {
570                "username": self.username,
571                "password": self.password,
572            }
573
574        return {}
575
576    @contextmanager
577    def _client_env(self):
578        orig_env = os.environ.copy()
579        try:
580            for k, v in self.env.items():
581                os.environ[k] = v
582
583            yield
584        finally:
585            for k, v in self.env.items():
586                if k in orig_env:
587                    os.environ[k] = orig_env[k]
588                else:
589                    del os.environ[k]
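
    # Usage sketch (illustration only): the proxy/SSL variables captured from
    # the datastore or BB_ORIGENV at __init__ time (see HASHSERV_ENVVARS) are
    # exported only while the context is active, so the hash equivalence
    # client sees them without permanently altering the process environment:
    #
    #   with self._client_env():
    #       client = hashserv.create_client(self.server)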
590
591    @contextmanager
592    def client(self):
593        with self._client_env():
594            if getattr(self, '_client', None) is None:
595                self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
596            yield self._client
597
598    @contextmanager
599    def client_pool(self):
600        with self._client_env():
601            if getattr(self, '_client_pool', None) is None:
602                self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
603            yield self._client_pool
604
605    def reset(self, data):
606        self.__close_clients()
607        return super().reset(data)
608
609    def exit(self):
610        self.__close_clients()
611        return super().exit()
612
613    def __close_clients(self):
614        with self._client_env():
615            if getattr(self, '_client', None) is not None:
616                self._client.close()
617                self._client = None
618            if getattr(self, '_client_pool', None) is not None:
619                self._client_pool.close()
620                self._client_pool = None
621
622    def get_stampfile_hash(self, tid):
623        if tid in self.taskhash:
624            # If a unique hash is reported, use it as the stampfile hash. This
625            # ensures that a task won't be re-run if the taskhash changes
626            # but it would still result in the same output hash
627            unihash = self._get_unihash(tid)
628            if unihash is not None:
629                return unihash
630
631        return super().get_stampfile_hash(tid)
632
633    def set_unihash(self, tid, unihash):
634        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
635        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
636        self.unitaskhashes[key] = (self.taskhash[tid], unihash)
637        self.unihash[tid] = unihash
638
639    def _get_unihash(self, tid, checkkey=None):
640        if tid not in self.tidtopn:
641            return None
642        (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
643        key = mc + ":" + self.tidtopn[tid] + ":" + taskname
644        if key not in self.unitaskhashes:
645            return None
646        if not checkkey:
647            checkkey = self.taskhash[tid]
648        (key, unihash) = self.unitaskhashes[key]
649        if key != checkkey:
650            return None
651        return unihash
652
653    def get_cached_unihash(self, tid):
654        taskhash = self.taskhash[tid]
655
656        # If it's not a setscene task we can just return the taskhash
657        if self.setscenetasks and tid not in self.setscenetasks:
658            self.unihash[tid] = None
659            return taskhash
660
661        # TODO: This cache can grow unbounded. It probably only needs to keep
662        # for each task
663        unihash = self._get_unihash(tid)
664        if unihash is not None:
665            self.unihash[tid] = unihash
666            return unihash
667
668        return None
669
670    def _get_method(self, tid):
671        method = self.method
672        if tid in self.extramethod:
673            method = method + self.extramethod[tid]
674
675        return method
676
677    def unihashes_exist(self, query):
678        if len(query) == 0:
679            return {}
680
681        uncached_query = {}
682        result = {}
683        for key, unihash in query.items():
684            if unihash in self.unihash_exists_cache:
685                result[key] = True
686            else:
687                uncached_query[key] = unihash
688
689        if self.max_parallel <= 1 or len(uncached_query) <= 1:
690            # No parallelism required. Make the query serially with the single client
691            with self.client() as client:
692                uncached_result = {
693                    key: client.unihash_exists(value) for key, value in uncached_query.items()
694                }
695        else:
696            with self.client_pool() as client_pool:
697                uncached_result = client_pool.unihashes_exist(uncached_query)
698
699        for key, exists in uncached_result.items():
700            if exists:
701                self.unihash_exists_cache.add(query[key])
702            result[key] = exists
703
704        return result
705
706    def get_unihash(self, tid):
707        return self.get_unihashes([tid])[tid]
708
709    def get_unihashes(self, tids):
710        """
711        For an iterable of tids, returns a dictionary that maps each tid to a
712        unihash
713        """
714        result = {}
715        queries = {}
716        query_result = {}
717
718        for tid in tids:
719            unihash = self.get_cached_unihash(tid)
720            if unihash:
721                result[tid] = unihash
722            else:
723                queries[tid] = (self._get_method(tid), self.taskhash[tid])
724
725        if len(queries) == 0:
726            return result
727
728        if self.max_parallel <= 1 or len(queries) <= 1:
729            # No parallelism required. Make the query serially with the single client
730            with self.client() as client:
731                for tid, args in queries.items():
732                    query_result[tid] = client.get_unihash(*args)
733        else:
734            with self.client_pool() as client_pool:
735                query_result = client_pool.get_unihashes(queries)
736
737        for tid, unihash in query_result.items():
738            # In the absence of being able to discover a unique hash from the
739            # server, make it equivalent to the taskhash. The unique "hash" only
740            # really needs to be a unique string (not even necessarily a hash), but
741            # making it match the taskhash has a few advantages:
742            #
743            # 1) All of the sstate code that assumes hashes can be the same still works
744            # 2) It provides maximal compatibility with builders that don't use
745            #    an equivalency server
746            # 3) It is easy for multiple independent builders to derive the
747            #    same unique hash from the same input. This means that if the
748            #    independent builders find the same taskhash, but it isn't reported
749            #    to the server, there is a better chance that they will agree on
750            #    the unique hash.
751            taskhash = self.taskhash[tid]
752            if unihash:
753                # A unique hash equal to the taskhash is not very interesting,
754                # so it is reported at debug level 2. If they differ, that
755                # is much more interesting, so it is reported at debug level 1
756                hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
757            else:
758                hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
759                unihash = taskhash
760
761
762            self.set_unihash(tid, unihash)
763            self.unihash[tid] = unihash
764            result[tid] = unihash
765
766        return result
767
768    def report_unihash(self, path, task, d):
769        import importlib
770
771        taskhash = d.getVar('BB_TASKHASH')
772        unihash = d.getVar('BB_UNIHASH')
773        report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
774        tempdir = d.getVar('T')
775        mcfn = d.getVar('BB_FILENAME')
776        tid = mcfn + ':do_' + task
777        key = tid + ':' + taskhash
778
779        if self.setscenetasks and tid not in self.setscenetasks:
780            return
781
782        # This can happen if locked sigs are in action. Detect and just exit
783        if taskhash != self.taskhash[tid]:
784            return
785
786        # Sanity checks
787        cache_unihash = self._get_unihash(tid, checkkey=taskhash)
788        if cache_unihash is None:
789            bb.fatal('%s not in unihash cache. Please report this error' % key)
790
791        if cache_unihash != unihash:
792            bb.fatal("Cache unihash %s doesn't match BB_UNIHASH %s" % (cache_unihash, unihash))
793
794        sigfile = None
795        sigfile_name = "depsig.do_%s.%d" % (task, os.getpid())
796        sigfile_link = "depsig.do_%s" % task
797
798        try:
799            sigfile = open(os.path.join(tempdir, sigfile_name), 'w+b')
800
801            locs = {'path': path, 'sigfile': sigfile, 'task': task, 'd': d}
802
803            if "." in self.method:
804                (module, method) = self.method.rsplit('.', 1)
805                locs['method'] = getattr(importlib.import_module(module), method)
806                outhash = bb.utils.better_eval('method(path, sigfile, task, d)', locs)
807            else:
808                outhash = bb.utils.better_eval(self.method + '(path, sigfile, task, d)', locs)
809
810            try:
811                extra_data = {}
812
813                owner = d.getVar('SSTATE_HASHEQUIV_OWNER')
814                if owner:
815                    extra_data['owner'] = owner
816
817                if report_taskdata:
818                    sigfile.seek(0)
819
820                    extra_data['PN'] = d.getVar('PN')
821                    extra_data['PV'] = d.getVar('PV')
822                    extra_data['PR'] = d.getVar('PR')
823                    extra_data['task'] = task
824                    extra_data['outhash_siginfo'] = sigfile.read().decode('utf-8')
825
826                method = self.method
827                if tid in self.extramethod:
828                    method = method + self.extramethod[tid]
829
830                with self.client() as client:
831                    data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)
832
833                new_unihash = data['unihash']
834
835                if new_unihash != unihash:
836                    hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
837                    bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
838                    self.set_unihash(tid, new_unihash)
839                    d.setVar('BB_UNIHASH', new_unihash)
840                else:
841                    hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
842            except ConnectionError as e:
843                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
844        finally:
845            if sigfile:
846                sigfile.close()
847
848                sigfile_link_path = os.path.join(tempdir, sigfile_link)
849                bb.utils.remove(sigfile_link_path)
850
851                try:
852                    os.symlink(sigfile_name, sigfile_link_path)
853                except OSError:
854                    pass
855
856    def report_unihash_equiv(self, tid, taskhash, wanted_unihash, current_unihash, datacaches):
857        try:
858            extra_data = {}
859            method = self.method
860            if tid in self.extramethod:
861                method = method + self.extramethod[tid]
862
863            with self.client() as client:
864                data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
865
866            hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
867
868            if data is None:
869                bb.warn("Server unable to handle unihash report")
870                return False
871
872            finalunihash = data['unihash']
873
874            if finalunihash == current_unihash:
875                hashequiv_logger.verbose('Task %s unihash %s unchanged by server' % (tid, finalunihash))
876            elif finalunihash == wanted_unihash:
877                hashequiv_logger.verbose('Task %s unihash changed %s -> %s as wanted' % (tid, current_unihash, finalunihash))
878                self.set_unihash(tid, finalunihash)
879                return True
880            else:
881                # TODO: What to do here?
882                hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
883
884        except ConnectionError as e:
885            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
886
887        return False
888
889#
890# Dummy class used for bitbake-selftest
891#
892class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash):
893    name = "TestEquivHash"
894    def init_rundepcheck(self, data):
895        super().init_rundepcheck(data)
896        self.server = data.getVar('BB_HASHSERVE')
897        self.method = "sstate_output_hash"
898        self.max_parallel = 1
899
900def clean_checksum_file_path(file_checksum_tuple):
901    f, cs = file_checksum_tuple
902    if "/./" in f:
903        return "./" + f.split("/./")[1]
904    return f
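
# For example (hypothetical path): an entry whose path contains "/./" is
# reduced to its build-relative part, giving a stable sort key:
#   clean_checksum_file_path(("/work/src/./files/fix.patch", "abc123"))
#       -> "./files/fix.patch"
# Other paths are returned unchanged.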
905
906def dump_this_task(outfile, d):
907    import bb.parse
908    mcfn = d.getVar("BB_FILENAME")
909    task = "do_" + d.getVar("BB_CURRENTTASK")
910    referencestamp = bb.parse.siggen.stampfile_base(mcfn)
911    bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)
912
913def init_colors(enable_color):
914    """Initialise colour dict for passing to compare_sigfiles()"""
915    # First set up the colours
916    colors = {'color_title':   '\033[1m',
917              'color_default': '\033[0m',
918              'color_add':     '\033[0;32m',
919              'color_remove':  '\033[0;31m',
920             }
921    # Leave all keys present but clear the values
922    if not enable_color:
923        for k in colors.keys():
924            colors[k] = ''
925    return colors
926
927def worddiff_str(oldstr, newstr, colors=None):
928    if not colors:
929        colors = init_colors(False)
930    diff = simplediff.diff(oldstr.split(' '), newstr.split(' '))
931    ret = []
932    for change, value in diff:
933        value = ' '.join(value)
934        if change == '=':
935            ret.append(value)
936        elif change == '+':
937            item = '{color_add}{{+{value}+}}{color_default}'.format(value=value, **colors)
938            ret.append(item)
939        elif change == '-':
940            item = '{color_remove}[-{value}-]{color_default}'.format(value=value, **colors)
941            ret.append(item)
942    whitespace_note = ''
943    if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()):
944        whitespace_note = ' (whitespace changed)'
945    return '"%s"%s' % (' '.join(ret), whitespace_note)
946
947def list_inline_diff(oldlist, newlist, colors=None):
948    if not colors:
949        colors = init_colors(False)
950    diff = simplediff.diff(oldlist, newlist)
951    ret = []
952    for change, value in diff:
953        value = ' '.join(value)
954        if change == '=':
955            ret.append("'%s'" % value)
956        elif change == '+':
957            item = '{color_add}+{value}{color_default}'.format(value=value, **colors)
958            ret.append(item)
959        elif change == '-':
960            item = '{color_remove}-{value}{color_default}'.format(value=value, **colors)
961            ret.append(item)
962    return '[%s]' % (', '.join(ret))
963
964# Handle renamed fields
965def handle_renames(data):
966    if 'basewhitelist' in data:
967        data['basehash_ignore_vars'] = data['basewhitelist']
968        del data['basewhitelist']
969    if 'taskwhitelist' in data:
970        data['taskhash_ignore_tasks'] = data['taskwhitelist']
971        del data['taskwhitelist']
972
973
974def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
975    output = []
976
977    colors = init_colors(color)
978    def color_format(formatstr, **values):
979        """
980        Return colour formatted string.
981        NOTE: call with the format string, not an already formatted string
982        containing values (otherwise you could have trouble with { and }
983        characters)
984        """
985        if not formatstr.endswith('{color_default}'):
986            formatstr += '{color_default}'
987        # In newer python 3 versions you can pass both of these directly,
988        # but we only require 3.4 at the moment
989        formatparams = {}
990        formatparams.update(colors)
991        formatparams.update(values)
992        return formatstr.format(**formatparams)
993
994    try:
995        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
996            a_data = json.load(f, object_hook=SetDecoder)
997    except (TypeError, OSError) as err:
998        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
999        raise err
1000    try:
1001        with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
1002            b_data = json.load(f, object_hook=SetDecoder)
1003    except (TypeError, OSError) as err:
1004        bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
1005        raise err
1006
1007    for data in [a_data, b_data]:
1008        handle_renames(data)
1009
1010    def dict_diff(a, b, ignored_vars=set()):
1011        sa = set(a.keys())
1012        sb = set(b.keys())
1013        common = sa & sb
1014        changed = set()
1015        for i in common:
1016            if a[i] != b[i] and i not in ignored_vars:
1017                changed.add(i)
1018        added = sb - sa
1019        removed = sa - sb
1020        return changed, added, removed
1021
1022    def file_checksums_diff(a, b):
1023        from collections import Counter
1024
1025        # Convert lists back to tuples
1026        a = [(f[0], f[1]) for f in a]
1027        b = [(f[0], f[1]) for f in b]
1028
1029        # Compare lists, ensuring we can handle duplicate filenames if they exist
1030        removedcount = Counter(a)
1031        removedcount.subtract(b)
1032        addedcount = Counter(b)
1033        addedcount.subtract(a)
1034        added = []
1035        for x in b:
1036            if addedcount[x] > 0:
1037                addedcount[x] -= 1
1038                added.append(x)
1039        removed = []
1040        changed = []
1041        for x in a:
1042            if removedcount[x] > 0:
1043                removedcount[x] -= 1
1044                for y in added:
1045                    if y[0] == x[0]:
1046                        changed.append((x[0], x[1], y[1]))
1047                        added.remove(y)
1048                        break
1049                else:
1050                    removed.append(x)
1051        added = [x[0] for x in added]
1052        removed = [x[0] for x in removed]
1053        return changed, added, removed
1054
1055    if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
1056        output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
1057        if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
1058            output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))
1059
1060    if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
1061        output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
1062        if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
1063            output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))
1064
1065    if a_data['taskdeps'] != b_data['taskdeps']:
1066        output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
1067
1068    if a_data['basehash'] != b_data['basehash'] and not collapsed:
1069        output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))
1070
1071    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
1072    if changed:
1073        for dep in sorted(changed):
1074            output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
1075            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
1076                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
1077    if added:
1078        for dep in sorted(added):
1079            output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
1080    if removed:
1081        for dep in sorted(removed):
1082            output.append(color_format("{color_title}Dependency on variable %s was removed") % (dep))
1083
1084
1085    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
1086    if changed:
1087        for dep in sorted(changed):
1088            oldval = a_data['varvals'][dep]
1089            newval = b_data['varvals'][dep]
1090            if newval and oldval and ('\n' in oldval or '\n' in newval):
1091                diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='')
1092                # Cut off the first two lines, since we aren't interested in
1093                # the old/new filename (they are blank anyway in this case)
1094                difflines = list(diff)[2:]
1095                if color:
1096                    # Add colour to diff output
1097                    for i, line in enumerate(difflines):
1098                        if line.startswith('+'):
1099                            line = color_format('{color_add}{line}', line=line)
1100                            difflines[i] = line
1101                        elif line.startswith('-'):
1102                            line = color_format('{color_remove}{line}', line=line)
1103                            difflines[i] = line
1104                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines)))
1105            elif newval and oldval and (' ' in oldval or ' ' in newval):
1106                output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors)))
1107            else:
1108                output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
1109
1110    if not 'file_checksum_values' in a_data:
1111         a_data['file_checksum_values'] = []
1112    if not 'file_checksum_values' in b_data:
1113         b_data['file_checksum_values'] = []
1114
1115    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
1116    if changed:
1117        for f, old, new in changed:
1118            output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new))
1119    if added:
1120        for f in added:
1121            output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f))
1122    if removed:
1123        for f in removed:
1124            output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f))
1125
1126    if not 'runtaskdeps' in a_data:
1127         a_data['runtaskdeps'] = {}
1128    if not 'runtaskdeps' in b_data:
1129         b_data['runtaskdeps'] = {}
1130
1131    if not collapsed:
1132        if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
1133            changed = ["Number of task dependencies changed"]
1134        else:
1135            changed = []
1136            for idx, task in enumerate(a_data['runtaskdeps']):
1137                a = a_data['runtaskdeps'][idx]
1138                b = b_data['runtaskdeps'][idx]
1139                if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
1140                    changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
1141
1142        if changed:
1143            clean_a = a_data['runtaskdeps']
1144            clean_b = b_data['runtaskdeps']
1145            if clean_a != clean_b:
1146                output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
1147            else:
1148                output.append(color_format("{color_title}runtaskdeps changed:"))
1149            output.append("\n".join(changed))
1150
1151
1152    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
1153        a = a_data['runtaskhashes']
1154        b = b_data['runtaskhashes']
1155        changed, added, removed = dict_diff(a, b)
1156        if added:
1157            for dep in sorted(added):
1158                bdep_found = False
1159                if removed:
1160                    for bdep in removed:
1161                        if b[dep] == a[bdep]:
1162                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
1163                            bdep_found = True
1164                if not bdep_found:
1165                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
1166        if removed:
1167            for dep in sorted(removed):
1168                adep_found = False
1169                if added:
1170                    for adep in added:
1171                        if b[adep] == a[dep]:
1172                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
1173                            adep_found = True
1174                if not adep_found:
1175                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
1176        if changed:
1177            for dep in sorted(changed):
1178                if not collapsed:
1179                    output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
1180                if callable(recursecb):
1181                    recout = recursecb(dep, a[dep], b[dep])
1182                    if recout:
1183                        if collapsed:
1184                            output.extend(recout)
1185                        else:
1186                            # If a dependent hash changed, might as well print the line above and then defer to the changes in
1187                            # that hash since in all likelihood, they're the same changes this task also saw.
1188                            output = [output[-1]] + recout
1189                            break
1190
1191    a_taint = a_data.get('taint', None)
1192    b_taint = b_data.get('taint', None)
1193    if a_taint != b_taint:
1194        if a_taint and a_taint.startswith('nostamp:'):
1195            a_taint = a_taint.replace('nostamp:', 'nostamp(uuid4):')
1196        if b_taint and b_taint.startswith('nostamp:'):
1197            b_taint = b_taint.replace('nostamp:', 'nostamp(uuid4):')
1198        output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint))
1199
1200    return output
1201
1202
1203def calc_basehash(sigdata):
1204    task = sigdata['task']
1205    basedata = sigdata['varvals'][task]
1206
1207    if basedata is None:
1208        basedata = ''
1209
1210    alldeps = sigdata['taskdeps']
1211    for dep in sorted(alldeps):
1212        basedata = basedata + dep
1213        val = sigdata['varvals'][dep]
1214        if val is not None:
1215            basedata = basedata + str(val)
1216
1217    return hashlib.sha256(basedata.encode("utf-8")).hexdigest()
1218
1219def calc_taskhash(sigdata):
1220    data = sigdata['basehash']
1221
1222    for dep in sigdata['runtaskdeps']:
1223        data = data + sigdata['runtaskhashes'][dep]
1224
1225    for c in sigdata['file_checksum_values']:
1226        if c[1]:
1227            if "./" in c[0]:
1228                data = data + c[0]
1229            data = data + c[1]
1230
1231    if 'taint' in sigdata:
1232        if 'nostamp:' in sigdata['taint']:
1233            data = data + sigdata['taint'][8:]
1234        else:
1235            data = data + sigdata['taint']
1236
1237    return hashlib.sha256(data.encode("utf-8")).hexdigest()
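
# Verification sketch (assumed workflow, mirroring dump_sigfile() below and
# the cross-checks in dump_sigtask()): the calc_* helpers recompute the hashes
# from a dumped sigdata/siginfo file so they can be compared with the stored
# values:
#
#   with bb.compress.zstd.open(path, "rt", encoding="utf-8", num_threads=1) as f:
#       sigdata = json.load(f, object_hook=SetDecoder)
#   handle_renames(sigdata)
#   assert calc_basehash(sigdata) == sigdata['basehash']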
1238
1239
1240def dump_sigfile(a):
1241    output = []
1242
1243    try:
1244        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
1245            a_data = json.load(f, object_hook=SetDecoder)
1246    except (TypeError, OSError) as err:
1247        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
1248        raise err
1249
1250    handle_renames(a_data)
1251
1252    output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))
1253
1254    output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))
1255
1256    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
1257
1258    output.append("basehash: %s" % (a_data['basehash']))
1259
1260    for dep in sorted(a_data['gendeps']):
1261        output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))
1262
1263    for dep in sorted(a_data['varvals']):
1264        output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
1265
1266    if 'runtaskdeps' in a_data:
1267        output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))
1268
1269    if 'file_checksum_values' in a_data:
1270        output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))
1271
1272    if 'runtaskhashes' in a_data:
1273        for dep in sorted(a_data['runtaskhashes']):
1274            output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
1275
1276    if 'taint' in a_data:
1277        if a_data['taint'].startswith('nostamp:'):
1278            msg = a_data['taint'].replace('nostamp:', 'nostamp(uuid4):')
1279        else:
1280            msg = a_data['taint']
1281        output.append("Tainted (by forced/invalidated task): %s" % msg)
1282
1283    if 'task' in a_data:
1284        computed_basehash = calc_basehash(a_data)
1285        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
1286    else:
1287        output.append("Unable to compute base hash")
1288
1289    computed_taskhash = calc_taskhash(a_data)
1290    output.append("Computed task hash is %s" % computed_taskhash)
1291
1292    return output
1293