xref: /openbmc/openbmc/poky/meta/lib/oe/sstatesig.py (revision 82c905dc)
1#
2# SPDX-License-Identifier: GPL-2.0-only
3#
import os

import bb.siggen
import bb.runqueue
import oe
6
def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
    # Return True if we should keep the dependency, False to drop it
    def inherits(fn, fragment):
        # True when the recipe at fn inherits a class whose path contains fragment
        return fragment in " ".join(dataCache.inherits[fn])

    def is_native(pn):
        return pn.endswith("-native")

    def is_cross(pn):
        return "-cross-" in pn

    def is_nativesdk(pn):
        return pn.startswith("nativesdk-")

    def is_kernel(fn):
        return inherits(fn, "/module-base.bbclass") or inherits(fn, "/linux-kernel-base.bbclass")

    # (Almost) always keep our own inter-task dependencies. The sole
    # exception is the special do_kernel_configme->do_unpack_and_patch
    # dependency from archiver.bbclass.
    if recipename == depname:
        return not (task == "do_kernel_configme" and dep.endswith(".do_unpack_and_patch"))

    # Exclude well defined recipe->dependency pairs
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Check for the special wildcard form "*->depname"
    if "*->%s" % depname in siggen.saferecipedeps and recipename != depname:
        return False

    # Don't change native/cross/nativesdk recipe dependencies any further
    if is_native(recipename) or is_cross(recipename) or is_nativesdk(recipename):
        return True

    # Only target packages beyond here

    # allarch packagegroups are assumed to have well behaved names which
    # don't change between architectures/tunes
    if inherits(fn, "/packagegroup.bbclass") and inherits(fn, "/allarch.bbclass") and not is_native(depname):
        return False

    # Exclude well defined machine specific configurations which don't change ABI
    if depname in siggen.abisaferecipes and not inherits(fn, "/image.bbclass"):
        return False

    # Kernel modules are well namespaced. We don't want to depend on the
    # kernel's checksum if we're just doing an RRECOMMENDS_xxx =
    # "kernel-module-*" (not least because that checksum is machine
    # specific), so if we're not a kernel/module recipe ourselves and we
    # recommend a kernel-module, drop the dependency.
    depfn = dep.rsplit(":", 1)[0]
    if dataCache and is_kernel(depfn) and not is_kernel(fn):
        for pkg in dataCache.runrecs[fn]:
            if "kernel-module-" in " ".join(dataCache.runrecs[fn][pkg]):
                return False

    # Default to keeping the dependency
    return True
70
def sstate_lockedsigs(d):
    """Parse the SIGGEN_LOCKEDSIGS_<type> variables listed in
    SIGGEN_LOCKEDSIGS_TYPES into a nested mapping:
    {pn: {task: [hash, source-variable-name]}}."""
    sigs = {}
    for sigtype in (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split():
        varname = "SIGGEN_LOCKEDSIGS_%s" % sigtype
        for entry in (d.getVar(varname) or "").split():
            # Each entry has the form "pn:task:hash"
            pn, task, h = entry.split(":", 2)
            sigs.setdefault(pn, {})[task] = [h, varname]
    return sigs
83
class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    """OE variant of bitbake's basic (non-hash-carrying) signature
    generator; adds the OE runtime-dependency filtering."""
    name = "OEBasic"

    def init_rundepcheck(self, data):
        # Recipes whose machine specific configuration doesn't change ABI
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        # Explicit "recipe->dep" pairs that are safe to exclude
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
        # Delegate to the shared module-level filter
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
92
class SignatureGeneratorOEBasicHashMixIn(object):
    """Mixin layering OE-specific behaviour on top of bitbake's hash-based
    signature generators: locked signatures (SIGGEN_LOCKEDSIGS_*),
    unlocked-recipe exemptions, and safe-dependency filtering via
    sstate_rundepfilter()."""

    def init_rundepcheck(self, data):
        # Recipes whose machine specific configuration doesn't change ABI
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
        # Explicit "recipe->dep" pairs that are safe to exclude
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
        # {pn: {task: [hash, varname]}} parsed from SIGGEN_LOCKEDSIGS_*
        self.lockedsigs = sstate_lockedsigs(data)
        # tid -> locked hash, or False once a tid is known not to be locked
        self.lockedhashes = {}
        # fn -> PN and fn -> hashfn caches, populated in get_taskhash()
        self.lockedpnmap = {}
        self.lockedhashfn = {}
        self.machine = data.getVar("MACHINE")
        # Locked-vs-computed mismatch messages, reported by checkhashes()
        self.mismatch_msgs = []
        # Recipes exempt from locked signatures; stored as a dict purely for
        # fast membership tests (values unused)
        self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
                                "").split()
        self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
        self.buildarch = data.getVar('BUILD_ARCH')
        # When True, get_unihash() bypasses the locked-hash override; used to
        # avoid recursion/corruption while the parent class computes hashes
        self._internal = False
        pass

    def tasks_resolved(self, virtmap, virtpnmap, dataCache):
        """Rewrite any virtual/xxx names in the exclusion lists to the
        actual providing PN, once providers have been resolved."""
        # Translate virtual/xxx entries to PN values
        newabisafe = []
        for a in self.abisaferecipes:
            if a in virtpnmap:
                newabisafe.append(virtpnmap[a])
            else:
                newabisafe.append(a)
        self.abisaferecipes = newabisafe
        newsafedeps = []
        for a in self.saferecipedeps:
            # Entries have the form "recipe->dep"; translate each side
            a1, a2 = a.split("->")
            if a1 in virtpnmap:
                a1 = virtpnmap[a1]
            if a2 in virtpnmap:
                a2 = virtpnmap[a2]
            newsafedeps.append(a1 + "->" + a2)
        self.saferecipedeps = newsafedeps

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
        # Delegate to the shared module-level filter
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)

    def get_taskdata(self):
        # Prepend our extra state to whatever the base class serializes
        return (self.lockedpnmap, self.lockedhashfn, self.lockedhashes) + super().get_taskdata()

    def set_taskdata(self, data):
        # Mirror of get_taskdata(): peel off our three items, pass the rest up
        self.lockedpnmap, self.lockedhashfn, self.lockedhashes = data[:3]
        super().set_taskdata(data[3:])

    def dump_sigs(self, dataCache, options):
        # Write a locked-sigs include file into the current directory, then
        # continue the MRO search *after* SignatureGeneratorBasicHash so its
        # implementation of dump_sigs is deliberately skipped.
        sigfile = os.getcwd() + "/locked-sigs.inc"
        bb.plain("Writing locked sigs to %s" % sigfile)
        self.dump_lockedsigs(sigfile)
        return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)

    def prep_taskhash(self, tid, deps, dataCache):
        super().prep_taskhash(tid, deps, dataCache)
        # When hash equivalence is active (extramethod exists), make the
        # build host architecture part of the method for native/cross tasks,
        # since their output depends on the build machine.
        if hasattr(self, "extramethod"):
            (_, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
            inherits = " ".join(dataCache.inherits[fn])
            if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
                self.extramethod[tid] = ":" + self.buildarch

    def get_taskhash(self, tid, deps, dataCache):
        """Compute the task hash, substituting the locked hash from
        SIGGEN_LOCKEDSIGS_* where one applies. Caches the decision per tid
        in self.lockedhashes (False == not locked)."""
        if tid in self.lockedhashes:
            if self.lockedhashes[tid]:
                return self.lockedhashes[tid]
            else:
                return super().get_taskhash(tid, deps, dataCache)

        # get_taskhash will call get_unihash internally in the parent class, we
        # need to disable our filter of it whilst this runs else
        # incorrect hashes can be calculated.
        self._internal = True
        h = super().get_taskhash(tid, deps, dataCache)
        self._internal = False

        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)

        recipename = dataCache.pkg_fn[fn]
        self.lockedpnmap[fn] = recipename
        self.lockedhashfn[fn] = dataCache.hashfn[fn]

        unlocked = False
        if recipename in self.unlockedrecipes:
            unlocked = True
        else:
            def recipename_from_dep(dep):
                fn = bb.runqueue.fn_from_tid(dep)
                return dataCache.pkg_fn[fn]

            # If any unlocked recipe is in the direct dependencies then the
            # current recipe should be unlocked as well.
            # Note: "x in y" is a substring test here, so an unlocked name
            # also matches dependency recipe names that merely contain it.
            depnames = [ recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)]
            if any(x in y for y in depnames for x in self.unlockedrecipes):
                self.unlockedrecipes[recipename] = ''
                unlocked = True

        if not unlocked and recipename in self.lockedsigs:
            if task in self.lockedsigs[recipename]:
                h_locked = self.lockedsigs[recipename][task][0]
                var = self.lockedsigs[recipename][task][1]
                self.lockedhashes[tid] = h_locked
                # _internal guards get_unihash() so we read the real unihash,
                # not the locked value we just stored
                self._internal = True
                unihash = self.get_unihash(tid)
                self._internal = False
                #bb.warn("Using %s %s %s" % (recipename, task, h))

                # Only complain when the computed hash differs AND the locked
                # hash isn't already known-equivalent via the unihash
                if h != h_locked and h_locked != unihash:
                    self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                          % (recipename, task, h, h_locked, var))

                return h_locked

        self.lockedhashes[tid] = False
        #bb.warn("%s %s %s" % (recipename, task, h))
        return h

    def get_stampfile_hash(self, tid):
        # Locked tasks stamp with the locked hash
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return self.lockedhashes[tid]
        return super().get_stampfile_hash(tid)

    def get_unihash(self, tid):
        # Serve the locked hash unless we're inside a guarded internal call
        # (see get_taskhash)
        if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
            return self.lockedhashes[tid]
        return super().get_unihash(tid)

    def dump_sigtask(self, fn, task, stampbase, runtime):
        # Don't dump siginfo for locked tasks; otherwise skip
        # SignatureGeneratorBasicHash's version by starting the MRO search
        # after it (matching dump_sigs above)
        tid = fn + ":" + task
        if tid in self.lockedhashes and self.lockedhashes[tid]:
            return
        super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)

    def dump_lockedsigs(self, sigfile, taskfilter=None):
        """Write the current task hashes as SIGGEN_LOCKEDSIGS_* variables to
        sigfile, grouped by type, optionally restricted to the tids in
        taskfilter."""
        types = {}
        for tid in self.runtaskdeps:
            if taskfilter:
                if not tid in taskfilter:
                    continue
            fn = bb.runqueue.fn_from_tid(tid)
            # Derive the group type from field 5 of the colon-separated
            # portion of the stored hashfn (appears to be the package arch
            # component -- confirm against bb.parse hashfn format)
            t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
            t = 't-' + t.replace('_', '-')
            if t not in types:
                types[t] = []
            types[t].append(tid)

        with open(sigfile, "w") as f:
            l = sorted(types)
            for t in l:
                f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
                types[t].sort()
                # Sort entries by recipe name for stable, reviewable output
                sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
                for tid in sortedtid:
                    (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
                    if tid not in self.taskhash:
                        continue
                    f.write("    " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                f.write('    "\n')
            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))

    def dump_siglist(self, sigfile):
        """Write one "pn:task fn taskhash" line per known task to sigfile,
        sorted by (pn, task, fn)."""
        with open(sigfile, "w") as f:
            tasks = []
            for taskitem in self.taskhash:
                (fn, task) = taskitem.rsplit(":", 1)
                pn = self.lockedpnmap[fn]
                tasks.append((pn, task, fn, self.taskhash[taskitem]))
            for (pn, task, fn, taskhash) in sorted(tasks):
                f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))

    def checkhashes(self, sq_data, missed, found, d):
        """Report locked-signature problems after an sstate availability
        check: signature mismatches accumulated in mismatch_msgs, and locked
        tasks whose objects were not found in the sstate cache. Severity is
        controlled by SIGGEN_LOCKEDSIGS_TASKSIG_CHECK and
        SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK ('warn' or 'error')."""
        warn_msgs = []
        error_msgs = []
        sstate_missing_msgs = []

        for tid in sq_data['hash']:
            if tid not in found:
                for pn in self.lockedsigs:
                    taskname = bb.runqueue.taskname_from_tid(tid)
                    if sq_data['hash'][tid] in iter(self.lockedsigs[pn].values()):
                        # do_shared_workdir is exempt from the missing check
                        if taskname == 'do_shared_workdir':
                            continue
                        sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
                                               % (pn, taskname, sq_data['hash'][tid]))

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
        if checklevel == 'warn':
            warn_msgs += self.mismatch_msgs
        elif checklevel == 'error':
            error_msgs += self.mismatch_msgs

        checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK")
        if checklevel == 'warn':
            warn_msgs += sstate_missing_msgs
        elif checklevel == 'error':
            error_msgs += sstate_missing_msgs

        if warn_msgs:
            bb.warn("\n".join(warn_msgs))
        if error_msgs:
            bb.fatal("\n".join(error_msgs))
292
class SignatureGeneratorOEBasicHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    """Standard OE signature generator: bitbake's basic hash generator plus
    the OE locked-sigs/dependency-filtering mixin behaviour."""
    name = "OEBasicHash"
295
class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorUniHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
    """OE signature generator with hash-equivalence support (unihashes
    served by a hash equivalence server)."""
    name = "OEEquivHash"

    def init_rundepcheck(self, data):
        super().init_rundepcheck(data)
        # Both settings are mandatory for hash equivalence; abort early if
        # either is missing.
        for attr, var in (('server', 'BB_HASHSERVE'),
                          ('method', 'SSTATE_HASHEQUIV_METHOD')):
            value = data.getVar(var)
            if not value:
                bb.fatal("OEEquivHash requires %s to be set" % var)
            setattr(self, attr, value)
307
# Insert these classes into siggen's namespace so it can see and select them
# (registration happens as a side effect of importing this module)
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
312
313
def find_siginfo(pn, taskname, taskhashlist, d):
    """Find signature data files for comparison purposes.

    Arguments:
        pn           -- recipe name, or a "path/recipe.bb:task" key when
                        taskname is empty (pn/taskname are then derived)
        taskname     -- task to look up (e.g. "do_compile")
        taskhashlist -- list of task hashes to match, or None/empty
        d            -- datastore used to expand stamp/sstate locations

    Returns {taskhash: path} when taskhashlist is given, otherwise
    {path: mtime} for all matching signature files found.
    """
    import glob

    if not taskname:
        # We have to derive pn and taskname from a "recipe.bb:task" key
        key = pn
        splitit = key.split('.bb:')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'

    hashfiles = {}
    filedates = {}

    def get_hashval(siginfo):
        # sstate files carry the hash as ".../<hash>_<task>.siginfo";
        # stamp-dir files end in ".sigdata.<hash>"
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    # First search in stamps dir
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP')
    if pn.startswith("gcc-source"):
        # gcc-source shared workdir is a special case :(
        stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")

    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False
    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            try:
                filedates[fullpath] = os.stat(fullpath).st_mtime
            except OSError:
                continue
            hashval = get_hashval(fullpath)
            hashfiles[hashval] = fullpath

    if not taskhashlist or (len(filedates) < 2 and not foundall):
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['?' * 64]
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            swspec = localdata.getVar('SSTATE_SWSPEC')
            if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
            elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = taskname[3:]
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)

            matchedfiles = glob.glob(filespec)
            for fullpath in matchedfiles:
                actual_hashval = get_hashval(fullpath)
                if actual_hashval in hashfiles:
                    continue
                hashfiles[hashval] = fullpath
                if not taskhashlist:
                    # Mirror the stamps-dir loop: skip files that vanish
                    # between glob and stat
                    try:
                        filedates[fullpath] = os.stat(fullpath).st_mtime
                    except OSError:
                        continue

    if taskhashlist:
        return hashfiles
    else:
        return filedates
406
# Expose find_siginfo through the siggen module namespace as well
bb.siggen.find_siginfo = find_siginfo
408
409
def sstate_get_manifest_filename(task, d):
    """
    Return the sstate manifest file path for a particular task.
    Also returns the datastore that can be used to query related variables.
    """
    datastore = d.createCopy()
    extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
    if extrainf:
        # Propagate the task's extra stamp info into the copied datastore
        # (presumably consumed when SSTATE_MANFILEPREFIX expands -- confirm)
        datastore.setVar("SSTATE_MANMACH", extrainf)
    manifest = datastore.expand("${SSTATE_MANFILEPREFIX}.%s" % task)
    return (manifest, datastore)
420
def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
    """Locate the sstate manifest for (taskdata, taskname), probing each
    candidate package architecture in turn. Returns (path, datastore), or
    (None, datastore) with a warning when no manifest exists."""
    datastore = d
    variant = ''
    curr_variant = ''
    if d.getVar("BBEXTENDCURR") == "multilib":
        curr_variant = d.getVar("BBEXTENDVARIANT")
        if "virtclass-multilib" not in d.getVar("OVERRIDES"):
            curr_variant = "invalid"
    if taskdata2.startswith("virtual:multilib"):
        variant = taskdata2.split(":")[2]
    if curr_variant != variant:
        # Need a datastore configured for the other multilib variant;
        # cache it since building one is not free
        if variant not in multilibcache:
            multilibcache[variant] = oe.utils.get_multilib_datastore(variant, d)
        datastore = multilibcache[variant]

    # Candidate package architectures, most specific first
    if taskdata.endswith("-native"):
        pkgarchs = ["${BUILD_ARCH}"]
    elif taskdata.startswith("nativesdk-"):
        pkgarchs = ["${SDK_ARCH}_${SDK_OS}", "allarch"]
    elif "-cross-canadian" in taskdata:
        pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
    elif "-cross-" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
    elif "-crosssdk" in taskdata:
        pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
    else:
        extra = reversed(datastore.getVar("PACKAGE_EXTRA_ARCHS").split())
        pkgarchs = ['${MACHINE_ARCH}', *extra, 'allarch',
                    '${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}']

    manifest = None
    for pkgarch in pkgarchs:
        manifest = datastore.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
        if os.path.exists(manifest):
            return manifest, datastore
    bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, datastore.expand(" ".join(pkgarchs)), variant))
    return None, datastore
458
def OEOuthashBasic(path, sigfile, task, d):
    """
    Basic output hash function

    Calculates the output hash of a task by hashing all output file metadata,
    and file contents.

    Arguments:
        path    -- directory tree to hash (the cwd is temporarily changed
                   there so hashed names are relative)
        sigfile -- optional open file; when set, the exact hashed text is
                   also written to it (for debugging/comparison)
        task    -- task name, mixed into the hash
        d       -- datastore; provides SSTATE_PKGSPEC and the optional
                   HASHEQUIV_HASH_VERSION salt

    Returns the sha256 hex digest over an "ls -l"-like listing of the tree.
    """
    import hashlib
    import stat
    import pwd
    import grp

    def update_hash(s):
        # Feed text into the running hash and, optionally, the signature file
        s = s.encode('utf-8')
        h.update(s)
        if sigfile:
            sigfile.write(s)

    h = hashlib.sha256()
    prev_dir = os.getcwd()
    # Owner/group names are only included when running under pseudo
    # (PSEUDO_DISABLED == '0'), where they are deterministic
    include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
    extra_content = d.getVar('HASHEQUIV_HASH_VERSION')

    def process(path):
        # Hash one filesystem entry as a text line: type char, permission
        # bits, [owner group], device-or-size, content hash, name, and the
        # symlink target where applicable
        s = os.lstat(path)

        if stat.S_ISDIR(s.st_mode):
            update_hash('d')
        elif stat.S_ISCHR(s.st_mode):
            update_hash('c')
        elif stat.S_ISBLK(s.st_mode):
            update_hash('b')
        elif stat.S_ISSOCK(s.st_mode):
            update_hash('s')
        elif stat.S_ISLNK(s.st_mode):
            update_hash('l')
        elif stat.S_ISFIFO(s.st_mode):
            update_hash('p')
        else:
            update_hash('-')

        def add_perm(mask, on, off='-'):
            if mask & s.st_mode:
                update_hash(on)
            else:
                update_hash(off)

        add_perm(stat.S_IRUSR, 'r')
        add_perm(stat.S_IWUSR, 'w')
        if stat.S_ISUID & s.st_mode:
            add_perm(stat.S_IXUSR, 's', 'S')
        else:
            add_perm(stat.S_IXUSR, 'x')

        add_perm(stat.S_IRGRP, 'r')
        add_perm(stat.S_IWGRP, 'w')
        if stat.S_ISGID & s.st_mode:
            add_perm(stat.S_IXGRP, 's', 'S')
        else:
            add_perm(stat.S_IXGRP, 'x')

        add_perm(stat.S_IROTH, 'r')
        add_perm(stat.S_IWOTH, 'w')
        if stat.S_ISVTX & s.st_mode:
            update_hash('t')
        else:
            add_perm(stat.S_IXOTH, 'x')

        if include_owners:
            try:
                update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
            except KeyError:
                bb.warn("KeyError in %s" % path)
                raise

        update_hash(" ")
        if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
            update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
        else:
            update_hash(" " * 9)

        update_hash(" ")
        if stat.S_ISREG(s.st_mode):
            update_hash("%10d" % s.st_size)
        else:
            update_hash(" " * 10)

        update_hash(" ")
        fh = hashlib.sha256()
        if stat.S_ISREG(s.st_mode):
            # Hash file contents. Use a dedicated name for the file object:
            # the previous code shadowed the datastore argument 'd' here.
            with open(path, 'rb') as infile:
                for chunk in iter(lambda: infile.read(4096), b""):
                    fh.update(chunk)
            update_hash(fh.hexdigest())
        else:
            update_hash(" " * len(fh.hexdigest()))

        update_hash(" %s" % path)

        if stat.S_ISLNK(s.st_mode):
            update_hash(" -> %s" % os.readlink(path))

        update_hash("\n")

    try:
        os.chdir(path)

        update_hash("OEOuthashBasic\n")
        if extra_content:
            update_hash(extra_content + "\n")

        # It is only currently useful to get equivalent hashes for things that
        # can be restored from sstate. Since the sstate object is named using
        # SSTATE_PKGSPEC and the task name, those should be included in the
        # output hash calculation.
        update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
        update_hash("task=%s\n" % task)

        for root, dirs, files in os.walk('.', topdown=True):
            # Sort directories and files to ensure consistent ordering when
            # recursing
            dirs.sort()
            files.sort()

            # Process this directory and all its child files
            process(root)
            for f in files:
                if f == 'fixmepath':
                    # 'fixmepath' is deliberately excluded from the hash
                    continue
                process(os.path.join(root, f))
    finally:
        os.chdir(prev_dir)

    return h.hexdigest()
594
595
596