# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives for:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) The patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    You can set the paths that you'd like to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) The environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) Whether to output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) Filtering by license: recipes whose license matches
#    COPYLEFT_LICENSE_INCLUDE will be included, and those matching
#    COPYLEFT_LICENSE_EXCLUDE will be excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) The recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#
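# A minimal usage sketch (illustrative values, typically set in local.conf;
# any of the ARCHIVER_MODE flags above can be combined):
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"
#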

# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"


do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"

python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))


    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc-related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched resp. do_ar_configured depend on it, but for 'original'
        # we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have the "do_configure" task, so we need to use "do_preconfigure"
        # instead.
        def hasTask(task):
            return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" in d.getVar('PACKAGE_CLASSES'):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
        else:
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")
}
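
# A sketch of the effect of the anonymous python above: with the default
# ARCHIVER_MODE[src] = "patched", each recipe that passes the copyleft filter
# effectively gains the dependency
#   do_deploy_archives[depends] += "${PN}:do_ar_patched"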

# Takes all the sources for a recipe and puts them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
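    # For example (hypothetical URL), "git://host/repo;destsuffix=${S}/ext;name=ext"
    # would be rewritten to "git://host/repo;name=ext" before fetching.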
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded
    fetch = bb.fetch2.Fetch(urls, d)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            urltype, _, location, _, _, params = bb.fetch2.decodeurl(url)
            name = params.get('name', '')
            if urltype.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
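    # Each series line names a patch file and its strip level, e.g.
    # "0001-fix-build.patch -p1" (hypothetical patch name).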
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', ar_workdir)
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure will remove
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; instead we archive the already-configured ${S}.
        elif pn != 'libtool-native':
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir,
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

def exclude_useless_paths(tarinfo):
    if tarinfo.isdir():
        if tarinfo.name.endswith(('/temp', '/patches', '/.pc')):
            return None
        elif tarinfo.name in ('temp', 'patches', '.pc'):
            return None
    return tarinfo
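
# For example, given a tree rooted at "foo-1.0" (hypothetical), the filter
# above drops the directories "foo-1.0/patches" and "foo-1.0/.pc" (quilt
# metadata) from the tarball; regular files are always kept.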

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    Create a tarball from srcdir.
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if d.getVar('SRC_URI') == "":
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    with tarfile.open(tarname, 'w:gz') as tar:
        tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
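
# For example, with PF = "bash-5.0-r0" (hypothetical) and suffix "patched",
# create_tarball() writes "bash-5.0-r0-patched.tar.gz" into ar_outdir.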

# Create the .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude option can't exclude files by path, so we copy
    # the patched source, and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

def is_work_shared(d):
    pn = d.getVar('PN')
    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
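
# For example, kernel recipes such as linux-yocto (which inherit kernel.bbclass)
# and the gcc-source-${PV} recipes build in a shared work directory, so
# is_work_shared() returns True for them.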

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            ['patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require the source to be in work-shared,
    # so we don't change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing 'WORKDIR' also changes 'B', so create the 'B' directory
        # for subsequent tasks that may require it (e.g. some recipes'
        # do_patch expects 'B' to exist).
        bb.utils.mkdirhier(d.getVar('B'))

        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    Archive the recipe, including .bb and .inc files.
    """
    import re
    import shutil

    require_re = re.compile(r"require\s+(.+)")
    include_re = re.compile(r"include\s+(.+)")
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If the recipe name is aa, we need to match files like aa.bbappend and
    # aa_1.1.bbappend. Files like aa1.bbappend or aa1_1.1.bbappend must be
    # excluded.
    bbappend_re = re.compile(r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile(r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    with open(bbfile, 'r') as f:
        for line in f:
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
                incfile = bb.utils.which(bbpath, incfile)
                if incfile:
                    shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    Dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'),
        '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build

python () {
    # Add tasks in the correct order, specifically for linux-yocto, to avoid a
    # race condition. sstatesig.py:sstate_rundepfilter has special support that
    # excludes this dependency so that do_kernel_configme does not need to run
    # again when do_unpack_and_patch gets added or removed (by adding or
    # removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}