# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
# 1) The original (unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) The patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) The configured source: ARCHIVER_MODE[src] = "configured"
# 4) The patches applied between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    Paths to exclude from the diff can be set with:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) The environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) Whether to output a .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) Filtering by license: recipes whose license matches
#    COPYLEFT_LICENSE_INCLUDE are included, and those matching
#    COPYLEFT_LICENSE_EXCLUDE are excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) The recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#
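# Example (a minimal sketch of a local.conf setup; the chosen mode values are
# just illustrations of the options listed above):
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#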

# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"


do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"

# This is a convenience for the shell scripts to use.

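# The anonymous python below wires up the per-recipe task dependencies:
# based on ARCHIVER_MODE and the copyleft filter it decides which do_ar_*
# tasks feed do_deploy_archives (and do_package_write_rpm for SRPM output).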
python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))


    # glibc-locale: the do_fetch, do_unpack and do_patch tasks have been
    # deleted, so avoid archiving the source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc-related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    def hasTask(task):
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched and do_ar_configured, respectively, depend on it,
        # but for 'original' we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # would cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the dependency here instead.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have a "do_configure" task, so we need to use "do_preconfigure".
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}

# Take all the sources for a recipe and put them in ARCHIVER_OUTDIR.
# Files in SRC_URI are copied directly; anything that's a directory
# (e.g. a git repository) is "unpacked" under ARCHIVER_WORKDIR and then
# put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded
    fetch = bb.fetch2.Fetch(urls, d)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as a suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net; in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

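# Archive the patched source (${S} after do_patch) as ${PF}-patched.tar.gz
# in ARCHIVER_OUTDIR.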
python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', ar_workdir)
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

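# Archive the configured source: run do_configure (or do_preconfigure for
# gcc-source recipes) in a private WORKDIR and tar up the resulting ${S}.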
python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; instead we archive the already configured ${S}.
        elif pn != 'libtool-native':
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR so do_configure runs in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

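# tarfile filter callback: drop the temp/, patches/ and .pc/ directories
# from the archives created by create_tarball() below.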
def exclude_useless_paths(tarinfo):
    if tarinfo.isdir():
        if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
            return None
        elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
            return None
    return tarinfo

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    create the tarball from srcdir
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
    tar.close()

# Create a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude option can't exclude files by path, so we copy
    # the patched source and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

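# Kernel and gcc-source recipes keep their sources in the shared work
# directory; the archiver must not relocate or re-patch those sources.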
def is_work_shared(d):
    pn = d.getVar('PN')
    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require the source to stay in work-shared,
    # so we don't change WORKDIR for those recipes.
    if not is_work_shared(d):
        # Change the WORKDIR so do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing 'WORKDIR' also changes 'B', so create the 'B' directory in
        # case following tasks need it (e.g. some recipes' do_patch requires
        # 'B' to exist).
        bb.utils.mkdirhier(d.getVar('B'))

        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
            '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If the recipe name is aa, we need to match files like aa.bbappend and
    # aa_1.1.bbappend. Files like aa1.bbappend or aa1_1.1.bbappend must be
    # excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
                incfile = bb.utils.which(bbpath, incfile)
                if incfile:
                    shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
        '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

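# do_deploy_archives is a shared-state task: archives staged under
# ARCHIVER_TOPDIR are published to DEPLOY_DIR_SRC by the sstate machinery
# through the sstate-inputdirs/sstate-outputdirs mappings below.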
SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build

python () {
    # Add tasks in the correct order, specifically for linux-yocto, to avoid a
    # race condition. sstatesig.py:sstate_rundepfilter has special support
    # that excludes this dependency so that do_kernel_configme does not need
    # to run again when do_unpack_and_patch gets added or removed (by adding
    # or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}