# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) the patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    You can also set what should be excluded from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) the environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) the recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) whether to output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) filtering by license: recipes whose license is in
#    COPYLEFT_LICENSE_INCLUDE are included, and those in
#    COPYLEFT_LICENSE_EXCLUDE are excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) the recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#
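# For example, a minimal local.conf sketch (illustrative values, not a
# recommendation) that enables this class and archives the patched source
# together with a diff and the recipe metadata:
#
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "patched"
#   ARCHIVER_MODE[diff] = "1"
#   ARCHIVER_MODE[recipe] = "1"
#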

# Don't filter the license by default
COPYLEFT_LICENSE_INCLUDE ?= ''
COPYLEFT_LICENSE_EXCLUDE ?= ''
# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"

python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: the do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc-related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched resp. do_ar_configured depend on it, but for 'original'
        # we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # would cause the deptask of do_populate_sysroot to run no matter which
        # archives we need, so we add the dependency here instead.

        # There is a corner case with the "gcc-source-${PV}" recipes: they don't
        # have a "do_configure" task, so we need to use "do_preconfigure".
        if pn.startswith("gcc-source-"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        else:
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" in d.getVar('PACKAGE_CLASSES'):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
        else:
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")
}

# Take all the sources for a recipe and put them into WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded
    fetch = bb.fetch2.Fetch(urls, d)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    bb.note('Archiving the patched source...')
    d.setVar('WORKDIR', ar_workdir)
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead.
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; archive the already configured ${S} instead.
        elif pn != 'libtool-native':
            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            if bb.data.inherits_class('kernel-yocto', d):
                bb.build.exec_func('do_kernel_configme', d)
            if bb.data.inherits_class('cmake', d):
                bb.build.exec_func('do_generate_toolchain_file', d)
            prefuncs = d.getVarFlag('do_configure', 'prefuncs')
            for func in (prefuncs or '').split():
                if func != "sysroot_cleansstate":
                    bb.build.exec_func(func, d)
            bb.build.exec_func('do_configure', d)
            postfuncs = d.getVarFlag('do_configure', 'postfuncs')
            for func in (postfuncs or '').split():
                if func != "do_qa_configure":
                    bb.build.exec_func(func, d)
        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    Create the tarball from srcdir.
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(srcdir, arcname=os.path.basename(srcdir))
    tar.close()

# Create a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude can't exclude files by path, so we copy
    # the patched source and then remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

def is_work_shared(d):
    pn = d.getVar('PN')
    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require the source to be in work-shared,
    # so we don't change WORKDIR in that case.
    if not is_work_shared(d):
        # Change WORKDIR so that do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing WORKDIR also changes 'B'; create the 'B' directory because
        # later tasks may require it to exist (e.g. some recipes' do_patch
        # expects 'B' to be present).
        bb.utils.mkdirhier(d.getVar('B'))

    bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    Archive the recipe, including .bb and .inc files.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
            '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If the recipe name is aa, we need to match files like aa.bbappend and
    # aa_1.1.bbappend. Files like aa1.bbappend or aa1_1.1.bbappend must be
    # excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    f = open(bbfile, 'r')
    for line in f.readlines():
        incfile = None
        if require_re.match(line):
            incfile = require_re.match(line).group(1)
        elif include_re.match(line):
            incfile = include_re.match(line).group(1)
        if incfile:
            incfile = d.expand(incfile)
            incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    Dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
        '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build

python () {
    # Add tasks in the correct order, specifically for linux-yocto, to avoid a race condition.
    # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency
    # so that do_kernel_configme does not need to run again when do_unpack_and_patch
    # gets added or removed (by adding or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}
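
# For reference, with the defaults above a recipe's deployed archives end up
# under ${DEPLOY_DIR_SRC}/${TARGET_SYS}/${PF}/, for example (illustrative
# path only, not produced by this class verbatim):
#   tmp/deploy/sources/x86_64-poky-linux/foo-1.0-r0/foo-1.0-r0-patched.tar.gz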