1# ex:ts=4:sw=4:sts=4:et 2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- 3# 4# This bbclass is used for creating archive for: 5# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original" 6# 2) patched source: ARCHIVER_MODE[src] = "patched" (default) 7# 3) configured source: ARCHIVER_MODE[src] = "configured" 8# 4) The patches between do_unpack and do_patch: 9# ARCHIVER_MODE[diff] = "1" 10# And you can set the one that you'd like to exclude from the diff: 11# ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches" 12# 5) The environment data, similar to 'bitbake -e recipe': 13# ARCHIVER_MODE[dumpdata] = "1" 14# 6) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1" 15# 7) Whether output the .src.rpm package: 16# ARCHIVER_MODE[srpm] = "1" 17# 8) Filter the license, the recipe whose license in 18# COPYLEFT_LICENSE_INCLUDE will be included, and in 19# COPYLEFT_LICENSE_EXCLUDE will be excluded. 20# COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*' 21# COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary' 22# 9) The recipe type that will be archived: 23# COPYLEFT_RECIPE_TYPES = 'target' 24# 25 26# Don't filter the license by default 27COPYLEFT_LICENSE_INCLUDE ?= '' 28COPYLEFT_LICENSE_EXCLUDE ?= '' 29# Create archive for all the recipe types 30COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian' 31inherit copyleft_filter 32 33ARCHIVER_MODE[srpm] ?= "0" 34ARCHIVER_MODE[src] ?= "patched" 35ARCHIVER_MODE[diff] ?= "0" 36ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches" 37ARCHIVER_MODE[dumpdata] ?= "0" 38ARCHIVER_MODE[recipe] ?= "0" 39 40DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources" 41ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources" 42ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/" 43ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/" 44 45do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}" 46do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}" 47do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}" 48do_deploy_archives[dirs] = "${WORKDIR}" 49 50# This is a convenience for the shell script to use it 51 52 53python () { 54 pn = d.getVar('PN') 55 assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split() 56 if pn in assume_provided: 57 for p in d.getVar("PROVIDES").split(): 58 if p != pn: 59 pn = p 60 break 61 62 included, reason = copyleft_should_include(d) 63 if not included: 64 bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason)) 65 return 66 else: 67 bb.debug(1, 'archiver: %s is included: %s' % (pn, reason)) 68 69 70 # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted, 71 # so avoid archiving source here. 72 if pn.startswith('glibc-locale'): 73 return 74 75 # We just archive gcc-source for all the gcc related recipes 76 if d.getVar('BPN') in ['gcc', 'libgcc'] \ 77 and not pn.startswith('gcc-source'): 78 bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn) 79 return 80 81 ar_src = d.getVarFlag('ARCHIVER_MODE', 'src') 82 ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata') 83 ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe') 84 85 if ar_src == "original": 86 d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn) 87 # 'patched' and 'configured' invoke do_unpack_and_patch because 88 # do_ar_patched resp. do_ar_configured depend on it, but for 'original' 89 # we have to add it explicitly. 90 if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1': 91 d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn) 92 elif ar_src == "patched": 93 d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn) 94 elif ar_src == "configured": 95 # We can't use "addtask do_ar_configured after do_configure" since it 96 # will cause the deptask of do_populate_sysroot to run not matter what 97 # archives we need, so we add the depends here. 98 99 # There is a corner case with "gcc-source-${PV}" recipes, they don't have 100 # the "do_configure" task, so we need to use "do_preconfigure" 101 if pn.startswith("gcc-source-"): 102 d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn) 103 else: 104 d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn) 105 d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn) 106 107 elif ar_src: 108 bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src) 109 110 if ar_dumpdata == "1": 111 d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn) 112 113 if ar_recipe == "1": 114 d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn) 115 116 # Output the SRPM package 117 if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'): 118 if "package_rpm" in d.getVar('PACKAGE_CLASSES'): 119 d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn) 120 if ar_dumpdata == "1": 121 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn) 122 if ar_recipe == "1": 123 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn) 124 if ar_src == "original": 125 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn) 126 elif ar_src == "patched": 127 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn) 128 elif ar_src == "configured": 129 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn) 130 else: 131 bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES") 132} 133 134# Take all the sources for a recipe and puts them in WORKDIR/archiver-work/. 135# Files in SRC_URI are copied directly, anything that's a directory 136# (e.g. git repositories) is "unpacked" and then put into a tarball. 137python do_ar_original() { 138 139 import shutil, tempfile 140 141 if d.getVarFlag('ARCHIVER_MODE', 'src') != "original": 142 return 143 144 ar_outdir = d.getVar('ARCHIVER_OUTDIR') 145 bb.note('Archiving the original source...') 146 urls = d.getVar("SRC_URI").split() 147 # destsuffix (git fetcher) and subdir (everything else) are allowed to be 148 # absolute paths (for example, destsuffix=${S}/foobar). 149 # That messes with unpacking inside our tmpdir below, because the fetchers 150 # will then unpack in that directory and completely ignore the tmpdir. 151 # That breaks parallel tasks relying on ${S}, like do_compile. 152 # 153 # To solve this, we remove these parameters from all URLs. 154 # We do this even for relative paths because it makes the content of the 155 # archives more useful (no extra paths that are only used during 156 # compilation). 157 for i, url in enumerate(urls): 158 decoded = bb.fetch2.decodeurl(url) 159 for param in ('destsuffix', 'subdir'): 160 if param in decoded[5]: 161 del decoded[5][param] 162 encoded = bb.fetch2.encodeurl(decoded) 163 urls[i] = encoded 164 fetch = bb.fetch2.Fetch(urls, d) 165 tarball_suffix = {} 166 for url in fetch.urls: 167 local = fetch.localpath(url).rstrip("/"); 168 if os.path.isfile(local): 169 shutil.copy(local, ar_outdir) 170 elif os.path.isdir(local): 171 tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR')) 172 fetch.unpack(tmpdir, (url,)) 173 # To handle recipes with more than one source, we add the "name" 174 # URL parameter as suffix. We treat it as an error when 175 # there's more than one URL without a name, or a name gets reused. 176 # This is an additional safety net, in practice the name has 177 # to be set when using the git fetcher, otherwise SRCREV cannot 178 # be set separately for each URL. 179 params = bb.fetch2.decodeurl(url)[5] 180 type = bb.fetch2.decodeurl(url)[0] 181 location = bb.fetch2.decodeurl(url)[2] 182 name = params.get('name', '') 183 if type.lower() == 'file': 184 name_tmp = location.rstrip("*").rstrip("/") 185 name = os.path.basename(name_tmp) 186 else: 187 if name in tarball_suffix: 188 if not name: 189 bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url)) 190 else: 191 bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (tarball_suffix[name], url)) 192 tarball_suffix[name] = url 193 create_tarball(d, tmpdir + '/.', name, ar_outdir) 194 195 # Emit patch series files for 'original' 196 bb.note('Writing patch series files...') 197 for patch in src_patches(d): 198 _, _, local, _, _, parm = bb.fetch.decodeurl(patch) 199 patchdir = parm.get('patchdir') 200 if patchdir: 201 series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_')) 202 else: 203 series = os.path.join(ar_outdir, 'series') 204 205 with open(series, 'a') as s: 206 s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel'])) 207} 208 209python do_ar_patched() { 210 211 if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched': 212 return 213 214 # Get the ARCHIVER_OUTDIR before we reset the WORKDIR 215 ar_outdir = d.getVar('ARCHIVER_OUTDIR') 216 ar_workdir = d.getVar('ARCHIVER_WORKDIR') 217 bb.note('Archiving the patched source...') 218 d.setVar('WORKDIR', ar_workdir) 219 create_tarball(d, d.getVar('S'), 'patched', ar_outdir) 220} 221 222python do_ar_configured() { 223 import shutil 224 225 # Forcibly expand the sysroot paths as we're about to change WORKDIR 226 d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST')) 227 d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET')) 228 d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT')) 229 d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE')) 230 231 ar_outdir = d.getVar('ARCHIVER_OUTDIR') 232 if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured': 233 bb.note('Archiving the configured source...') 234 pn = d.getVar('PN') 235 # "gcc-source-${PV}" recipes don't have "do_configure" 236 # task, so we need to run "do_preconfigure" instead 237 if pn.startswith("gcc-source-"): 238 d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR')) 239 bb.build.exec_func('do_preconfigure', d) 240 241 # The libtool-native's do_configure will remove the 242 # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run the 243 # do_configure, we archive the already configured ${S} to 244 # instead of. 245 elif pn != 'libtool-native': 246 # Change the WORKDIR to make do_configure run in another dir. 247 d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR')) 248 if bb.data.inherits_class('kernel-yocto', d): 249 bb.build.exec_func('do_kernel_configme', d) 250 if bb.data.inherits_class('cmake', d): 251 bb.build.exec_func('do_generate_toolchain_file', d) 252 prefuncs = d.getVarFlag('do_configure', 'prefuncs') 253 for func in (prefuncs or '').split(): 254 if func != "sysroot_cleansstate": 255 bb.build.exec_func(func, d) 256 bb.build.exec_func('do_configure', d) 257 postfuncs = d.getVarFlag('do_configure', 'postfuncs') 258 for func in (postfuncs or '').split(): 259 if func != "do_qa_configure": 260 bb.build.exec_func(func, d) 261 srcdir = d.getVar('S') 262 builddir = d.getVar('B') 263 if srcdir != builddir: 264 if os.path.exists(builddir): 265 oe.path.copytree(builddir, os.path.join(srcdir, \ 266 'build.%s.ar_configured' % d.getVar('PF'))) 267 create_tarball(d, srcdir, 'configured', ar_outdir) 268} 269 270def create_tarball(d, srcdir, suffix, ar_outdir): 271 """ 272 create the tarball from srcdir 273 """ 274 import tarfile 275 276 # Make sure we are only creating a single tarball for gcc sources 277 if (d.getVar('SRC_URI') == ""): 278 return 279 280 # For the kernel archive, srcdir may just be a link to the 281 # work-shared location. Use os.path.realpath to make sure 282 # that we archive the actual directory and not just the link. 283 srcdir = os.path.realpath(srcdir) 284 285 bb.utils.mkdirhier(ar_outdir) 286 if suffix: 287 filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix) 288 else: 289 filename = '%s.tar.gz' % d.getVar('PF') 290 tarname = os.path.join(ar_outdir, filename) 291 292 bb.note('Creating %s' % tarname) 293 tar = tarfile.open(tarname, 'w:gz') 294 tar.add(srcdir, arcname=os.path.basename(srcdir)) 295 tar.close() 296 297# creating .diff.gz between source.orig and source 298def create_diff_gz(d, src_orig, src, ar_outdir): 299 300 import subprocess 301 302 if not os.path.isdir(src) or not os.path.isdir(src_orig): 303 return 304 305 # The diff --exclude can't exclude the file with path, so we copy 306 # the patched source, and remove the files that we'd like to 307 # exclude. 308 src_patched = src + '.patched' 309 oe.path.copyhardlinktree(src, src_patched) 310 for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split(): 311 bb.utils.remove(os.path.join(src_orig, i), recurse=True) 312 bb.utils.remove(os.path.join(src_patched, i), recurse=True) 313 314 dirname = os.path.dirname(src) 315 basename = os.path.basename(src) 316 bb.utils.mkdirhier(ar_outdir) 317 cwd = os.getcwd() 318 try: 319 os.chdir(dirname) 320 out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF')) 321 diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file) 322 subprocess.check_call(diff_cmd, shell=True) 323 bb.utils.remove(src_patched, recurse=True) 324 finally: 325 os.chdir(cwd) 326 327def is_work_shared(d): 328 pn = d.getVar('PN') 329 return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source') 330 331# Run do_unpack and do_patch 332python do_unpack_and_patch() { 333 if d.getVarFlag('ARCHIVER_MODE', 'src') not in \ 334 [ 'patched', 'configured'] and \ 335 d.getVarFlag('ARCHIVER_MODE', 'diff') != '1': 336 return 337 ar_outdir = d.getVar('ARCHIVER_OUTDIR') 338 ar_workdir = d.getVar('ARCHIVER_WORKDIR') 339 ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE') 340 pn = d.getVar('PN') 341 342 # The kernel class functions require it to be on work-shared, so we dont change WORKDIR 343 if not is_work_shared(d): 344 # Change the WORKDIR to make do_unpack do_patch run in another dir. 345 d.setVar('WORKDIR', ar_workdir) 346 # Restore the original path to recipe's native sysroot (it's relative to WORKDIR). 347 d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native) 348 349 # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the 350 # possibly requiring of the following tasks (such as some recipes's 351 # do_patch required 'B' existed). 352 bb.utils.mkdirhier(d.getVar('B')) 353 354 bb.build.exec_func('do_unpack', d) 355 356 # Save the original source for creating the patches 357 if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1': 358 src = d.getVar('S').rstrip('/') 359 src_orig = '%s.orig' % src 360 oe.path.copytree(src, src_orig) 361 362 # Make sure gcc and kernel sources are patched only once 363 if not (d.getVar('SRC_URI') == "" or is_work_shared(d)): 364 bb.build.exec_func('do_patch', d) 365 366 # Create the patches 367 if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1': 368 bb.note('Creating diff gz...') 369 create_diff_gz(d, src_orig, src, ar_outdir) 370 bb.utils.remove(src_orig, recurse=True) 371} 372 373# BBINCLUDED is special (excluded from basehash signature 374# calculation). Using it in a task signature can cause "basehash 375# changed" errors. 376# 377# Depending on BBINCLUDED also causes do_ar_recipe to run again 378# for unrelated changes, like adding or removing buildhistory.bbclass. 379# 380# For these reasons we ignore the dependency completely. The versioning 381# of the output file ensures that we create it each time the recipe 382# gets rebuilt, at least as long as a PR server is used. We also rely 383# on that mechanism to catch changes in the file content, because the 384# file content is not part of of the task signature either. 385do_ar_recipe[vardepsexclude] += "BBINCLUDED" 386python do_ar_recipe () { 387 """ 388 archive the recipe, including .bb and .inc. 389 """ 390 import re 391 import shutil 392 393 require_re = re.compile( r"require\s+(.+)" ) 394 include_re = re.compile( r"include\s+(.+)" ) 395 bbfile = d.getVar('FILE') 396 outdir = os.path.join(d.getVar('WORKDIR'), \ 397 '%s-recipe' % d.getVar('PF')) 398 bb.utils.mkdirhier(outdir) 399 shutil.copy(bbfile, outdir) 400 401 pn = d.getVar('PN') 402 bbappend_files = d.getVar('BBINCLUDED').split() 403 # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend 404 # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded. 405 bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn)) 406 bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn)) 407 for file in bbappend_files: 408 if bbappend_re.match(file) or bbappend_re1.match(file): 409 shutil.copy(file, outdir) 410 411 dirname = os.path.dirname(bbfile) 412 bbpath = '%s:%s' % (dirname, d.getVar('BBPATH')) 413 f = open(bbfile, 'r') 414 for line in f.readlines(): 415 incfile = None 416 if require_re.match(line): 417 incfile = require_re.match(line).group(1) 418 elif include_re.match(line): 419 incfile = include_re.match(line).group(1) 420 if incfile: 421 incfile = d.expand(incfile) 422 incfile = bb.utils.which(bbpath, incfile) 423 if incfile: 424 shutil.copy(incfile, outdir) 425 426 create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR')) 427 bb.utils.remove(outdir, recurse=True) 428} 429 430python do_dumpdata () { 431 """ 432 dump environment data to ${PF}-showdata.dump 433 """ 434 435 dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \ 436 '%s-showdata.dump' % d.getVar('PF')) 437 bb.note('Dumping metadata into %s' % dumpfile) 438 with open(dumpfile, "w") as f: 439 # emit variables and shell functions 440 bb.data.emit_env(f, d, True) 441 # emit the metadata which isn't valid shell 442 for e in d.keys(): 443 if d.getVarFlag(e, "python", False): 444 f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False))) 445} 446 447SSTATETASKS += "do_deploy_archives" 448do_deploy_archives () { 449 echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}." 450} 451python do_deploy_archives_setscene () { 452 sstate_setscene(d) 453} 454do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}" 455do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}" 456do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}" 457addtask do_deploy_archives_setscene 458 459addtask do_ar_original after do_unpack 460addtask do_unpack_and_patch after do_patch 461addtask do_ar_patched after do_unpack_and_patch 462addtask do_ar_configured after do_unpack_and_patch 463addtask do_dumpdata 464addtask do_ar_recipe 465addtask do_deploy_archives before do_build 466 467python () { 468 # Add tasks in the correct order, specifically for linux-yocto to avoid race condition. 469 # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency 470 # so that do_kernel_configme does not need to run again when do_unpack_and_patch 471 # gets added or removed (by adding or removing archiver.bbclass). 472 if bb.data.inherits_class('kernel-yocto', d): 473 bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d) 474} 475