# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) source mirror: ARCHIVER_MODE[src] = "mirror"
# 5) The patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    And you can set the paths that you'd like to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 6) The environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 7) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 8) Whether to output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 9) Filtering by license: recipes whose license matches
#    COPYLEFT_LICENSE_INCLUDE are included, and those matching
#    COPYLEFT_LICENSE_EXCLUDE are excluded.
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 10) The recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
# 11) The source mirror mode:
#    ARCHIVER_MODE[mirror] = "split" (default): Sources are split into
#    per-recipe directories in a similar way to other archiver modes.
#    Post-processing may be required to produce a single mirror directory.
#    This does however allow inspection of duplicate sources and more
#    intelligent handling.
#    ARCHIVER_MODE[mirror] = "combined": All sources are placed into a single
#    directory suitable for direct use as a mirror. Duplicate sources are
#    ignored.
# 12) Source mirror exclusions:
#    ARCHIVER_MIRROR_EXCLUDE is a list of prefixes to exclude from the mirror.
#    This may be used for sources which you are already publishing yourself
#    (e.g. if the URI starts with 'https://mysite.com/' and your mirror is
#    going to be published to the same site). It may also be used to exclude
#    local files (with the prefix 'file://') if these will be provided as part
#    of an archive of the layers themselves.
#
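# A minimal example configuration for local.conf (an illustrative sketch;
# the chosen modes are assumptions, adjust them to your needs):
#
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "patched"
#   ARCHIVER_MODE[diff] = "1"
#   ARCHIVER_MODE[recipe] = "1"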

# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"
ARCHIVER_MODE[mirror] ?= "split"
ARCHIVER_MODE[compression] ?= "gz"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

# When producing a combined mirror directory, allow duplicates for the case
# where multiple recipes use the same SRC_URI.
ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}/mirror"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"

python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    def hasTask(task):
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched and do_ar_configured, respectively, depend on it,
        # but for 'original' we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have the "do_configure" task, so we need to use "do_preconfigure".
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src == "mirror":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_mirror' % pn)
    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}
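
# Note: with the default ARCHIVER_MODE[src] = "patched", the dependency wiring
# above together with the addtask statements at the end of this class results
# in roughly this task chain (illustrative, simplified):
#   do_unpack -> do_patch -> do_unpack_and_patch -> do_ar_patched
#     -> do_deploy_archives -> do_build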

# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
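    # For example, a hypothetical URL such as
    #   git://example.com/foo.git;branch=main;name=foo;destsuffix=${S}/foo
    # would be rewritten below to
    #   git://example.com/foo.git;branch=main;name=foo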
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded

    # Clean up SRC_URI before calling bb.fetch2.Fetch(): the URLs are now in
    # the "urls" variable, and leaving SRC_URI set could cause errors like:
    #   The SRCREV_FORMAT variable must be set when multiple SCMs are used
    ld = bb.data.createCopy(d)
    ld.setVar('SRC_URI', '')
    fetch = bb.fetch2.Fetch(urls, ld)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            scheme, _, location, _, _, params = bb.fetch2.decodeurl(url)
            name = params.get('name', '')
            if scheme.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}
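
# The series files written above follow quilt conventions: one patch filename
# per line followed by its strip level, e.g. (illustrative):
#   0001-fix-build.patch -p1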

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if not is_work_shared(d):
        ar_workdir = d.getVar('ARCHIVER_WORKDIR')
        d.setVar('WORKDIR', ar_workdir)
    bb.note('Archiving the patched source...')
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; we archive the already configured ${S} instead.
        # The kernel class functions require ${S} to be on work-shared, so we
        # don't unpack, patch or configure again, we just archive the already
        # configured ${S}.
        elif not (pn == 'libtool-native' or is_work_shared(d)):
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir,
                        'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

python do_ar_mirror() {
    import subprocess

    src_uri = (d.getVar('SRC_URI') or '').split()
    if len(src_uri) == 0:
        return

    dl_dir = d.getVar('DL_DIR')
    mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
    mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
    have_mirror_tarballs = d.getVar('BB_GENERATE_MIRROR_TARBALLS')

    if mirror_mode == 'combined':
        destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
    elif mirror_mode == 'split':
        destdir = d.getVar('ARCHIVER_OUTDIR')
    else:
        bb.fatal('Invalid ARCHIVER_MODE[mirror]: %s' % (mirror_mode))

    if not have_mirror_tarballs:
        bb.fatal('Using `ARCHIVER_MODE[src] = "mirror"` depends on setting `BB_GENERATE_MIRROR_TARBALLS = "1"`')

    def is_excluded(url):
        for prefix in mirror_exclusions:
            if url.startswith(prefix):
                return True
        return False

    bb.note('Archiving the source as a mirror...')

    bb.utils.mkdirhier(destdir)

    fetcher = bb.fetch2.Fetch(src_uri, d)

    for ud in fetcher.expanded_urldata():
        if is_excluded(ud.url):
            bb.note('Skipping excluded url: %s' % (ud.url))
            continue

        bb.note('Archiving url: %s' % (ud.url))
        ud.setup_localpath(d)
        localpath = None

        # Check for mirror tarballs first. We will archive the first mirror
        # tarball that we find as it's assumed that we just need one.
        for mirror_fname in ud.mirrortarballs:
            mirror_path = os.path.join(dl_dir, mirror_fname)
            if os.path.exists(mirror_path):
                bb.note('Found mirror tarball: %s' % (mirror_path))
                localpath = mirror_path
                break

        if ud.mirrortarballs and not localpath:
            bb.warn('Mirror tarballs are listed for a source but none are present. '
                    'Falling back to original download.\n'
                    'SRC_URI = %s' % (ud.url))

        # Check the original download
        if not localpath:
            bb.note('Using original download: %s' % (ud.localpath))
            localpath = ud.localpath

        if not localpath or not os.path.exists(localpath):
            bb.fatal('Original download is missing for a source.\n'
                     'SRC_URI = %s' % (ud.url))

        # We now have an appropriate localpath
        bb.note('Copying source mirror')
        cmd = 'cp -fpPRH %s %s' % (localpath, destdir)
        subprocess.check_call(cmd, shell=True)
}
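
# To keep sources you already publish elsewhere out of the mirror, set e.g.
# (illustrative URL): ARCHIVER_MIRROR_EXCLUDE = "https://mysite.com/"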

def exclude_useless_paths(tarinfo):
    if tarinfo.isdir():
        if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
            return None
        elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
            return None
    return tarinfo

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    Create the tarball from srcdir.
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if d.getVar('SRC_URI') == "":
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
    else:
        filename = '%s.tar.%s' % (d.getVar('PF'), compression_method)
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    with tarfile.open(tarname, 'w:%s' % compression_method) as tar:
        tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
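
# The tarball name combines PF, the suffix and the configured compression,
# e.g. (hypothetical PF) bash-5.2.15-r0-patched.tar.gz with the default
# ARCHIVER_MODE[compression] = "gz".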

# Create a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude can't exclude files by path, so we copy
    # the patched source, and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

def is_work_shared(d):
    pn = d.getVar('PN')
    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            ['patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require ${S} to be on work-shared, so we don't change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing 'WORKDIR' also changes 'B', so create the 'B' directory
        # since later tasks may require it to exist (e.g. some recipes'
        # do_patch expects 'B' to be present).
        bb.utils.mkdirhier(d.getVar('B'))

    bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    if bb.data.inherits_class('dos2unix', d):
        bb.build.exec_func('do_convert_crlf_to_lf', d)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    Archive the recipe, including .bb and .inc files.
    """
    import re
    import shutil

    require_re = re.compile(r"require\s+(.+)")
    include_re = re.compile(r"include\s+(.+)")
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If the recipe name is aa, we need to match files like aa.bbappend and
    # aa_1.1.bbappend. Files like aa1.bbappend or aa1_1.1.bbappend must be
    # excluded.
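    # For example, with pn = "aa" (hypothetical paths):
    #   matched: .../recipes-foo/aa/aa_1.1.bbappend and .../aa.bbappend
    #   skipped: .../aa1.bbappend and .../aa1_1.1.bbappend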
    bbappend_re = re.compile(r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile(r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
            if incfile:
                incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    Dump environment data to ${PF}-showdata.dump.
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch do_preconfigure
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_ar_mirror after do_fetch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives
do_build[recrdeptask] += "do_deploy_archives"
do_rootfs[recrdeptask] += "do_deploy_archives"
do_populate_sdk[recrdeptask] += "do_deploy_archives"

python () {
    # Add tasks in the correct order, specifically for linux-yocto, to avoid a
    # race condition. sstatesig.py:sstate_rundepfilter has special support that
    # excludes this dependency so that do_kernel_configme does not need to run
    # again when do_unpack_and_patch gets added or removed (by adding or
    # removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}