#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

#
# This bbclass is used for creating archives of:
# 1) the original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) the patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) the configured source: ARCHIVER_MODE[src] = "configured"
# 4) a source mirror: ARCHIVER_MODE[src] = "mirror"
# 5) the patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    You can set the directories to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 6) the environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 7) the recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 8) whether to output a .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 9) filtering by license: recipes whose license matches
#    COPYLEFT_LICENSE_INCLUDE will be included, and those matching
#    COPYLEFT_LICENSE_EXCLUDE will be excluded.
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 10) the recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
# 11) the source mirror mode:
#    ARCHIVER_MODE[mirror] = "split" (default): Sources are split into
#    per-recipe directories in a similar way to other archiver modes.
#    Post-processing may be required to produce a single mirror directory.
#    This does however allow inspection of duplicate sources and more
#    intelligent handling.
#    ARCHIVER_MODE[mirror] = "combined": All sources are placed into a single
#    directory suitable for direct use as a mirror. Duplicate sources are
#    ignored.
# 12) source mirror exclusions:
#    ARCHIVER_MIRROR_EXCLUDE is a list of prefixes to exclude from the mirror.
#    This may be used for sources which you are already publishing yourself
#    (e.g. if the URI starts with 'https://mysite.com/' and your mirror is
#    going to be published to the same site). It may also be used to exclude
#    local files (with the prefix 'file://') if these will be provided as part
#    of an archive of the layers themselves.
#

# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"
ARCHIVER_MODE[mirror] ?= "split"
ARCHIVER_MODE[compression] ?= "xz"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
ARCHIVER_ARCH = "${TARGET_SYS}"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"
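
# Example (illustrative sketch, not part of the class): typical local.conf
# settings to enable source archiving with the defaults above; pick the
# src mode that matches what you need to publish.
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"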

# When producing a combined mirror directory, allow duplicates for the case
# where multiple recipes use the same SRC_URI.
ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
SSTATE_ALLOW_OVERLAP_FILES += "${DEPLOY_DIR_SRC}/mirror"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"

# This is a convenience function that shell scripts can use as well
def include_package(d, pn):
    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return False
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return False

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return False

    return True
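
# Example (illustrative sketch): restricting archiving to copyleft target
# recipes via the filter variables documented in the header above; the
# values shown mirror the header's own examples.
#   COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#   COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
#   COPYLEFT_RECIPE_TYPES = 'target'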

python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    if not include_package(d, pn):
        return

    # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
    if pn.startswith('gcc-source'):
        d.setVar('ARCHIVER_ARCH', "allarch")

    def hasTask(task):
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched resp. do_ar_configured depend on it, but for 'original'
        # we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the dependency here instead.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have a "do_configure" task, so we need to use "do_preconfigure".
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src == "mirror":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_mirror' % pn)
    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}
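
# Example (illustrative sketch): emitting .src.rpm archives. As the check
# above enforces, this requires rpm packaging to be enabled.
#   PACKAGE_CLASSES += "package_rpm"
#   ARCHIVER_MODE[srpm] = "1"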

# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded

    # Clean up SRC_URI before calling bb.fetch2.Fetch(), since SRC_URI is now
    # held in the variable "urls"; otherwise there might be errors like:
    #   The SRCREV_FORMAT variable must be set when multiple SCMs are used
    ld = bb.data.createCopy(d)
    ld.setVar('SRC_URI', '')
    fetch = bb.fetch2.Fetch(urls, ld)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as a suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net; in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
                tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}
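
# Example (hypothetical sketch): a recipe fetching two git repositories.
# Distinct name= parameters give each unpacked source its own tarball
# suffix, as the check above requires; the URLs and names are placeholders.
#   SRC_URI = "git://example.com/frontend.git;name=frontend;branch=main \
#              git://example.com/backend.git;name=backend;branch=main"
#   SRCREV_frontend = "..."
#   SRCREV_backend = "..."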

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if not is_work_shared(d):
        ar_workdir = d.getVar('ARCHIVER_WORKDIR')
        d.setVar('WORKDIR', ar_workdir)
    bb.note('Archiving the patched source...')
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; instead we archive the already configured ${S}.
        # The kernel class functions require the source to be in work-shared,
        # so we don't unpack, patch or configure again; we just archive the
        # already configured ${S}.
        elif not (pn == 'libtool-native' or is_work_shared(d)):
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

python do_ar_mirror() {
    import subprocess

    src_uri = (d.getVar('SRC_URI') or '').split()
    if len(src_uri) == 0:
        return

    dl_dir = d.getVar('DL_DIR')
    mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
    mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
    have_mirror_tarballs = oe.types.boolean(d.getVar('BB_GENERATE_MIRROR_TARBALLS'))

    if mirror_mode == 'combined':
        destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
    elif mirror_mode == 'split':
        destdir = d.getVar('ARCHIVER_OUTDIR')
    else:
        bb.fatal('Invalid ARCHIVER_MODE[mirror]: %s' % (mirror_mode))

    if not have_mirror_tarballs:
        bb.fatal('Using `ARCHIVER_MODE[src] = "mirror"` depends on setting `BB_GENERATE_MIRROR_TARBALLS = "1"`')

    def is_excluded(url):
        for prefix in mirror_exclusions:
            if url.startswith(prefix):
                return True
        return False

    bb.note('Archiving the source as a mirror...')

    bb.utils.mkdirhier(destdir)

    fetcher = bb.fetch2.Fetch(src_uri, d)

    for ud in fetcher.expanded_urldata():
        if is_excluded(ud.url):
            bb.note('Skipping excluded url: %s' % (ud.url))
            continue

        bb.note('Archiving url: %s' % (ud.url))
        ud.setup_localpath(d)
        localpath = None

        # Check for mirror tarballs first. We will archive the first mirror
        # tarball that we find as it's assumed that we just need one.
        for mirror_fname in ud.mirrortarballs:
            mirror_path = os.path.join(dl_dir, mirror_fname)
            if os.path.exists(mirror_path):
                bb.note('Found mirror tarball: %s' % (mirror_path))
                localpath = mirror_path
                break

        if len(ud.mirrortarballs) and not localpath:
            bb.warn('Mirror tarballs are listed for a source but none are present. ' \
                    'Falling back to original download.\n' \
                    'SRC_URI = %s' % (ud.url))

        # Check original download
        if not localpath:
            bb.note('Using original download: %s' % (ud.localpath))
            localpath = ud.localpath

        if not localpath or not os.path.exists(localpath):
            bb.fatal('Original download is missing for a source.\n' \
                     'SRC_URI = %s' % (ud.url))

        # We now have an appropriate localpath
        bb.note('Copying source mirror')
        cmd = 'cp -fpPRH %s %s' % (localpath, destdir)
        subprocess.check_call(cmd, shell=True)
}
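
# Example (illustrative sketch): building a ready-to-publish source mirror.
# BB_GENERATE_MIRROR_TARBALLS is mandatory, as the check above enforces;
# the exclusion prefix is the header's placeholder, not a real site.
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "mirror"
#   ARCHIVER_MODE[mirror] = "combined"
#   BB_GENERATE_MIRROR_TARBALLS = "1"
#   ARCHIVER_MIRROR_EXCLUDE = "https://mysite.com/"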

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    create the tarball from srcdir
    """
    import subprocess

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
    if compression_method == "xz":
        compression_cmd = "xz %s" % d.getVar('XZ_DEFAULTS')
    # To keep compatibility with ARCHIVER_MODE[compression]
    elif compression_method == "gz":
        compression_cmd = "gzip"
    elif compression_method == "bz2":
        compression_cmd = "bzip2"
    else:
        bb.fatal("Unsupported compression_method: %s" % compression_method)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
    else:
        filename = '%s.tar.%s' % (d.getVar('PF'), compression_method)
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    dirname = os.path.dirname(srcdir)
    basename = os.path.basename(srcdir)
    exclude = "--exclude=temp --exclude=patches --exclude='.pc'"
    tar_cmd = "tar %s -cf - %s | %s > %s" % (exclude, basename, compression_cmd, tarname)
    subprocess.check_call(tar_cmd, cwd=dirname, shell=True)

# Create a .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude can't exclude files by path, so we copy
    # the patched source and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

def is_work_shared(d):
    sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
    sourcedir = os.path.realpath(d.getVar('S'))
    return sourcedir.startswith(sharedworkdir)
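
# Example (illustrative sketch): switching the tarball compression used by
# create_tarball() above; supported values are "xz" (default), "gz" and "bz2".
#   ARCHIVER_MODE[compression] = "bz2"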

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require the source to be in work-shared,
    # so we don't change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing WORKDIR also changes B; create B because subsequent
        # tasks may require it to exist (for example, some recipes'
        # do_patch requires B).
        bb.utils.mkdirhier(d.getVar('B'))

        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    if bb.data.inherits_class('dos2unix', d):
        bb.build.exec_func('do_convert_crlf_to_lf', d)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}
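
# Example (illustrative sketch): also shipping a .diff.gz of the applied
# patches while keeping extra directories out of the diff; "tests" here is
# a hypothetical directory name, not a default.
#   ARCHIVER_MODE[diff] = "1"
#   ARCHIVER_MODE[diff-exclude] += "tests"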

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
            '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If the recipe name is aa, we need to match files like aa.bbappend and
    # aa_1.1.bbappend. Files like aa1.bbappend or aa1_1.1.bbappend must be
    # excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
            if incfile:
                incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
        '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch do_preconfigure
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_ar_mirror after do_fetch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives

do_build[recrdeptask] += "do_deploy_archives"
do_rootfs[recrdeptask] += "do_deploy_archives"
do_populate_sdk[recrdeptask] += "do_deploy_archives"

python () {
    # Add tasks in the correct order, specifically for linux-yocto to avoid a race condition.
    # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency
    # so that do_kernel_configme does not need to run again when do_unpack_and_patch
    # gets added or removed (by adding or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}