# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archives of:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) the diff between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    You can set the paths that you'd like to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) the environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) the recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) whether to output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) filtering by license: recipes whose license matches
#    COPYLEFT_LICENSE_INCLUDE are included, and those matching
#    COPYLEFT_LICENSE_EXCLUDE are excluded, e.g.:
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) the recipe types that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#

# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"


do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"

python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return
    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return

    def hasTask(task):
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')
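
    # The branches below wire the selected archiving task into
    # do_deploy_archives via varflag appends. For example, with the
    # default mode and a hypothetical recipe "foo", the net effect is:
    #   do_deploy_archives[depends] += "foo:do_ar_patched"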
    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched and do_ar_configured, respectively, depend on it,
        # but for 'original' we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # would cause the deptask of do_populate_sysroot to run no matter
        # which archives we need, so we add the dependency here instead.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't
        # have a "do_configure" task, so we need to use "do_preconfigure".
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}

# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
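    #
    # As an illustration (hypothetical URL), the loop below turns
    #   git://example.com/foo.git;branch=main;destsuffix=${S}/foo
    # into
    #   git://example.com/foo.git;branch=main
    # so that fetch.unpack() further down stays inside our tmpdir.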
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded
    fetch = bb.fetch2.Fetch(urls, d)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
                tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}
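
# A note on the series files written above: each entry follows quilt
# conventions, e.g. (hypothetical patch in SRC_URI with striplevel 1):
#   0001-fix-build.patch -p1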

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if not is_work_shared(d):
        ar_workdir = d.getVar('ARCHIVER_WORKDIR')
        d.setVar('WORKDIR', ar_workdir)
    bb.note('Archiving the patched source...')
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
        # "gcc-source-${PV}" recipes don't have a "do_configure"
        # task, so we need to run "do_preconfigure" instead
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; archive the already configured ${S} instead.
        elif pn != 'libtool-native':
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

def exclude_useless_paths(tarinfo):
    if tarinfo.isdir():
        if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
            return None
        elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
            return None
    return tarinfo

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    create the tarball from srcdir
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    tar = tarfile.open(tarname, 'w:gz')
    tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
    tar.close()
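
# As an example of the naming scheme in create_tarball() (hypothetical
# values): with PF = "bash-4.4-r0" and suffix = "patched", the archive
# is written as bash-4.4-r0-patched.tar.gz inside ar_outdir.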

# creating .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # The diff --exclude can't exclude files by path, so we copy
    # the patched source, and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

def is_work_shared(d):
    pn = d.getVar('PN')
    return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require the source to be in work-shared,
    # so we don't change WORKDIR.
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot
        # (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing WORKDIR also changes B; create B because some of the
        # following tasks may need it to exist (e.g. some recipes' do_patch
        # requires B).
        bb.utils.mkdirhier(d.getVar('B'))

        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}
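
# With ARCHIVER_MODE[diff] = "1", the task above ends up running, e.g.
# for a hypothetical recipe with S = ".../foo-1.0" and PF = "foo-1.0-r0":
#   diff -Naur foo-1.0.orig foo-1.0.patched | gzip -c > .../foo-1.0-r0-diff.gz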
411 """ 412 import re 413 import shutil 414 415 require_re = re.compile( r"require\s+(.+)" ) 416 include_re = re.compile( r"include\s+(.+)" ) 417 bbfile = d.getVar('FILE') 418 outdir = os.path.join(d.getVar('WORKDIR'), \ 419 '%s-recipe' % d.getVar('PF')) 420 bb.utils.mkdirhier(outdir) 421 shutil.copy(bbfile, outdir) 422 423 pn = d.getVar('PN') 424 bbappend_files = d.getVar('BBINCLUDED').split() 425 # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend 426 # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded. 427 bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn)) 428 bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn)) 429 for file in bbappend_files: 430 if bbappend_re.match(file) or bbappend_re1.match(file): 431 shutil.copy(file, outdir) 432 433 dirname = os.path.dirname(bbfile) 434 bbpath = '%s:%s' % (dirname, d.getVar('BBPATH')) 435 f = open(bbfile, 'r') 436 for line in f.readlines(): 437 incfile = None 438 if require_re.match(line): 439 incfile = require_re.match(line).group(1) 440 elif include_re.match(line): 441 incfile = include_re.match(line).group(1) 442 if incfile: 443 incfile = d.expand(incfile) 444 if incfile: 445 incfile = bb.utils.which(bbpath, incfile) 446 if incfile: 447 shutil.copy(incfile, outdir) 448 449 create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR')) 450 bb.utils.remove(outdir, recurse=True) 451} 452 453python do_dumpdata () { 454 """ 455 dump environment data to ${PF}-showdata.dump 456 """ 457 458 dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \ 459 '%s-showdata.dump' % d.getVar('PF')) 460 bb.note('Dumping metadata into %s' % dumpfile) 461 with open(dumpfile, "w") as f: 462 # emit variables and shell functions 463 bb.data.emit_env(f, d, True) 464 # emit the metadata which isn't valid shell 465 for e in d.keys(): 466 if d.getVarFlag(e, "python", False): 467 f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False))) 468} 469 470SSTATETASKS += "do_deploy_archives" 471do_deploy_archives () { 472 echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}." 473} 474python do_deploy_archives_setscene () { 475 sstate_setscene(d) 476} 477do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}" 478do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}" 479do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}" 480addtask do_deploy_archives_setscene 481 482addtask do_ar_original after do_unpack 483addtask do_unpack_and_patch after do_patch 484addtask do_ar_patched after do_unpack_and_patch 485addtask do_ar_configured after do_unpack_and_patch 486addtask do_dumpdata 487addtask do_ar_recipe 488addtask do_deploy_archives before do_build 489 490python () { 491 # Add tasks in the correct order, specifically for linux-yocto to avoid race condition. 492 # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency 493 # so that do_kernel_configme does not need to run again when do_unpack_and_patch 494 # gets added or removed (by adding or removing archiver.bbclass). 495 if bb.data.inherits_class('kernel-yocto', d): 496 bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d) 497} 498