#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

#
# Sanity check the users setup for common misconfigurations
#

SANITY_REQUIRED_UTILITIES ?= "patch diffstat git bzip2 tar \
    gzip gawk chrpath wget cpio perl file which"

def bblayers_conf_file(d):
    """Return the path of this build's conf/bblayers.conf below TOPDIR."""
    return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf')

def sanity_conf_read(fn):
    """Read a configuration file and return its content as a list of lines."""
    with open(fn, 'r') as f:
        lines = f.readlines()
    return lines

def sanity_conf_find_line(pattern, lines):
    """Return (index, line) for the first line matching regex 'pattern',
    or (None, None) if no line matches."""
    import re
    return next(((index, line)
        for index, line in enumerate(lines)
        if re.search(pattern, line)), (None, None))

def sanity_conf_update(fn, lines, version_var_name, new_version):
    """Rewrite fn, replacing the line assigning version_var_name with the
    given integer new_version. Assumes such a line exists in 'lines'."""
    index, line = sanity_conf_find_line(r"^%s" % version_var_name, lines)
    lines[index] = '%s = "%d"\n' % (version_var_name, new_version)
    with open(fn, "w") as f:
        f.write(''.join(lines))

# Functions added to this variable MUST throw a NotImplementedError exception unless
# they successfully changed the config version in the config file. Exceptions
# are used since exec_func doesn't handle return values.
BBLAYERS_CONF_UPDATE_FUNCS += " \
    conf/bblayers.conf:LCONF_VERSION:LAYER_CONF_VERSION:oecore_update_bblayers \
    conf/local.conf:CONF_VERSION:LOCALCONF_VERSION:oecore_update_localconf \
    conf/site.conf:SCONF_VERSION:SITE_CONF_VERSION:oecore_update_siteconf \
"

SANITY_DIFF_TOOL ?= "meld"

SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/local.conf.sample"
python oecore_update_localconf() {
    # Check we are using a valid local.conf. No automatic migration is
    # possible here, so always raise and ask the user to merge by hand.
    current_conf = d.getVar('CONF_VERSION')
    conf_version = d.getVar('LOCALCONF_VERSION')

    failmsg = """Your version of local.conf was generated from an older/newer version of
local.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/local.conf ${SANITY_LOCALCONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}

SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/site.conf.sample"
python oecore_update_siteconf() {
    # If we have a site.conf, check it's valid. As above, only a manual
    # merge is possible, so always raise.
    current_sconf = d.getVar('SCONF_VERSION')
    sconf_version = d.getVar('SITE_CONF_VERSION')

    failmsg = """Your version of site.conf was generated from an older version of
site.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/site.conf ${SANITY_SITECONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}

SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/templates/default/bblayers.conf.sample"
python oecore_update_bblayers() {
    # bblayers.conf is out of date, so see if we can resolve that

    current_lconf = int(d.getVar('LCONF_VERSION'))
    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))

    failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
Please compare your file against bblayers.conf.sample and merge any changes before continuing.
"${SANITY_DIFF_TOOL} conf/bblayers.conf ${SANITY_BBLAYERCONF_SAMPLE}"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    if not current_lconf:
        raise NotImplementedError(failmsg)

    lines = []

    if current_lconf < 4:
        raise NotImplementedError(failmsg)

    bblayers_fn = bblayers_conf_file(d)
    lines = sanity_conf_read(bblayers_fn)

    if current_lconf == 4 and lconf_version > 4:
        # v4 -> v5: BBPATH must contain ${TOPDIR}; splice it into an existing
        # BBPATH assignment, or insert a new one just before BBFILES.
        topdir_var = '$' + '{TOPDIR}'
        index, bbpath_line = sanity_conf_find_line('BBPATH', lines)
        if bbpath_line:
            start = bbpath_line.find('"')
            if start != -1 and (len(bbpath_line) != (start + 1)):
                if bbpath_line[start + 1] == '"':
                    # Empty value: BBPATH = ""
                    lines[index] = (bbpath_line[:start + 1] +
                                    topdir_var + bbpath_line[start + 1:])
                else:
                    if not topdir_var in bbpath_line:
                        lines[index] = (bbpath_line[:start + 1] +
                                        topdir_var + ':' + bbpath_line[start + 1:])
            else:
                raise NotImplementedError(failmsg)
        else:
            index, bbfiles_line = sanity_conf_find_line('BBFILES', lines)
            if bbfiles_line:
                lines.insert(index, 'BBPATH = "' + topdir_var + '"\n')
            else:
                raise NotImplementedError(failmsg)

        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    elif current_lconf == 5 and lconf_version > 5:
        # Null update, to avoid issues with people switching between poky and other distros
        current_lconf = 6
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return
        # Fix: removed an unreachable 'status.addresult()' call that followed
        # the return above and referenced a name undefined in this scope.

    elif current_lconf == 6 and lconf_version > 6:
        # Handle rename of meta-yocto -> meta-poky
        # This marks the start of separate version numbers but code is needed in OE-Core
        # for the migration, one last time.
        layers = d.getVar('BBLAYERS').split()
        layers = [ os.path.basename(path) for path in layers ]
        if 'meta-yocto' in layers:
            found = False
            while True:
                # replace() rewrites every occurrence in the matched line,
                # so each iteration strictly reduces remaining matches.
                index, meta_yocto_line = sanity_conf_find_line(r'.*meta-yocto[\'"\s\n]', lines)
                if meta_yocto_line:
                    lines[index] = meta_yocto_line.replace('meta-yocto', 'meta-poky')
                    found = True
                else:
                    break
            if not found:
                raise NotImplementedError(failmsg)
            index, meta_yocto_line = sanity_conf_find_line('LCONF_VERSION.*\n', lines)
            if meta_yocto_line:
                lines[index] = 'POKY_BBLAYERS_CONF_VERSION = "1"\n'
            else:
                raise NotImplementedError(failmsg)
            with open(bblayers_fn, "w") as f:
                f.write(''.join(lines))
            bb.note("Your conf/bblayers.conf has been automatically updated.")
            return
        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    raise NotImplementedError(failmsg)
}

def raise_sanity_error(msg, d, network_error=False):
    """Report a sanity failure: fire a SanityCheckFailed event when the UI
    consumes events (SANITY_USE_EVENTS == "1"), otherwise abort via bb.fatal."""
    if d.getVar("SANITY_USE_EVENTS") == "1":
        try:
            bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
        except TypeError:
            # Older bitbake whose SanityCheckFailed lacks the network_error argument
            bb.event.fire(bb.event.SanityCheckFailed(msg), d)
        return

    bb.fatal(""" OE-core's config sanity checker detected a potential misconfiguration.
    Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
    Following is the list of potential problems / advisories:

    %s""" % msg)

# Check a single tune for validity.
def check_toolchain_tune(data, tune, multilib):
    """Validate one tuning for the given multilib; return an error string
    describing any problems, or None when the tune is valid."""
    tune_errors = []
    if not tune:
        return "No tuning found for %s multilib." % multilib
    localdata = bb.data.createCopy(data)
    if multilib != "default":
        # Apply the overrides so we can look at the details.
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib
        localdata.setVar("OVERRIDES", overrides)
    bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
    features = (localdata.getVar("TUNE_FEATURES:tune-%s" % tune) or "").split()
    if not features:
        return "Tuning '%s' has no defined features, and cannot be used." % tune
    valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
    conflicts = localdata.getVarFlags('TUNECONFLICTS') or {}
    # [doc] is the documentation for the variable, not a real feature
    if 'doc' in valid_tunes:
        del valid_tunes['doc']
    if 'doc' in conflicts:
        del conflicts['doc']
    for feature in features:
        if feature in conflicts:
            for conflict in conflicts[feature].split():
                if conflict in features:
                    tune_errors.append("Feature '%s' conflicts with '%s'." %
                        (feature, conflict))
        if feature in valid_tunes:
            bb.debug(2, "  %s: %s" % (feature, valid_tunes[feature]))
        else:
            tune_errors.append("Feature '%s' is not defined." % feature)
    if tune_errors:
        return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors)

def check_toolchain(data):
    """Validate DEFAULTTUNE and all multilib tunings; return an error string
    listing all problems, or "" when everything checks out."""
    tune_error_set = []
    deftune = data.getVar("DEFAULTTUNE")
    tune_errors = check_toolchain_tune(data, deftune, 'default')
    if tune_errors:
        tune_error_set.append(tune_errors)

    multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()

    if multilibs:
        seen_libs = []
        seen_tunes = []
        for lib in multilibs:
            if lib in seen_libs:
                tune_error_set.append("The multilib '%s' appears more than once." % lib)
            else:
                seen_libs.append(lib)
            if not lib in global_multilibs:
                tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
            tune = data.getVar("DEFAULTTUNE:virtclass-multilib-%s" % lib)
            if tune in seen_tunes:
                tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
            else:
                # Fix: was seen_libs.append(tune) — tunes must be tracked in
                # seen_tunes, otherwise duplicate tunings are never detected
                # and a multilib named like a tune is flagged as a duplicate.
                seen_tunes.append(tune)
            if tune == deftune:
                tune_error_set.append("Multilib '%s' (%s) is also the default tuning." % (lib, deftune))
            else:
                tune_errors = check_toolchain_tune(data, tune, lib)
                if tune_errors:
                    tune_error_set.append(tune_errors)
    if tune_error_set:
        return "Toolchain tunings invalid:\n" + '\n'.join(tune_error_set) + "\n"

    return ""

def check_conf_exists(fn, data):
    """Return True if the (expanded) config fragment fn is readable in any
    BBPATH directory."""
    bbpath = []
    fn = data.expand(fn)
    vbbpath = data.getVar("BBPATH", False)
    if vbbpath:
        bbpath += vbbpath.split(":")
    for p in bbpath:
        currname = os.path.join(data.expand(p), fn)
        if os.access(currname, os.R_OK):
            return True
    return False

def check_create_long_filename(filepath, pathname):
    """Try to create a 200-character filename in filepath; return an error
    string if the filesystem rejects it (e.g. eCryptFS), else ""."""
    import string, random
    testfile = os.path.join(filepath, ''.join(random.choice(string.ascii_letters) for x in range(200)))
    try:
        if not os.path.exists(filepath):
            bb.utils.mkdirhier(filepath)
        f = open(testfile, "w")
        f.close()
        os.remove(testfile)
    except IOError as e:
        import errno
        err, strerror = e.args
        if err == errno.ENAMETOOLONG:
            return "Failed to create a file with a long name in %s. Please use a filesystem that does not unreasonably limit filename length.\n" % pathname
        else:
            return "Failed to create a file in %s: %s.\n" % (pathname, strerror)
    except OSError as e:
        # NOTE(review): IOError is an alias of OSError on Python 3, so this
        # branch is unreachable; kept to preserve existing behavior/messages.
        err, strerror = e.args
        return "Failed to create %s directory in which to run long name sanity check: %s.\n" % (pathname, strerror)
    return ""

def check_path_length(filepath, pathname, limit):
    """Return an error string when filepath exceeds 'limit' characters, else ""."""
    if len(filepath) > limit:
        return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit)
    return ""

def get_filesystem_id(path):
    """Return the filesystem type id of path (via stat -f -c %t), or None on error."""
    import subprocess
    try:
        return subprocess.check_output(["stat", "-f", "-c", "%t", path]).decode('utf-8').strip()
    except subprocess.CalledProcessError:
        bb.warn("Can't get filesystem id of: %s" % path)
        return None

# Check that the path isn't located on nfs.
def check_not_nfs(path, name):
    # The nfs' filesystem id is 6969
    if get_filesystem_id(path) == "6969":
        return "The %s: %s can't be located on nfs.\n" % (name, path)
    return ""

# Check that the path is on a case-sensitive file system
def check_case_sensitive(path, name):
    import tempfile
    with tempfile.NamedTemporaryFile(prefix='TmP', dir=path) as tmp_file:
        # The 'TmP' prefix only differs from its lowercased form on a
        # case-sensitive filesystem.
        if os.path.exists(tmp_file.name.lower()):
            return "The %s (%s) can't be on a case-insensitive file system.\n" % (name, path)
    return ""

# Check that path isn't a broken symlink
def check_symlink(lnk, data):
    if os.path.islink(lnk) and not os.path.exists(lnk):
        raise_sanity_error("%s is a broken symlink." % lnk, data)

def check_connectivity(d):
    """Verify network access by fetching CONNECTIVITY_CHECK_URIS; return an
    error message string on failure, or "" on success/skip."""
    # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
    # using the same syntax as for SRC_URI. If the variable is not set
    # the check is skipped
    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
    retval = ""

    bbn = d.getVar('BB_NO_NETWORK')
    if bbn not in (None, '0', '1'):
        return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn

    # Only check connectivity if network enabled and the
    # CONNECTIVITY_CHECK_URIS are set
    network_enabled = not (bbn == '1')
    check_enabled = len(test_uris)
    if check_enabled and network_enabled:
        # Take a copy of the data store and unset MIRRORS and PREMIRRORS
        data = bb.data.createCopy(d)
        data.delVar('PREMIRRORS')
        data.delVar('MIRRORS')
        try:
            fetcher = bb.fetch2.Fetch(test_uris, data)
            fetcher.checkstatus()
        except Exception as err:
            # Allow the message to be configured so that users can be
            # pointed to a support mechanism.
            msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
            if len(msg) == 0:
                msg = "%s.\n" % err
                msg += "    Please ensure your host's network is configured correctly.\n"
                msg += "    Please ensure CONNECTIVITY_CHECK_URIS is correct and specified URIs are available.\n"
                msg += "    If your ISP or network is blocking the above URL,\n"
                msg += "    try with another domain name, for example by setting:\n"
                # Fix: this line was missing its trailing \n, which glued the
                # example URI and the following advice onto one line.
                msg += "    CONNECTIVITY_CHECK_URIS = \"https://www.example.com/\"\n"
                msg += "    You could also set BB_NO_NETWORK = \"1\" to disable network\n"
                msg += "    access if all required sources are on local disk.\n"
            retval = msg

    return retval

def check_supported_distro(sanity_data):
    """Warn if the host distribution is not in SANITY_TESTED_DISTROS."""
    from fnmatch import fnmatch

    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
    if not tested_distros:
        return

    try:
        distro = oe.lsb.distro_identifier()
    except Exception:
        distro = None

    if not distro:
        bb.warn('Host distribution could not be determined; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.')
        # Fix: without this return, fnmatch(None, ...) below raises TypeError
        # when the distro could not be identified.
        return

    for supported in [x.strip() for x in tested_distros.split('\\n')]:
        if fnmatch(distro, supported):
            return

    bb.warn('Host distribution "%s" has not been validated with this version of the build system; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.' % distro)

# Checks we should only make if MACHINE is set correctly
def check_sanity_validmachine(sanity_data):
    """Return accumulated error messages for the current MACHINE's tune
    configuration; "" when everything is consistent."""
    messages = ""

    # Check TUNE_ARCH is set
    if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
        messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'

    # Check TARGET_OS is set
    if sanity_data.getVar('TARGET_OS') == 'INVALID':
        messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'

    # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
    tunepkg = sanity_data.getVar('TUNE_PKGARCH')
    defaulttune = sanity_data.getVar('DEFAULTTUNE')
    tunefound = False
    seen = {}
    dups = []

    for pa in pkgarchs.split():
        if seen.get(pa, 0) == 1:
            dups.append(pa)
        else:
            seen[pa] = 1
        if pa == tunepkg:
            tunefound = True

    if len(dups):
        messages = messages + "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups)

    if tunefound == False:
        messages = messages + "Error, the PACKAGE_ARCHS variable (%s) for DEFAULTTUNE (%s) does not contain TUNE_PKGARCH (%s)." % (pkgarchs, defaulttune, tunepkg)

    return messages

# Patch before 2.7 can't handle all the features in git-style diffs. Some
# patches may incorrectly apply, and others won't apply at all.
def check_patch_version(sanity_data):
    """Return an error message if the host 'patch' is older than 2.7, else None."""
    import re, subprocess

    try:
        result = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
        # First line is e.g. "GNU patch 2.7.6"; extract the version number
        version = re.search(r"[0-9.]+", result.splitlines()[0]).group()
        if bb.utils.vercmp_string_op(version, "2.7", "<"):
            return "Your version of patch is older than 2.7 and has bugs which will break builds. Please install a newer version of patch.\n"
        else:
            return None
    except subprocess.CalledProcessError as e:
        return "Unable to execute patch --version, exit code %d:\n%s\n" % (e.returncode, e.output)

# Glibc needs make 4.0 or later, we may as well match at this point
def check_make_version(sanity_data):
    """Return an error message for make < 4.0 or the known-broken 4.2.1
    (except on Debian-family hosts, which carry fixes), else None."""
    import subprocess

    try:
        result = subprocess.check_output(['make', '--version'], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if bb.utils.vercmp_string_op(version, "4.0", "<"):
        return "Please install a make version of 4.0 or later.\n"

    if bb.utils.vercmp_string_op(version, "4.2.1", "=="):
        # NOTE(review): assumes distro_identifier() returns a string here;
        # a None return would raise TypeError on the 'in' tests — confirm.
        distro = oe.lsb.distro_identifier()
        if "ubuntu" in distro or "debian" in distro or "linuxmint" in distro:
            return None
        return "make version 4.2.1 is known to have issues on Centos/OpenSUSE and other non-Ubuntu systems. Please use a buildtools-make-tarball or a newer version of make.\n"
    return None


# Check if we're running on WSL (Windows Subsystem for Linux).
# WSLv1 is known not to work but WSLv2 should work properly as
# long as the VHDX file is optimized often, let the user know
# upfront.
# More information on installing WSLv2 at:
# https://docs.microsoft.com/en-us/windows/wsl/wsl2-install
def check_wsl(d):
    """Return an error on WSLv1, warn on WSLv2, else None.

    WSLv1 kernels report "Microsoft" in /proc/version; WSLv2 report
    "microsoft" (lower case)."""
    with open("/proc/version", "r") as f:
        verdata = f.readlines()
    for l in verdata:
        if "Microsoft" in l:
            return "OpenEmbedded doesn't work under WSLv1, please upgrade to WSLv2 if you want to run builds on Windows"
        elif "microsoft" in l:
            bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
    return None

# Require at least gcc version 8.0
#
# This can be fixed on CentOS-7 with devtoolset-6+
# https://www.softwarecollections.org/en/scls/rhscl/devtoolset-6/
#
# A less invasive fix is with scripts/install-buildtools (or with user
# built buildtools-extended-tarball)
#
def check_gcc_version(sanity_data):
    """Return an error message if the host compiler is gcc older than 8.0, else None."""
    import subprocess

    build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
    if build_cc.strip() == "gcc":
        if bb.utils.vercmp_string_op(version, "8.0", "<"):
            return "Your version of gcc is older than 8.0 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
    return None

# Tar version 1.24 and onwards handle overwriting symlinks correctly
# but earlier versions do not; this needs to work properly for sstate
# Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled
def check_tar_version(sanity_data):
    """Return an error message if tar is older than 1.28 or lacks --xattrs
    (i.e. is not GNU tar), else None."""
    import subprocess
    try:
        result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[3]
    if bb.utils.vercmp_string_op(version, "1.28", "<"):
        return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"

    try:
        result = subprocess.check_output(["tar", "--help"], stderr=subprocess.STDOUT).decode('utf-8')
        if "--xattrs" not in result:
            return "Your tar doesn't support --xattrs, please use GNU tar.\n"
    except subprocess.CalledProcessError as e:
        return "Unable to execute tar --help, exit code %d\n%s\n" % (e.returncode, e.output)

    return None

# We use git parameters and functionality only found in 1.7.8 or later
# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162
# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped
def check_git_version(sanity_data):
    """Return an error message if git is older than 1.8.3.1, else None."""
    import subprocess
    try:
        result = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute git --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if bb.utils.vercmp_string_op(version, "1.8.3.1", "<"):
        return "Your version of git is older than 1.8.3.1 and has bugs which will break builds. Please install a newer version of git.\n"
    return None

# Check the required perl modules which may not be installed by default
def check_perl_modules(sanity_data):
    """Return an error message listing any missing perl modules, else None."""
    import subprocess
    ret = ""
    modules = ( "Text::ParseWords", "Thread::Queue", "Data::Dumper", "File::Compare", "File::Copy", "open ':std'", "FindBin" )
    errresult = ''
    for m in modules:
        try:
            # 'perl -e "use Module"' exits non-zero if the module is missing
            subprocess.check_output(["perl", "-e", "use %s" % m])
        except subprocess.CalledProcessError as e:
            errresult += bytes.decode(e.output)
            ret += "%s " % m
    if ret:
        return "Required perl module(s) not found: %s\n\n%s\n" % (ret, errresult)
    return None

def sanity_check_conffiles(d):
    """Run the registered BBLAYERS_CONF_UPDATE_FUNCS migration functions for
    any config file whose version variable differs from the required one."""
    funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
    for func in funcs:
        conffile, current_version, required_version, func = func.split(":")
        if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
                d.getVar(current_version) != d.getVar(required_version):
            try:
                # Migration functions raise NotImplementedError on failure
                bb.build.exec_func(func, d)
            except NotImplementedError as e:
                bb.fatal(str(e))
            # NOTE(review): presumably signals that the config was modified
            # and needs re-parsing — verify against the consumer of this flag.
            d.setVar("BB_INVALIDCONF", True)

def drop_v14_cross_builds(d):
    """Remove stamps, manifests and workdirs recorded in the per-arch sstate
    manifest indexes (part of the v14 -> v15 TMPDIR ABI migration)."""
    import glob
    indexes = glob.glob(d.expand("${SSTATE_MANIFESTS}/index-${BUILD_ARCH}_*"))
    for i in indexes:
        with open(i, "r") as f:
            lines = f.readlines()
        for l in reversed(lines):
            try:
                (stamp, manifest, workdir) = l.split()
            except ValueError:
                bb.fatal("Invalid line '%s' in sstate manifest '%s'" % (l, i))
            for m in glob.glob(manifest + ".*"):
                if m.endswith(".postrm"):
                    continue
                # sstate_clean_manifest is provided elsewhere in the metadata
                sstate_clean_manifest(m, d)
            bb.utils.remove(stamp + "*")
            bb.utils.remove(workdir, recurse = True)

def sanity_handle_abichanges(status, d):
    #
    # Check the 'ABI' of TMPDIR
    #
    import subprocess

    current_abi = d.getVar('OELAYOUT_ABI')
    abifile = d.getVar('SANITY_ABIFILE')
    if os.path.exists(abifile):
        with open(abifile, "r") as f:
            abi = f.read().strip()
        if not abi.isdigit():
            # Unparseable recorded ABI: just overwrite with the current one
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif int(abi) <= 11 and current_abi == "12":
            status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR"))
        elif int(abi) <= 13 and current_abi == "14":
            status.addresult("TMPDIR changed to include path filtering from the pseudo database.\nIt is recommended to use a clean TMPDIR with the new pseudo path filtering so TMPDIR (%s) would need to be removed to continue.\n" % d.getVar("TMPDIR"))
        elif int(abi) == 14 and current_abi == "15":
            # The only automatic migration: clean v14 cross builds then record the new ABI
            drop_v14_cross_builds(d)
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif (abi != current_abi):
            # Code to convert from one ABI to another could go here if possible.
            status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
    else:
        # No recorded ABI yet (fresh TMPDIR): record the current one
        with open(abifile, "w") as f:
            f.write(current_abi)

def check_sanity_sstate_dir_change(sstate_dir, data):
    # Sanity checks to be done when the value of SSTATE_DIR changes

    # Check that SSTATE_DIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    testmsg = ""
    if sstate_dir != "":
        testmsg = check_create_long_filename(sstate_dir, "SSTATE_DIR")
        # If we don't have permissions to SSTATE_DIR, suggest the user set it as an SSTATE_MIRRORS
        try:
            err = testmsg.split(': ')[1].strip()
            if err == "Permission denied.":
                testmsg = testmsg + "You could try using %s in SSTATE_MIRRORS rather than as an SSTATE_CACHE.\n" % (sstate_dir)
        except IndexError:
            pass
    return testmsg

def check_sanity_version_change(status, d):
    # Sanity checks to be done when SANITY_VERSION or NATIVELSBSTRING changes
    # In other words, these tests run once in a given build directory and then
    # never again until the sanity version or host distribution id/version changes.

    # Check the python install is complete. Examples that are often removed in
    # minimal installations: glib-2.0-natives requires xml.parsers.expat
    try:
        import xml.parsers.expat
    except ImportError as e:
        status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)

    status.addresult(check_gcc_version(d))
    status.addresult(check_make_version(d))
    status.addresult(check_patch_version(d))
    status.addresult(check_tar_version(d))
    status.addresult(check_git_version(d))
    status.addresult(check_perl_modules(d))
    status.addresult(check_wsl(d))

    missing = ""

    if not check_app_exists("${MAKE}", d):
        missing = missing + "GNU make,"

    if not check_app_exists('${BUILD_CC}', d):
        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC")

    if not check_app_exists('${BUILD_CXX}', d):
        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX")

    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')

    for util in required_utilities.split():
        if not check_app_exists(util, d):
            missing = missing + "%s," % util

    if missing:
        missing = missing.rstrip(',')
        status.addresult("Please install the following missing utilities: %s\n" % missing)

    assume_provided = d.getVar('ASSUME_PROVIDED').split()
    # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
    if "diffstat-native" not in assume_provided:
        status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')

    # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    import stat
    tmpdir = d.getVar('TMPDIR')
    status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
    tmpdirmode = os.stat(tmpdir).st_mode
    if (tmpdirmode & stat.S_ISGID):
        status.addresult("TMPDIR is setgid, please don't build in a setgid directory")
    if (tmpdirmode & stat.S_ISUID):
        status.addresult("TMPDIR is setuid, please don't build in a setuid directory")

    # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    workdir = d.getVar('WORKDIR', expand=True)
    for i in pseudoignorepaths:
        if i and workdir.startswith(i):
            status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")

    # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
    pseudocontroldir = d.expand(pseudo_control_dir).split(",")
    for i in pseudoignorepaths:
        for j in pseudocontroldir:
            if i and j:
                if j.startswith(i):
                    status.addresult("A path included in PSEUDO_IGNORE_PATHS " + str(i) + " and the path " + str(j) + " overlap and this will break pseudo permission and ownership tracking. Please set the path " + str(j) + " to a different directory which does not overlap with pseudo controlled directories. \n")

    # Some third-party software apparently relies on chmod etc. being suid root (!!)
    import stat
    suid_check_bins = "chown chmod mknod".split()
    for bin_cmd in suid_check_bins:
        bin_path = bb.utils.which(os.environ["PATH"], bin_cmd)
        if bin_path:
            bin_stat = os.stat(bin_path)
            if bin_stat.st_uid == 0 and bin_stat.st_mode & stat.S_ISUID:
                status.addresult('%s has the setuid bit set. This interferes with pseudo and may cause other issues that break the build process.\n' % bin_path)

    # Check that we can fetch from various network transports
    netcheck = check_connectivity(d)
    status.addresult(netcheck)
    if netcheck:
        status.network_error = True

    nolibs = d.getVar('NO32LIBS')
    if not nolibs:
        lib32path = '/lib'
        if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
            lib32path = '/lib32'

        if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
            status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")

    bbpaths = d.getVar('BBPATH').split(":")
    if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
        status.addresult("BBPATH references the current directory, either through " \
                "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\
                "layer configuration is adding empty elements to BBPATH.\n\t "\
                "Please check your layer.conf files and other BBPATH " \
                "settings to remove the current working directory " \
                "references.\n" \
                "Parsed BBPATH is" + str(bbpaths));

    oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF')
    if not oes_bb_conf:
        status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')

    # The length of TMPDIR can't be longer than 410
    status.addresult(check_path_length(tmpdir, "TMPDIR", 410))

    # Check that TMPDIR isn't located on nfs
    status.addresult(check_not_nfs(tmpdir, "TMPDIR"))

    # Check for case-insensitive file systems (such as Linux in Docker on
    # macOS with default HFS+ file system)
    status.addresult(check_case_sensitive(tmpdir, "TMPDIR"))

def sanity_check_locale(d):
    """
    Currently bitbake switches locale to en_US.UTF-8 so check that this locale actually exists.
    """
    import locale
    try:
        locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
    except locale.Error:
        raise_sanity_error("Your system needs to support the en_US.UTF-8 locale.", d)

def check_sanity_everybuild(status, d):
    import os, stat
    # Sanity tests which test the users environment so need to run at each build (or are so cheap
    # it makes sense to always run them.

    if 0 == os.getuid():
        raise_sanity_error("Do not use Bitbake as root.", d)

    # Check the Python version, we now have a minimum of Python 3.8
    import sys
    if sys.hexversion < 0x030800F0:
        status.addresult('The system requires at least Python 3.8 to run. Please update your Python interpreter.\n')

    # Check the bitbake version meets minimum requirements
    minversion = d.getVar('BB_MIN_VERSION')
    if bb.utils.vercmp_string_op(bb.__version__, minversion, "<"):
        status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))

    sanity_check_locale(d)

    paths = d.getVar('PATH').split(":")
    if "." in paths or "./" in paths or "" in paths:
        status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")

    #Check if bitbake is present in PATH environment variable
    bb_check = bb.utils.which(d.getVar('PATH'), 'bitbake')
    if not bb_check:
        bb.warn("bitbake binary is not found in PATH, did you source the script?")

    # Check whether 'inherit' directive is found (used for a class to inherit)
    # in conf file it's supposed to be uppercase INHERIT
    inherit = d.getVar('inherit')
    if inherit:
        status.addresult("Please don't use inherit directive in your local.conf. The directive is supposed to be used in classes and recipes only to inherit of bbclasses. Here INHERIT should be used.\n")

    # Check that the DISTRO is valid, if set
    # need to take into account DISTRO renaming DISTRO
    distro = d.getVar('DISTRO')
    if distro and distro != "nodistro":
        if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))

    # Check that these variables don't use tilde-expansion as we don't do that
    for v in ("TMPDIR", "DL_DIR", "SSTATE_DIR"):
        if d.getVar(v).startswith("~"):
            status.addresult("%s uses ~ but Bitbake will not expand this, use an absolute path or variables." % v)

    # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
    # set, since so much relies on it being set.
    dldir = d.getVar('DL_DIR')
    if not dldir:
        status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
    if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
        status.addresult("DL_DIR: %s exists but you do not appear to have write access to it. \n" % dldir)
    check_symlink(dldir, d)

    # Check that the MACHINE is valid, if it is set
    machinevalid = True
    if d.getVar('MACHINE'):
        if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
            status.addresult('MACHINE=%s is invalid. Please set a valid MACHINE in your local.conf, environment or other configuration file.\n' % (d.getVar('MACHINE')))
            machinevalid = False
        else:
            status.addresult(check_sanity_validmachine(d))
    else:
        status.addresult('Please set a MACHINE in your local.conf or environment\n')
        machinevalid = False
    if machinevalid:
        status.addresult(check_toolchain(d))

    # Check that the SDKMACHINE is valid, if it is set
    if d.getVar('SDKMACHINE'):
        if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
            status.addresult('Specified SDKMACHINE value is not valid\n')
        elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
            status.addresult('SDKMACHINE is set, but SDK_ARCH has not been changed as a result - SDKMACHINE may have been set too late (e.g.
in the distro configuration)\n') 830 831 # If SDK_VENDOR looks like "-my-sdk" then the triples are badly formed so fail early 832 sdkvendor = d.getVar("SDK_VENDOR") 833 if not (sdkvendor.startswith("-") and sdkvendor.count("-") == 1): 834 status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash; found '%s'\n" % sdkvendor) 835 836 check_supported_distro(d) 837 838 omask = os.umask(0o022) 839 if omask & 0o755: 840 status.addresult("Please use a umask which allows a+rx and u+rwx\n") 841 os.umask(omask) 842 843 # Ensure /tmp is NOT mounted with noexec 844 if os.statvfs("/tmp").f_flag & os.ST_NOEXEC: 845 raise_sanity_error("/tmp shouldn't be mounted with noexec.", d) 846 847 if d.getVar('TARGET_ARCH') == "arm": 848 # This path is no longer user-readable in modern (very recent) Linux 849 try: 850 if os.path.exists("/proc/sys/vm/mmap_min_addr"): 851 f = open("/proc/sys/vm/mmap_min_addr", "r") 852 try: 853 if (int(f.read().strip()) > 65536): 854 status.addresult("/proc/sys/vm/mmap_min_addr is not <= 65536. This will cause problems with qemu so please fix the value (as root).\n\nTo fix this in later reboots, set vm.mmap_min_addr = 65536 in /etc/sysctl.conf.\n") 855 finally: 856 f.close() 857 except: 858 pass 859 860 for checkdir in ['COREBASE', 'TMPDIR']: 861 val = d.getVar(checkdir) 862 if val.find('..') != -1: 863 status.addresult("Error, you have '..' in your %s directory path. Please ensure the variable contains an absolute path as this can break some recipe builds in obtuse ways." % checkdir) 864 if val.find('+') != -1: 865 status.addresult("Error, you have an invalid character (+) in your %s directory path. Please move the installation to a directory which doesn't include any + characters." % checkdir) 866 if val.find('@') != -1: 867 status.addresult("Error, you have an invalid character (@) in your %s directory path. Please move the installation to a directory which doesn't include any @ characters." 
% checkdir) 868 if val.find(' ') != -1: 869 status.addresult("Error, you have a space in your %s directory path. Please move the installation to a directory which doesn't include a space since autotools doesn't support this." % checkdir) 870 if val.find('%') != -1: 871 status.addresult("Error, you have an invalid character (%) in your %s directory path which causes problems with python string formatting. Please move the installation to a directory which doesn't include any % characters." % checkdir) 872 873 # Check the format of MIRRORS, PREMIRRORS and SSTATE_MIRRORS 874 import re 875 mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS'] 876 protocols = ['http', 'ftp', 'file', 'https', \ 877 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \ 878 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3', \ 879 'az', 'ftps', 'crate', 'gs'] 880 for mirror_var in mirror_vars: 881 mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split() 882 883 # Split into pairs 884 if len(mirrors) % 2 != 0: 885 bb.warn('Invalid mirror variable value for %s: %s, should contain paired members.' 
% (mirror_var, str(mirrors))) 886 continue 887 mirrors = list(zip(*[iter(mirrors)]*2)) 888 889 for mirror_entry in mirrors: 890 pattern, mirror = mirror_entry 891 892 decoded = bb.fetch2.decodeurl(pattern) 893 try: 894 pattern_scheme = re.compile(decoded[0]) 895 except re.error as exc: 896 bb.warn('Invalid scheme regex (%s) in %s; %s' % (pattern, mirror_var, mirror_entry)) 897 continue 898 899 if not any(pattern_scheme.match(protocol) for protocol in protocols): 900 bb.warn('Invalid protocol (%s) in %s: %s' % (decoded[0], mirror_var, mirror_entry)) 901 continue 902 903 if not any(mirror.startswith(protocol + '://') for protocol in protocols): 904 bb.warn('Invalid protocol in %s: %s' % (mirror_var, mirror_entry)) 905 continue 906 907 if mirror.startswith('file://'): 908 import urllib 909 check_symlink(urllib.parse.urlparse(mirror).path, d) 910 # SSTATE_MIRROR ends with a /PATH string 911 if mirror.endswith('/PATH'): 912 # remove /PATH$ from SSTATE_MIRROR to get a working 913 # base directory path 914 mirror_base = urllib.parse.urlparse(mirror[:-1*len('/PATH')]).path 915 check_symlink(mirror_base, d) 916 917 # Check sstate mirrors aren't being used with a local hash server and no remote 918 hashserv = d.getVar("BB_HASHSERVE") 919 if d.getVar("SSTATE_MIRRORS") and hashserv and hashserv.startswith("unix://") and not d.getVar("BB_HASHSERVE_UPSTREAM"): 920 bb.warn("You are using a local hash equivalence server but have configured an sstate mirror. This will likely mean no sstate will match from the mirror. 
You may wish to disable the hash equivalence use (BB_HASHSERVE), or use a hash equivalence server alongside the sstate mirror.") 921 922 # Check that TMPDIR hasn't changed location since the last time we were run 923 tmpdir = d.getVar('TMPDIR') 924 checkfile = os.path.join(tmpdir, "saved_tmpdir") 925 if os.path.exists(checkfile): 926 with open(checkfile, "r") as f: 927 saved_tmpdir = f.read().strip() 928 if (saved_tmpdir != tmpdir): 929 status.addresult("Error, TMPDIR has changed location. You need to either move it back to %s or delete it and rebuild\n" % saved_tmpdir) 930 else: 931 bb.utils.mkdirhier(tmpdir) 932 # Remove setuid, setgid and sticky bits from TMPDIR 933 try: 934 os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID) 935 os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID) 936 os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX) 937 except OSError as exc: 938 bb.warn("Unable to chmod TMPDIR: %s" % exc) 939 with open(checkfile, "w") as f: 940 f.write(tmpdir) 941 942 # If /bin/sh is a symlink, check that it points to dash or bash 943 if os.path.islink('/bin/sh'): 944 real_sh = os.path.realpath('/bin/sh') 945 # Due to update-alternatives, the shell name may take various 946 # forms, such as /bin/dash, bin/bash, /bin/bash.bash ... 
        if '/dash' not in real_sh and '/bash' not in real_sh:
            status.addresult("Error, /bin/sh links to %s, must be dash or bash\n" % real_sh)

def check_sanity(sanity_data):
    """
    Top-level sanity check entry point.

    Runs the per-build checks, compares the current configuration against the
    cached results of the previous run (sanity version, TMPDIR, SSTATE_DIR,
    host distribution string), refreshes the cache file when everything
    passed, and raises a sanity error if any check produced a message.
    """
    # Accumulator for check results; failure text is collected here and the
    # error is raised once, at the end, with all messages combined.
    class SanityStatus(object):
        def __init__(self):
            self.messages = ""          # concatenated failure messages
            self.network_error = False  # set when connectivity checks fail

        def addresult(self, message):
            # Checks return None or "" on success, so only append real messages.
            if message:
                self.messages = self.messages + message

    status = SanityStatus()

    tmpdir = sanity_data.getVar('TMPDIR')
    sstate_dir = sanity_data.getVar('SSTATE_DIR')

    check_symlink(sstate_dir, sanity_data)

    # Check saved sanity info (results cached from the previous run under TOPDIR)
    last_sanity_version = 0
    last_tmpdir = ""
    last_sstate_dir = ""
    last_nativelsbstr = ""
    sanityverfile = sanity_data.expand("${TOPDIR}/cache/sanity_info")
    if os.path.exists(sanityverfile):
        with open(sanityverfile, 'r') as f:
            for line in f:
                if line.startswith('SANITY_VERSION'):
                    last_sanity_version = int(line.split()[1])
                if line.startswith('TMPDIR'):
                    last_tmpdir = line.split()[1]
                if line.startswith('SSTATE_DIR'):
                    last_sstate_dir = line.split()[1]
                if line.startswith('NATIVELSBSTRING'):
                    last_nativelsbstr = line.split()[1]

    check_sanity_everybuild(status, sanity_data)

    sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
    network_error = False
    # NATIVELSBSTRING var may have been overridden with "universal", so
    # get actual host distribution id and version
    nativelsbstr = lsb_distro_identifier(sanity_data)
    # Run the full version-change checks only when the sanity version or the
    # host distribution changed since the last recorded run.
    if last_sanity_version < sanity_version or last_nativelsbstr != nativelsbstr:
        check_sanity_version_change(status, sanity_data)
        status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))
    else:
        if last_sstate_dir != sstate_dir:
            status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))

    # Only rewrite the cache when every check passed, so that a failing
    # configuration is re-checked on the next run.
    if os.path.exists(os.path.dirname(sanityverfile)) and not status.messages:
        with open(sanityverfile, 'w') as f:
            f.write("SANITY_VERSION %s\n" % sanity_version)
            f.write("TMPDIR %s\n" % tmpdir)
            f.write("SSTATE_DIR %s\n" % sstate_dir)
            f.write("NATIVELSBSTRING %s\n" % nativelsbstr)

    sanity_handle_abichanges(status, sanity_data)

    # Any accumulated message means a check failed; raise with the collected
    # text (expanded so ${VAR} references inside messages are resolved).
    if status.messages != "":
        raise_sanity_error(sanity_data.expand(status.messages), sanity_data, status.network_error)

# Re-validate the configuration files every time the base configuration
# is (re)parsed.
addhandler config_reparse_eventhandler
config_reparse_eventhandler[eventmask] = "bb.event.ConfigParsed"
python config_reparse_eventhandler() {
    sanity_check_conffiles(e.data)
}

# Run the full sanity check on SanityCheck events, and only the connectivity
# check on NetworkTest events, firing Passed/Failed events when requested.
addhandler check_sanity_eventhandler
check_sanity_eventhandler[eventmask] = "bb.event.SanityCheck bb.event.NetworkTest"
python check_sanity_eventhandler() {
    if bb.event.getName(e) == "SanityCheck":
        # Operate on a copy of the datastore so the SANITY_USE_EVENTS
        # setting stays local to this handler.
        sanity_data = bb.data.createCopy(e.data)
        check_sanity(sanity_data)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        # Only reached if check_sanity() did not raise.
        bb.event.fire(bb.event.SanityCheckPassed(), e.data)
    elif bb.event.getName(e) == "NetworkTest":
        sanity_data = bb.data.createCopy(e.data)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        # A truthy return from check_connectivity() indicates a failure message.
        bb.event.fire(bb.event.NetworkTestFailed() if check_connectivity(sanity_data) else bb.event.NetworkTestPassed(), e.data)

    return
}