# xref: /openbmc/openbmc/poky/bitbake/lib/bb/utils.py (revision 96e4b4e121e0e2da1535d7d537d6a982a6ff5bc0)
1"""
2BitBake Utility Functions
3"""
4
5# Copyright (C) 2004 Michael Lauer
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re, fcntl, os, string, stat, shutil, time
11import sys
12import errno
13import logging
14import bb
15import bb.msg
16import locale
17import multiprocessing
18import fcntl
19import importlib
20import importlib.machinery
21import importlib.util
22import itertools
23import subprocess
24import glob
25import fnmatch
26import traceback
27import errno
28import signal
29import collections
30import copy
31import ctypes
32import random
33import socket
34import struct
35import tempfile
36from subprocess import getstatusoutput
37from contextlib import contextmanager
38from ctypes import cdll
39
40logger = logging.getLogger("BitBake.Util")
41python_extensions = importlib.machinery.all_suffixes()
42
43
def clean_context():
    """Return a fresh, minimal global namespace for executing python code."""
    return dict(os=os, bb=bb, time=time)
50
def get_context():
    """Return the shared global namespace used by better_exec/better_eval."""
    return _context
53
54
def set_context(ctx):
    """Replace the shared global namespace used by better_exec/better_eval.

    Bug fix: the previous implementation assigned to a local variable named
    _context, so the module-level context was never actually replaced.
    """
    global _context
    _context = ctx
57
58# Context used in better_exec, eval
59_context = clean_context()
60
class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found,
    e.g. a version constraint with a missing or unknown comparison operator."""
63
def explode_version(s):
    """
    Break a version string into a list of (weight, value) components.

    Digit runs become (0, int), letter runs (1, str), '~' becomes (-1, '~')
    (sorting before anything else) and any other character (2, char), so the
    resulting lists compare element-wise in the deb/rpm-style ordering used
    by vercmp_part().
    """
    numeric_re = re.compile(r'^(\d+)(.*)$')
    alpha_re = re.compile(r'^([a-zA-Z]+)(.*)$')
    parts = []
    while s:
        head = s[0]
        if head in string.digits:
            m = numeric_re.match(s)
            parts.append((0, int(m.group(1))))
            s = m.group(2)
        elif head in string.ascii_letters:
            m = alpha_re.match(s)
            parts.append((1, m.group(1)))
            s = m.group(2)
        else:
            # '~' sorts before everything else; any other separator after
            # digits and letters.
            parts.append((-1, head) if head == '~' else (2, head))
            s = s[1:]
    return parts
85
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR).

    Returns a (epoch, version, revision) tuple; epoch defaults to 0 and
    revision to "" when not present. Leading/trailing comparison characters
    (spaces, '<', '>', '=') are stripped first.
    """
    s = s.strip(" <>=")
    epoch = 0
    if s.count(':'):
        fields = s.split(':')
        epoch = int(fields[0])
        s = fields[1]
    revision = ""
    if s.count('-'):
        s, _, revision = s.rpartition('-')
    return (epoch, s, revision)
99
def vercmp_part(a, b):
    """
    Compare two version-part strings (PV or PR) component-wise using
    explode_version(). Returns -1, 0 or 1 when a is older than, equal to or
    newer than b. An exhausted list is padded with (0, None), which sorts
    below any real component of the same weight.
    """
    parts_a = explode_version(a)
    parts_b = explode_version(b)
    while True:
        oa, ca = parts_a.pop(0) if parts_a else (0, None)
        ob, cb = parts_b.pop(0) if parts_b else (0, None)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            # Both strings fully consumed with no difference found.
            return 0
        if oa != ob:
            return -1 if oa < ob else 1
        # Same weight: a missing component sorts before a present one.
        if ca is None:
            return -1
        if cb is None:
            return 1
        if ca != cb:
            return -1 if ca < cb else 1
126
def vercmp(ta, tb):
    """
    Compare two (epoch, version, revision) tuples as produced by
    split_version(). Epochs are compared numerically first, then the
    version and finally the revision via vercmp_part().
    """
    (ea, va, ra), (eb, vb, rb) = ta, tb

    result = int(ea or 0) - int(eb or 0)
    if result == 0:
        result = vercmp_part(va, vb)
    if result == 0:
        result = vercmp_part(ra, rb)
    return result
137
def vercmp_string(a, b):
    """Split version strings and compare them"""
    return vercmp(split_version(a), split_version(b))
143
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.

    Raises VersionStringException for an operator it does not recognise.
    """
    res = vercmp_string(a, b)
    if op in ('=', '=='):
        return res == 0
    if op == '<=':
        return res <= 0
    if op == '>=':
        return res >= 0
    if op in ('>', '>>'):
        return res > 0
    if op in ('<', '<<'):
        return res < 0
    if op == '!=':
        return res != 0
    raise VersionStringException('Unsupported comparison operator "%s"' % op)
165
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            in_version = True
        if not in_version:
            deps.append(token)
        elif token.endswith(')'):
            # Closing parenthesis may be on the same token as the opener.
            in_version = False
    return deps
189
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary mapping each dependency name to a list of
    version constraint strings such as ">= 1.0" (empty if unversioned).

    Raises VersionStringException when a version specification has a
    missing or invalid comparison operator.

    Fixes: removed unreachable statements that followed the raise, and
    made the token strip effective (str.strip() returns a new string;
    its result was previously discarded).
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False       # inside "(<op>" - expecting the comparison operator
    inversion = False   # inside "(...)" - accumulating the version string
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
            i = i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        # Plain dependency name token.
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
256
def explode_dep_versions(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    skip null value and items appeared in dependency string multiple times
    and return a dictionary of dependencies and versions.

    Unlike explode_dep_versions2(), each value is a single version string
    (or None); extra constraints beyond the first trigger a warning.
    """
    deps = explode_dep_versions2(s)
    for name, versions in deps.items():
        if not versions:
            deps[name] = None
            continue
        if len(versions) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values.  explode_dep_versions cannot cope with this." % (name, s))
        deps[name] = versions[0]
    return deps
273
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string

    deps maps a dependency name to None, a version string, or a list of
    version strings; commasep selects ", " (default) or " " as separator.
    """
    parts = []
    for name in deps:
        constraint = deps[name]
        if not constraint:
            parts.append(name)
        elif isinstance(constraint, list):
            parts.extend(name + " (" + v + ")" for v in constraint)
        else:
            parts.append(name + " (" + constraint + ")")
    separator = ", " if commasep else " "
    return separator.join(parts)
292
293def _print_trace(body, line):
294    """
295    Print the Environment of a Text Body
296    """
297    error = []
298    # print the environment of the method
299    min_line = max(1, line-4)
300    max_line = min(line + 4, len(body))
301    for i in range(min_line, max_line + 1):
302        if line == i:
303            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
304        else:
305            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
306    return error
307
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.

    Compiled code objects are cached in bb.methodpool keyed on the source
    text; lineno pads the source with blank lines so that tracebacks line
    up with realfile.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        # Fix: not every exception from compile() carries a lineno (e.g. a
        # ValueError for source containing null bytes), so don't assume the
        # attribute exists - doing so masked the original error.
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, getattr(e, "lineno", None)))
        if hasattr(e, "lineno"):
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in reaflile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
343
def _print_exception(t, value, tb, realfile, text, context):
    """
    Log a detailed report for an exception raised while executing compiled
    python code (see better_exec).

    t/value/tb are the sys.exc_info() triple, realfile the filename the code
    was compiled under, text the executed source and context the variable
    context it ran with (not used directly here).
    """
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            # NOTE(review): the second clause compares the next frame's function
            # name with the current frame's *filename* - possibly intended to be
            # tbextract[level][2]; confirm upstream before changing.
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        # Emit whatever was collected even if report building itself failed.
        logger.error("\n".join(error))
396
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    code may be a source string (compiled on the fly via better_compile) or
    an existing code object; context is the locals dictionary the code runs
    with. If pythonexception is True the original exception propagates
    instead of being logged and wrapped in bb.BBHandledException.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        # A plain string - compile it first so tracebacks reference realfile.
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e2:
            # Never let the error reporter itself take the process down.
            logger.error("Exception handler error: %s" % str(e2))

        e = bb.BBHandledException(e)
        raise e
424
def simple_exec(code, context):
    """Execute code with the shared global context and the given locals,
    without the error reporting that better_exec provides."""
    exec(code, get_context(), context)
427
def better_eval(source, locals, extraglobals = None):
    """
    Evaluate source with the shared global context (optionally augmented
    with extraglobals) and the supplied locals.
    """
    globalns = get_context()
    if extraglobals:
        # Shallow-copy so the shared context is not polluted.
        globalns = copy.copy(globalns)
        globalns.update(extraglobals)
    return eval(source, globalns, locals)
435
@contextmanager
def fileslocked(files, *args, **kwargs):
    """Context manager for locking and unlocking file locks.

    Acquires a lock (via bb.utils.lockfile, forwarding *args/**kwargs) for
    every name in *files*, yields, then releases them in reverse order.
    """
    held = []
    for name in (files or []):
        lock = bb.utils.lockfile(name, *args, **kwargs)
        if lock is not None:
            held.append(lock)

    try:
        yield
    finally:
        # Release in reverse acquisition order.
        for lock in reversed(held):
            bb.utils.unlockfile(lock)
452
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        shared: True to take a shared (read) lock rather than exclusive
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    basename = os.path.basename(name)
    if len(basename) > 255:
        # Most filesystems limit a name component to 255 bytes; keep the
        # extension and truncate the stem.
        root, ext = os.path.splitext(basename)
        basename = root[:255 - len(ext)] + ext

    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    name = os.path.join(dirname, basename)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
                # Fix: the format arguments were swapped - the error text was
                # logged as the lock name and vice versa.
                logger.error("Unable to acquire lock '%s', %s",
                             name, e.strerror)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
518
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()

    Attempts to delete the lock file as well; the file is closed either way.
    """
    fd = lf.fileno()
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(fd, fcntl.LOCK_UN)
    lf.close()
531    lf.close()
532
533def _hasher(method, filename):
534    import mmap
535
536    with open(filename, "rb") as f:
537        try:
538            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
539                for chunk in iter(lambda: mm.read(8192), b''):
540                    method.update(chunk)
541        except ValueError:
542            # You can't mmap() an empty file so silence this exception
543            pass
544    return method.hexdigest()
545
546
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib
    try:
        digest = hashlib.new('MD5', usedforsecurity=False)
    except TypeError:
        # Older Python builds don't accept the usedforsecurity keyword.
        digest = hashlib.new('MD5')
    return _hasher(digest, filename)
558
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    from hashlib import sha256
    return _hasher(sha256(), filename)
566
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    from hashlib import sha1
    return _hasher(sha1(), filename)
573
def sha384_file(filename):
    """
    Return the hex string representation of the SHA384 checksum of the filename
    """
    from hashlib import sha384
    return _hasher(sha384(), filename)
580
def sha512_file(filename):
    """
    Return the hex string representation of the SHA512 checksum of the filename
    """
    from hashlib import sha512
    return _hasher(sha512(), filename)
587
def goh1_file(filename):
    """
    Return the hex string representation of the Go mod h1 checksum of the
    filename. The Go mod h1 checksum uses the Go dirhash package. The package
    defines hashes over directory trees and is used by go mod for mod files and
    zip archives.
    """
    import hashlib
    import zipfile

    lines = []
    if zipfile.is_zipfile(filename):
        # Hash every member of the archive in sorted name order.
        with zipfile.ZipFile(filename) as archive:
            for member in sorted(archive.namelist()):
                digest = hashlib.sha256(archive.read(member)).hexdigest()
                lines.append("%s  %s\n" % (digest, member))
    else:
        digest = _hasher(hashlib.sha256(), filename)
        lines.append("%s  go.mod\n" % digest)
    # The h1 value is the SHA-256 of the per-file digest listing.
    summary = hashlib.sha256("".join(lines).encode('utf-8'))
    return summary.hexdigest()
612
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    names = (
        'BB_TASKHASH',
        'HOME',
        'LOGNAME',
        'PATH',
        'PWD',
        'SHELL',
        'USER',
        'LC_ALL',
        'BBSERVER',
    )
    return list(names)
627
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    return [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_PASSTHROUGH_ADDITIONS',
    ] + preserved_envvars_exported()
636
def check_system_locale():
    """Make sure the required system locale are available and configured.

    Exits the process with an explanatory message if en_US.UTF-8 is missing
    or the filesystem encoding is not UTF-8.
    """
    default_locale = locale.getlocale(locale.LC_CTYPE)

    try:
        locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
    except locale.Error:
        # Narrowed from a bare except: setlocale() signals an unavailable
        # locale with locale.Error; anything else should propagate.
        sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
    else:
        # Restore whatever the process started with.
        locale.setlocale(locale.LC_CTYPE, default_locale)

    if sys.getfilesystemencoding() != "utf-8":
        sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
                 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
651
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict of the variables that were removed.
    """
    removed_vars = {}
    for key in list(os.environ):
        if key not in good_vars:
            removed_vars[key] = os.environ.pop(key)

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
676
def approved_variables():
    """
    Determine and return the list of variables which are approved
    to remain in the environment.
    """
    env = os.environ
    if 'BB_PRESERVE_ENV' in env:
        # Everything stays.
        return env.keys()
    if 'BB_ENV_PASSTHROUGH' in env:
        approved = env['BB_ENV_PASSTHROUGH'].split()
        approved.append('BB_ENV_PASSTHROUGH')
    else:
        approved = preserved_envvars()
    if 'BB_ENV_PASSTHROUGH_ADDITIONS' in env:
        approved.extend(env['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
        if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
            approved.append('BB_ENV_PASSTHROUGH_ADDITIONS')
    return approved
695
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.

    Returns the dict of removed variables (empty when BB_PRESERVE_ENV is set).
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return {}
    return filter_environment(approved_variables())
706
def empty_environment():
    """
    Remove all variables from the environment.
    """
    for name in list(os.environ.keys()):
        # unsetenv as well as deleting from the mapping, matching the
        # original belt-and-braces behaviour.
        os.unsetenv(name)
        del os.environ[name]
714
def build_environment(d):
    """
    Build an environment from all exported variables.
    """
    import bb.data
    for var in bb.data.keys(d):
        if d.getVarFlag(var, "export", False):
            os.environ[var] = d.getVar(var) or ""
724
725def _check_unsafe_delete_path(path):
726    """
727    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
728    the caller should raise an exception with an appropriate message.
729    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
730    with potentially disastrous results.
731    """
732    extra = ''
733    # HOME might not be /home/something, so in case we can get it, check against it
734    homedir = os.environ.get('HOME', '')
735    if homedir:
736        extra = '|%s' % homedir
737    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
738        return True
739    return False
740
def remove(path, recurse=False, ionice=False):
    """Equivalent to rm -f or rm -rf.

    path is a glob pattern; recurse deletes directories too, optionally
    running rm under 'ionice -c 3'.
    """
    if not path:
        return
    if recurse:
        for name in glob.glob(path):
            if _check_unsafe_delete_path(name):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
        # shutil.rmtree(name) would be ideal but its too slow
        cmd = ['ionice', '-c', '3'] if ionice else []
        subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            # rm -f semantics: a file that is already gone is not an error.
            if exc.errno != errno.ENOENT:
                raise
761
def prunedir(topdir, ionice=False):
    """ Delete everything reachable from the directory named in 'topdir'. """
    # CAUTION:  This is dangerous!
    # Refuses to act on obviously catastrophic targets such as '/' or $HOME
    # (see _check_unsafe_delete_path).
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True, ionice=ionice)
768
769#
770# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
771# but thats possibly insane and suffixes is probably going to be small
772#
def prune_suffix(var, suffixes, d):
    """
    See if var ends with any of the suffixes listed and
    remove it if found

    (d is unused but kept for signature compatibility with callers.)
    """
    for candidate in suffixes:
        if candidate and var.endswith(candidate):
            return var[:-len(candidate)]
    return var
782
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    if '${' in str(directory):
        bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory))
    try:
        os.makedirs(directory)
    except OSError as exc:
        # An existing directory is fine; anything else is re-raised.
        if exc.errno == errno.EEXIST and os.path.isdir(directory):
            return
        raise
794
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure. Move is
    atomic.

    newmtime: mtime to set on the destination instead of preserving src's.
    sstat: pre-computed os.lstat() of src, to avoid re-stating it.
    Returns the resulting mtime on success, or None on failure.

    Fixes: the logger.warning() calls passed extra positional arguments
    without format placeholders, which makes the logging module raise a
    formatting error and drop the message entirely.
    """

    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("movefile: Stating source file failed... %s", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # Fall back to the parent directory so the cross-device check below
        # still has something to compare against.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # A symlink in the way is removed (best effort) before the move.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate it at dest rather than copying data.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            logger.warning("movefile: failed to properly create symlink: %s -> %s (%s)", dest, target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            bb.utils.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            # Not every exception carries errno, so don't assume it.
            if getattr(e, 'errno', None) != errno.EXDEV:
                # Some random error.
                logger.warning("movefile: Failed to move %s to %s (%s)", src, dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        # Cross-device move: copy then delete.
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                bb.utils.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                logger.warning('movefile: copy %s -> %s failed (%s)', src, dest, e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                logger.warning("movefile: Failed to move special file: '%s' to '%s' (%s)", src, dest, a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            logger.warning("movefile: Failed to chown/chmod/unlink %s (%s)", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
887
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.

    Arguments:
        src: path of the file to copy (regular file, symlink, or special file)
        dest: destination path; an existing symlink at dest is removed first
        newmtime: if set, the mtime to apply to dest; otherwise src's
            atime/mtime are applied
        sstat: optional pre-computed os.lstat() result for src, avoiding a
            second stat call

    Returns the mtime applied to dest (a truthy value) on success, or
    False on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist; stat its directory instead so dstat is usable below
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink at dest so we replace it rather than
            # writing through it (best-effort; failure is tolerated)
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest instead of copying data
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            bb.utils.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore src's original mode/timestamps if we made it readable above
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        # Propagate ownership and permission bits from the source
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve source timestamps and report the mtime that was applied
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
967
def break_hardlinks(src, sstat = None):
    """
    Ensure that src is the only hardlink pointing at its inode.  Any other
    hardlinks are left in place (their st_nlink simply drops by one).
    Returns a true value on success and False on failure.

    Arguments:
        src: the file to detach from its other hardlinks
        sstat: optional pre-computed os.lstat() result for src
    """
    if not sstat:
        try:
            sstat = os.lstat(src)
        except Exception as e:
            logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
            return False
    if sstat[stat.ST_NLINK] != 1:
        # Copy the file over itself; the rename inside copyfile gives src
        # a fresh inode, detaching it from the other links.
        return copyfile(src, src, sstat=sstat)
    return True
984
def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate `item` in the list of paths `path` (colon separated string like $PATH).
    If `direction` is non-zero then the list is reversed.
    If `history` is True then the list of candidates also returned as result,history.
    If `executable` is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.

    Returns the (absolute) path found, or "" if no candidate matched; with
    history=True, returns a (path, candidates) tuple instead.
    """

    # Candidate predicate: executable regular file, or mere existence.
    # (def instead of assigned lambda per PEP 8.)
    if executable:
        def is_candidate(p):
            return os.path.isfile(p) and os.access(p, os.X_OK)
    else:
        is_candidate = os.path.exists

    hist = []
    paths = (path or "").split(':')
    if direction != 0:
        paths.reverse()

    for p in paths:
        # Named 'candidate' rather than 'next' to avoid shadowing the builtin
        candidate = os.path.join(p, item)
        hist.append(candidate)
        if is_candidate(candidate):
            if not os.path.isabs(candidate):
                candidate = os.path.abspath(candidate)
            if history:
                return candidate, hist
            return candidate

    if history:
        return "", hist
    return ""
1017
@contextmanager
def umask(new_mask):
    """
    Context manager that applies new_mask as the process umask for the
    duration of the with-block, restoring the previous umask on exit.
    """
    saved_mask = os.umask(new_mask)
    try:
        yield
    finally:
        # Always restore, even if the body raised
        os.umask(saved_mask)
1028
def to_boolean(string, default=None):
    """
    Interpret a configuration value as a boolean.

    Returns `default` for empty/None input, the truth value of non-zero for
    integer input, True for "y"/"yes"/"1"/"true" and False for
    "n"/"no"/"0"/"false" (case-insensitive). Raises ValueError for any
    other string.
    """
    if not string:
        return default

    if isinstance(string, int):
        return string != 0

    lowered = string.lower()
    if lowered in ("n", "no", "0", "false"):
        return False
    if lowered in ("y", "yes", "1", "true"):
        return True
    raise ValueError("Invalid value for to_boolean: %s" % string)
1047
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    truevalue -- the value to return if checkvalues is a subset of variable.

    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.

    d -- the data store.
    """

    value = d.getVar(variable)
    if not value:
        return falsevalue
    words = set(value.split())
    if isinstance(checkvalues, str):
        needles = set(checkvalues.split())
    else:
        needles = set(checkvalues)
    # All requested values must be present
    return truevalue if needles.issubset(words) else falsevalue
1078
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains any of the values specified.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    truevalue -- the value to return if any of the checkvalues is present
    in the variable.

    falsevalue -- the value to return if variable is empty or if none of the
    checkvalues is present in the variable.

    d -- the data store.
    """
    val = d.getVar(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    # Any overlap at all (set intersection) selects truevalue - this is
    # "any", not the subset test used by contains()
    if checkvalues & val:
        return truevalue
    return falsevalue
1108
def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    d -- the data store.
    """

    value = d.getVar(variable)
    if not value:
        return ''
    words = set(value.split())
    if isinstance(checkvalues, str):
        wanted = set(checkvalues.split())
    else:
        wanted = set(checkvalues)
    # Intersection, sorted for a deterministic result
    return ' '.join(sorted(wanted & words))
1132
1133
def get_referenced_vars(start_expr, d):
    """
    Get the names of the variables referenced (directly or transitively) by
    an expression.

    Arguments:
    start_expr -- the expression string to scan for ${VAR} references
    d -- the data store used to look up the value of each referenced variable

    :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level
    are ordered arbitrarily)
    """

    seen = set()
    ret = []

    # The first entry in the queue is the unexpanded start expression
    queue = collections.deque([start_expr])
    # Subsequent entries will be variable names, so we need to track whether or not entry requires getVar
    is_first = True

    empty_data = bb.data.init()
    while queue:
        entry = queue.popleft()
        if is_first:
            # Entry is the start expression - no expansion needed
            is_first = False
            expression = entry
        else:
            # This is a variable name - need to get the value
            expression = d.getVar(entry, False)
            ret.append(entry)

        # expandWithRefs is how we actually get the referenced variables in the expression. We call it using an empty
        # data store because we only want the variables directly used in the expression. It returns a set, which is what
        # dooms us to only ever be "quasi-BFS" rather than full BFS.
        new_vars = empty_data.expandWithRefs(expression, None).references - set(seen)

        queue.extend(new_vars)
        seen.update(new_vars)
    return ret
1168
1169
def cpu_count():
    """Return the number of CPUs available to this process."""
    try:
        # Respects any CPU affinity mask applied to the process
        return len(os.sched_getaffinity(0))
    except OSError:
        # Affinity not supported on this platform; fall back to total count
        return multiprocessing.cpu_count()
1175
def nonblockingfd(fd):
    """Switch the given file descriptor into non-blocking mode."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
1178
def process_profilelog(fn, pout = None):
    # Either call with a list of filenames and set pout or a filename and optionally pout.
    # Writes a human-readable report (by own time with callers, then by
    # cumulative time) to pout.
    if not pout:
        pout = fn + '.processed'

    with open(pout, 'w') as outfile:
        import pstats
        # pstats.Stats accepts one or more profile dump files
        if isinstance(fn, list):
            stats = pstats.Stats(*fn, stream=outfile)
        else:
            stats = pstats.Stats(fn, stream=outfile)
        stats.sort_stats('time')
        stats.print_stats()
        stats.print_callers()
        stats.sort_stats('cumulative')
        stats.print_stats()

        outfile.flush()
1197
1198#
1199# Was present to work around multiprocessing pool bugs in python < 2.7.3
1200#
1201def multiprocessingpool(*args, **kwargs):
1202
1203    import multiprocessing.pool
1204    #import multiprocessing.util
1205    #multiprocessing.util.log_to_stderr(10)
1206    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
1207    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
1208    def wrapper(func):
1209        def wrap(self, timeout=None):
1210            return func(self, timeout=timeout if timeout is not None else 1e100)
1211        return wrap
1212    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
1213
1214    return multiprocessing.Pool(*args, **kwargs)
1215
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Build a context dict holding the argument values, plus a matching list
    # of argument expressions for the generated call.
    context = {}
    funcargs = []
    # Positional arguments are bound to generated names arg_1, arg_2, ...
    for aidx, arg in enumerate(args, start=1):
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
    # Keyword arguments keep their own names
    context.update(kwargs)
    funcargs.extend('%s=%s' % (name, name) for name in kwargs)
    # Generate and run 'retval = func(...)' within the prepared context
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1238
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of :append, :prepend, :remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            # Fix: use [ \t] (space or tab). The previous [ \\t] put a
            # literal backslash and the letter 't' in the character class,
            # so tabs never matched and e.g. 'FOOt' could match var 'FOO'.
            var_res[var] = re.compile(r'^(%s%s)[ \t]*\([ \t]*\)[ \t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \t]*[?+:.]*=[+.]*[ \t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    def handle_var_end():
        # Invoke the callback for the variable we just finished reading and
        # emit the (possibly rewritten) assignment into newlines.
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            # We're inside a multi-line value or function body
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # Only the closing brace of the outermost block ends it
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Single-line value: handle it immediately
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
1425
1426
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as meta:
        updated, newlines = edit_metadata(meta, variables, varfunc)
    if not updated:
        return False
    # Only rewrite the file when something actually changed
    with open(meta_file, 'w') as meta:
        meta.writelines(newlines)
    return True
1441
1442
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        # Normalise away a trailing path separator so comparisons match
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        # Normalise a path for comparison; '~' is only expanded when HOME
        # is in the approved variable list
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        # Accept None, a single path, or a list of paths; return a list
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        # First pass: record the operators used and the existing layer list
        # without modifying anything
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        # Second pass: apply removals, additions and the edit callback
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                # removelayer may be an fnmatch-style pattern
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Nothing left in this += assignment; drop the line entirely
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    # Layers already present (and not being removed) won't be added again
    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
1568
def get_collection_res(d):
    """Return a dict mapping each layer collection name (from
    BBFILE_COLLECTIONS) to its file-pattern regex (BBFILE_PATTERN_<name>,
    or '' when unset)."""
    collection_res = {}
    for collection in (d.getVar('BBFILE_COLLECTIONS') or '').split():
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''
    return collection_res
1576
1577
def get_file_layer(filename, d, collection_res=None):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file

    Arguments:
    filename -- the file path to look up
    d -- the data store
    collection_res -- optional pre-computed result of get_collection_res(d);
        computed on demand when not supplied

    Returns the collection name, or None if nothing matched.
    """
    # Default is None rather than a mutable {} (shared-default pitfall);
    # an empty mapping from the caller is treated the same as before.
    if not collection_res:
        collection_res = get_collection_res(d)

    def path_to_layer(path):
        # Use longest path so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES_PRIORITIZED') or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatchcase(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)
            break

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result
1607
1608
# Constant taken from http://linux.die.net/include/linux/prctl.h
# prctl() option: request a signal be delivered to this process when its
# parent dies
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    """Raised when the prctl() libc call returns a non-zero error code."""
    pass
1614
def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies

    signame is a signal name string such as 'SIGTERM'.
    """
    # http://linux.die.net/man/2/prctl
    signum = getattr(signal, signame)
    rc = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if rc:
        raise PrCtlError('prctl failed with error code %s' % rc)
1624
1625#
1626# Manually call the ioprio syscall. We could depend on other libs like psutil
1627# however this gets us enough of what we need to bitbake for now without the
1628# dependency
1629#
1630_unamearch = os.uname()[4]
1631IOPRIO_WHO_PROCESS = 1
1632IOPRIO_CLASS_SHIFT = 13
1633
def ioprio_set(who, cls, value):
    """
    Set the I/O priority of a process via the ioprio_set syscall.

    Arguments:
    who -- pid of the process to adjust
    cls -- I/O scheduling class
    value -- priority level within the class

    Raises ValueError if the syscall fails. Logs a warning and does nothing
    on architectures where the syscall number is not known.
    """
    # The syscall number differs per architecture
    # (indentation normalised to the file's 4-space convention)
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289
    elif _unamearch == "aarch64":
        NR_ioprio_set = 30

    if NR_ioprio_set:
        # Combine class and priority into the single ioprio value
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1650
def set_process_name(name):
    """Best-effort rename of the current process to aid debugging."""
    from ctypes import cdll, byref, create_string_buffer
    try:
        # prctl option 15 is PR_SET_NAME
        namebuf = create_string_buffer(name.encode('utf-8'))
        cdll.LoadLibrary('libc.so.6').prctl(15, byref(namebuf), 0, 0, 0)
    except:
        # Purely cosmetic - ignore any failure (e.g. no libc available)
        pass
1660
def enable_loopback_networking():
    """
    Configure the loopback interface 'lo' with address 127.0.0.1 and netmask
    255.0.0.0 and bring it up, using raw ioctls rather than external tools.
    NOTE(review): configuring interfaces normally needs privileges (root or
    a fresh network namespace) - confirm caller context.
    """
    # From bits/ioctls.h
    SIOCGIFFLAGS = 0x8913
    SIOCSIFFLAGS = 0x8914
    SIOCSIFADDR = 0x8916
    SIOCSIFNETMASK = 0x891C

    # if.h
    IFF_UP = 0x1
    IFF_RUNNING = 0x40

    # bits/socket.h
    AF_INET = 2

    # char ifr_name[IFNAMSIZ=16]
    ifr_name = struct.pack("@16s", b"lo")
    def netdev_req(fd, req, data = b""):
        # Pad and add interface name
        data = ifr_name + data + (b'\x00' * (16 - len(data)))
        # Return all data after interface name
        return fcntl.ioctl(fd, req, data)[16:]

    # Any AF_INET socket works as a handle for interface ioctls
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
        fd = sock.fileno()

        # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
        netdev_req(fd, SIOCSIFADDR, req)

        # short ifr_flags
        flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
        flags |= IFF_UP | IFF_RUNNING
        netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))

        # struct sockaddr_in ifr_netmask
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
        netdev_req(fd, SIOCSIFNETMASK, req)
1698
def disable_network(uid=None, gid=None):
    """
    Disable networking in the current process if the kernel supports it, else
    just return after logging to debug. To do this we need to create a new user
    namespace, then map back to the original uid/gid.

    Arguments:
    uid -- uid to map back to inside the new user namespace (defaults to the
        current uid)
    gid -- gid to map back to (defaults to the current gid)
    """
    libc = ctypes.CDLL('libc.so.6')

    # From sched.h
    # New user namespace
    CLONE_NEWUSER = 0x10000000
    # New network namespace
    CLONE_NEWNET = 0x40000000

    if uid is None:
        uid = os.getuid()
    if gid is None:
        gid = os.getgid()

    # Unsharing the network namespace drops all interfaces; the new user
    # namespace is what allows doing this without admin privileges
    ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
    if ret != 0:
        logger.debug("System doesn't support disabling network without admin privs")
        return
    # Map our original uid/gid into the new user namespace so ownership and
    # permission checks keep behaving as before
    with open("/proc/self/uid_map", "w") as f:
        f.write("%s %s 1" % (uid, uid))
    with open("/proc/self/setgroups", "w") as f:
        f.write("deny")
    with open("/proc/self/gid_map", "w") as f:
        f.write("%s %s 1" % (gid, gid))
1728
def export_proxies(d):
    """Export common proxy variables from the datastore to the environment"""
    # NOTE: the docstring previously sat after this import, so it was a bare
    # string statement rather than the function's __doc__; it now comes first.
    from bb.fetch2 import get_fetcher_environment
    newenv = get_fetcher_environment(d)
    for v in newenv:
        os.environ[v] = newenv[v]
1735
def load_plugins(logger, plugins, pluginpath):
    """
    Import every python module found directly in pluginpath and register it.

    If a loaded module provides a plugin_init() callable, it is invoked with
    the current plugin list and its return value (or the module itself when
    it returns None/falsy) is appended to `plugins`; otherwise the module is
    appended directly. '__init__' modules are skipped.
    """
    def _import_plugin(modname):
        # Locate and execute the module from the plugin directory only
        logger.debug('Loading plugin %s' % modname)
        spec = importlib.machinery.PathFinder.find_spec(modname, path=[pluginpath] )
        if spec:
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module

    logger.debug('Loading plugins from %s...' % pluginpath)

    # Collect the unique module names for every recognised python suffix
    modnames = set()
    for ext in python_extensions:
        for fn in glob.glob(os.path.join(pluginpath, '*' + ext)):
            modnames.add(os.path.splitext(os.path.basename(fn))[0])

    for modname in modnames:
        if modname == '__init__':
            continue
        plugin = _import_plugin(modname)
        if hasattr(plugin, 'plugin_init'):
            obj = plugin.plugin_init(plugins)
            plugins.append(obj or plugin)
        else:
            plugins.append(plugin)
1759
1760
class LogCatcher(logging.Handler):
    """
    Logging handler that collects formatted messages (WARNING and above)
    so you can inspect them later, e.g. in tests.
    """
    def __init__(self):
        # Formatted message strings, in the order they were emitted
        self.messages = []
        logging.Handler.__init__(self, logging.WARNING)

    def emit(self, record):
        # Use bitbake's build log formatter so the stored text matches
        # what would have been printed
        self.messages.append(bb.build.logformatter.format(record))

    def contains(self, message):
        """Return True if an exactly-equal message string was captured"""
        return message in self.messages
1770
def is_semver(version):
    """
    Return True if `version` is a valid semantic version string
    (e.g. "1.2.3", "1.0.0-rc.1+build.5"), False otherwise.

    https://semver.org/spec/v2.0.0.html
    """
    semver_regex = re.compile(
    r"""
    ^
    (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
    (?:-(
        (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
        (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
    ))?
    (?:\+(
        [0-9a-zA-Z-]+
        (?:\.[0-9a-zA-Z-]+)*
    ))?
    $
    """, re.VERBOSE)

    return semver_regex.match(version) is not None
1796
def rename(src, dst):
    """
    Rename src to dst, handling the cross-device case (e.g. moving out of
    a container filesystem) which plain os.rename() cannot do.
    """
    try:
        os.rename(src, dst)
    except OSError as err:
        if err.errno == errno.EXDEV:
            # src and dst live on different filesystems, so fall back to
            # a copy-and-delete via shutil.move().
            # (EXDEV replaces the previous magic number 18.)
            shutil.move(src, dst)
        else:
            # Bare raise preserves the original traceback
            raise
1808
@contextmanager
def environment(**envvars):
    """
    Context manager to selectively update the environment with the specified mapping.

    On exit each touched variable is restored to its previous value, or
    removed if it did not exist beforehand.
    """
    # Remember prior values only for the variables we are about to touch
    saved = {var: os.environ[var] for var in envvars if var in os.environ}
    try:
        os.environ.update(envvars)
        yield
    finally:
        for var in envvars:
            if var in saved:
                os.environ[var] = saved[var]
            else:
                os.environ.pop(var, None)
1824
def is_local_uid(uid=''):
    """
    Check whether uid is a local one or not.
    Can't use pwd module since it gets all UIDs, not local ones only.

    Falsy uid (the default) means "the current process's uid".
    """
    if not uid:
        uid = os.getuid()
    wanted = str(uid)
    with open('/etc/passwd', 'r') as passwd:
        for entry in passwd:
            fields = entry.split(':')
            # passwd format: name:password:uid:gid:... - uid is field 3
            if len(fields) >= 3 and fields[2] == wanted:
                return True
    return False
1840
def mkstemp(suffix=None, prefix=None, dir=None, text=False):
    """
    Generates a unique filename, independent of time.

    glibc's mkstemp() (at least) derives its unique names from the current
    system time, so highly parallel builds sharing sstate/downloads over
    NFS can collide. Appending 20 random alphanumeric characters to the
    prefix makes a collision independent of time and thus extremely
    unlikely.
    """
    charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
    entropy = "".join(random.choices(charset, k=20))
    base = prefix if prefix else tempfile.gettempprefix()
    return tempfile.mkstemp(suffix=suffix, prefix=base + entropy, dir=dir, text=text)
1859
def path_is_descendant(descendant, ancestor):
    """
    Returns True if the path `descendant` is a descendant of `ancestor`
    (including being equivalent to `ancestor` itself). Otherwise returns False.
    Correctly accounts for symlinks, bind mounts, etc. by using
    os.path.samestat() to compare paths

    May raise any exception that os.stat() raises
    """
    target_stat = os.stat(ancestor)

    # Walk from the descendant up through each parent directory, testing
    # whether any of them is the same inode/device as the ancestor
    current = os.path.abspath(descendant).rstrip("/")
    while current:
        if os.path.samestat(os.stat(current), target_stat):
            return True
        current = os.path.dirname(current).rstrip("/")

    return False
1882
@contextmanager
def lock_timeout(lock):
    """
    Acquire `lock` with a 5 minute timeout and yield the acquire result.

    Without a timeout, a process/thread that exits badly (e.g. OOM killed)
    while holding the lock would leave us hanging in the lock futex
    forever; 5 minutes with no progress means we're probably deadlocked,
    and exiting is better than hanging.
    """
    acquired = lock.acquire(timeout=5*60)
    try:
        if not acquired:
            # Hard-exit immediately; os._exit() performs no cleanup, so
            # the finally clause below does NOT run in this case (the
            # lock was never ours to release).
            os._exit(1)
        yield acquired
    finally:
        lock.release()
1895