xref: /openbmc/openbmc/poky/bitbake/lib/bb/utils.py (revision 8460358c3d24c71d9d38fd126c745854a6301564)
1"""
2BitBake Utility Functions
3"""
4
5# Copyright (C) 2004 Michael Lauer
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re, fcntl, os, string, stat, shutil, time
11import sys
12import errno
13import logging
14import bb
15import bb.msg
16import locale
17import multiprocessing
18import fcntl
19import importlib
20import importlib.machinery
21import importlib.util
22import itertools
23import subprocess
24import glob
25import fnmatch
26import traceback
27import errno
28import signal
29import collections
30import copy
31import ctypes
32import random
33import socket
34import struct
35import tempfile
36from subprocess import getstatusoutput
37from contextlib import contextmanager
38from ctypes import cdll
39
40logger = logging.getLogger("BitBake.Util")
41python_extensions = importlib.machinery.all_suffixes()
42
43
def clean_context():
    """Return a fresh dictionary of the default globals made available to
    executed/evaluated python code (os, bb and time)."""
    ctx = {}
    ctx["os"] = os
    ctx["bb"] = bb
    ctx["time"] = time
    return ctx
50
def get_context():
    """Return the shared context dictionary used by better_exec/better_eval."""
    return _context
53
54
def set_context(ctx):
    """Replace the shared execution context used by better_exec/better_eval.

    The previous implementation assigned to a function-local name, so the
    module-level _context was never actually updated and the call was a
    silent no-op; declare the name global so the setter takes effect.
    """
    global _context
    _context = ctx
57
# Context used in better_exec, eval
# Module-level singleton holding the default execution globals; read via
# get_context() and replaced via set_context().
_context = clean_context()
60
class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found
    (raised by explode_dep_versions2() and vercmp_string_op())."""
63
def explode_version(s):
    """Break a version string into a list of (weight, component) tuples.

    Numeric runs become (0, int), alphabetic runs become (1, str), '~'
    becomes (-1, '~') so it sorts before anything, and any other single
    character becomes (2, char).
    """
    alpha_re = re.compile(r'^([a-zA-Z]+)(.*)$')
    digit_re = re.compile(r'^(\d+)(.*)$')
    parts = []
    while s:
        head = s[0]
        if head in string.digits:
            match = digit_re.match(s)
            parts.append((0, int(match.group(1))))
            s = match.group(2)
        elif head in string.ascii_letters:
            match = alpha_re.match(s)
            parts.append((1, match.group(1)))
            s = match.group(2)
        else:
            # '~' sorts before everything else; other separators sort last.
            parts.append((-1, head) if head == '~' else (2, head))
            s = s[1:]
    return parts
85
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR).

    Returns a tuple (epoch:int, version:str, revision:str); epoch defaults
    to 0 and revision to "" when absent.
    """
    s = s.strip(" <>=")
    epoch = 0
    if ':' in s:
        fields = s.split(':')
        epoch = int(fields[0])
        s = fields[1]
    version, sep, revision = s.rpartition('-')
    if not sep:
        # No revision present; rpartition put everything in the last slot.
        version, revision = revision, ""
    return (epoch, version, revision)
99
def vercmp_part(a, b):
    """Compare two version-component strings using explode_version() ordering.

    Returns -1, 0 or 1 as a is less than, equal to or greater than b.
    """
    parts_a = explode_version(a)
    parts_b = explode_version(b)
    # (0, None) is the sentinel for an exhausted version string.
    while True:
        oa, ca = parts_a.pop(0) if parts_a else (0, None)
        ob, cb = parts_b.pop(0) if parts_b else (0, None)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa != ob:
            return -1 if oa < ob else 1
        if ca is None:
            return -1
        if cb is None:
            return 1
        if ca != cb:
            return -1 if ca < cb else 1
126
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) tuples.

    Returns a negative, zero or positive number as ta is older than,
    equal to or newer than tb.
    """
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    # Epochs dominate; fall through to version, then revision.
    result = int(ea or 0) - int(eb or 0)
    if result == 0:
        result = vercmp_part(va, vb)
    if result == 0:
        result = vercmp_part(ra, rb)
    return result
137
def vercmp_string(a, b):
    """Split two version strings and compare them."""
    return vercmp(split_version(a), split_version(b))
143
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    # Normalise the comparison result to -1/0/1 so we can use a lookup
    # table of the signs each operator accepts.
    sign = (res > 0) - (res < 0)
    accepted = {
        '=': (0,), '==': (0,),
        '<=': (-1, 0),
        '>=': (0, 1),
        '>': (1,), '>>': (1,),
        '<': (-1,), '<<': (-1,),
        '!=': (-1, 1),
    }
    if op not in accepted:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
    return sign in accepted[op]
165
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            # Entering a parenthesised version constraint; skip tokens
            # until the closing parenthesis.
            in_version = True
        if not in_version:
            deps.append(token)
        elif token.endswith(')'):
            in_version = False
    return deps
189
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary mapping each dependency name to a list of
    "<operator> <version>" constraint strings (empty list when there is
    no version constraint).

    Raises VersionStringException when a version specification has an
    invalid or missing comparison operator.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
            # (The statements that previously followed the raise above were
            # unreachable, and a bare "i.strip()" discarded its result; both
            # removed as dead code.)
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        # Plain dependency name (not inside a version constraint).
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
256
def explode_dep_versions(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    skip null value and items appeared in dependency string multiple times
    and return a dictionary of dependencies and versions.
    """
    deps = explode_dep_versions2(s)
    for name in deps:
        versions = deps[name]
        if not versions:
            # No constraint at all -> legacy API uses None.
            deps[name] = None
            continue
        if len(versions) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values.  explode_dep_versions cannot cope with this." % (name, s))
        # Legacy API only carries a single constraint per dependency.
        deps[name] = versions[0]
    return deps
273
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string.
    """
    result = []
    for dep, constraint in deps.items():
        if not constraint:
            result.append(dep)
        elif isinstance(constraint, list):
            # One entry per constraint, repeating the dependency name.
            result.extend("%s (%s)" % (dep, v) for v in constraint)
        else:
            result.append("%s (%s)" % (dep, constraint))
    separator = ", " if commasep else " "
    return separator.join(result)
292
293def _print_trace(body, line):
294    """
295    Print the Environment of a Text Body
296    """
297    error = []
298    # print the environment of the method
299    min_line = max(1, line-4)
300    max_line = min(line + 4, len(body))
301    for i in range(min_line, max_line + 1):
302        if line == i:
303            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
304        else:
305            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
306    return error
307
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.

    text     -- python source to compile
    file     -- unused; kept for API compatibility
    realfile -- filename used for the code object and error reports
    mode     -- compile mode, as for the builtin compile()
    lineno   -- number of blank lines to pad with so reported line
                numbers match positions in realfile
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        # Not every exception from compile() carries a lineno attribute
        # (e.g. ValueError for source containing null bytes); the previous
        # code read e.lineno unconditionally here and could itself crash
        # with AttributeError while reporting the real error.
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, getattr(e, "lineno", "unknown")))
        if hasattr(e, "lineno") and e.lineno is not None:
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in realfile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
343
def _print_exception(t, value, tb, realfile, text, context):
    """Format and log a detailed report for an exception raised while
    executing compiled python code (called from better_exec).

    t/value/tb -- the sys.exc_info() triple for the failure
    realfile   -- filename reported for the failing function
    text       -- the source text the code was compiled from
    context    -- unused here; kept to match the better_exec call site
    """
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): this re-reads tb.tb_next rather than advancing via
            # nexttb.tb_next, so nexttb never changes and the loop is bounded
            # only by len(tbextract) -- looks like a latent bug; confirm
            # against upstream before changing.
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        # Always emit whatever was collected, even if the formatting above
        # failed part-way through.
        logger.error("\n".join(error))
396
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similiar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    code            -- code object or source string to execute
    context         -- dictionary used as the locals mapping for exec()
    text            -- original source text (defaults to code) for reports
    realfile        -- filename reported in error messages
    pythonexception -- if True, re-raise the original exception instead of
                       wrapping it in BBHandledException
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        # Source string rather than a code object; compile it first.
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e2:
            # Never let the error reporting itself take the process down.
            logger.error("Exception handler error: %s" % str(e2))

        e = bb.BBHandledException(e)
        raise e
424
def simple_exec(code, context):
    """Execute code with the shared context globals, without the error
    reporting that better_exec provides."""
    exec(code, get_context(), context)
427
def better_eval(source, locals, extraglobals = None):
    """Evaluate *source* against the shared context globals, optionally
    augmented (on a copy) with *extraglobals*."""
    ctx = get_context()
    if extraglobals:
        # Copy so the shared context is not polluted by the extras.
        ctx = copy.copy(ctx)
        ctx.update(extraglobals)
    return eval(source, ctx, locals)
435
@contextmanager
def fileslocked(files, *args, **kwargs):
    """Context manager which takes a lock on each file in *files* on entry
    and releases them in reverse order on exit."""
    held = []
    if files:
        for name in files:
            lock = bb.utils.lockfile(name, *args, **kwargs)
            if lock is not None:
                held.append(lock)

    try:
        yield
    finally:
        # Release in the opposite order to acquisition.
        for lock in reversed(held):
            bb.utils.unlockfile(lock)
452
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        shared: True to take a shared (read) lock instead of exclusive
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    basename = os.path.basename(name)
    if len(basename) > 255:
        # Trim to the common filesystem name limit, preserving the extension.
        root, ext = os.path.splitext(basename)
        basename = root[:255 - len(ext)] + ext

    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    name = os.path.join(dirname, basename)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
                # Fix: the lock name and error text were previously passed in
                # the wrong order for this format string, producing
                # "Unable to acquire lock '<strerror>', <name>".
                logger.error("Unable to acquire lock '%s', %s",
                             name, e.strerror)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
518
def unlockfile(lf):
    """
    Unlock a file locked using lockfile(), removing the lock file when
    possible and closing the handle.
    """
    fd = lf.fileno()
    try:
        # Promote any shared lock to exclusive before unlinking; a failure
        # just means another process still holds the lock, so leave the
        # file in place.
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(fd, fcntl.LOCK_UN)
    lf.close()
531    lf.close()
532
533def _hasher(method, filename):
534    import mmap
535
536    with open(filename, "rb") as f:
537        try:
538            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
539                for chunk in iter(lambda: mm.read(8192), b''):
540                    method.update(chunk)
541        except ValueError:
542            # You can't mmap() an empty file so silence this exception
543            pass
544    return method.hexdigest()
545
546
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib
    try:
        checksum = hashlib.new('MD5', usedforsecurity=False)
    except TypeError:
        # Some configurations don't support the usedforsecurity keyword.
        checksum = hashlib.new('MD5')
    return _hasher(checksum, filename)
558
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib
    hasher = hashlib.sha256()
    return _hasher(hasher, filename)
566
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib
    hasher = hashlib.sha1()
    return _hasher(hasher, filename)
573
def sha384_file(filename):
    """
    Return the hex string representation of the SHA384 checksum of the filename
    """
    import hashlib
    hasher = hashlib.sha384()
    return _hasher(hasher, filename)
580
def sha512_file(filename):
    """
    Return the hex string representation of the SHA512 checksum of the filename
    """
    import hashlib
    hasher = hashlib.sha512()
    return _hasher(hasher, filename)
587
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return ('BB_TASKHASH HOME LOGNAME PATH PWD '
            'SHELL USER LC_ALL BBSERVER').split()
602
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    # These are kept in the datastore but not exported to task environments.
    local_only = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_PASSTHROUGH_ADDITIONS',
    ]
    return local_only + preserved_envvars_exported()
611
def check_system_locale():
    """Make sure the required system locale are available and configured"""
    saved_locale = locale.getlocale(locale.LC_CTYPE)

    try:
        locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
    except:
        sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
    else:
        # The locale exists; restore whatever was configured before the probe.
        locale.setlocale(locale.LC_CTYPE, saved_locale)

    if sys.getfilesystemencoding() != "utf-8":
        sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
                 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
626
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.
    """

    removed_vars = {}
    for key in list(os.environ):
        if key not in good_vars:
            removed_vars[key] = os.environ.pop(key)

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
651
def approved_variables():
    """
    Determine and return the list of variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # Everything is allowed through.
        return os.environ.keys()
    if 'BB_ENV_PASSTHROUGH' in os.environ:
        approved = os.environ['BB_ENV_PASSTHROUGH'].split()
        approved.append('BB_ENV_PASSTHROUGH')
    else:
        approved = preserved_envvars()
    if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
        approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
        if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
            approved.append('BB_ENV_PASSTHROUGH_ADDITIONS')
    return approved
670
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # Preserving everything: nothing was removed.
        return {}
    return filter_environment(approved_variables())
681
def empty_environment():
    """
    Remove all variables from the environment.
    """
    for name in list(os.environ.keys()):
        # unsetenv() updates the C-level environment for child processes;
        # the del keeps os.environ itself in sync.
        os.unsetenv(name)
        del os.environ[name]
689
def build_environment(d):
    """
    Build an environment from all exported variables in datastore *d*.
    """
    import bb.data
    for var in bb.data.keys(d):
        if d.getVarFlag(var, "export", False):
            os.environ[var] = d.getVar(var) or ""
699
700def _check_unsafe_delete_path(path):
701    """
702    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
703    the caller should raise an exception with an appropriate message.
704    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
705    with potentially disastrous results.
706    """
707    extra = ''
708    # HOME might not be /home/something, so in case we can get it, check against it
709    homedir = os.environ.get('HOME', '')
710    if homedir:
711        extra = '|%s' % homedir
712    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
713        return True
714    return False
715
def remove(path, recurse=False, ionice=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    if recurse:
        for name in glob.glob(path):
            if _check_unsafe_delete_path(name):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
        # shutil.rmtree(name) would be ideal but its too slow
        cmd = ['ionice', '-c', '3'] if ionice else []
        subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            # A file that is already gone is fine; anything else is real.
            if exc.errno != errno.ENOENT:
                raise
736
def prunedir(topdir, ionice=False):
    """ Delete everything reachable from the directory named in 'topdir'. """
    # CAUTION:  This is dangerous!
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    # Delegate the actual deletion to remove() with recursion enabled.
    remove(topdir, recurse=True, ionice=ionice)
743
744#
745# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
746# but thats possibly insane and suffixes is probably going to be small
747#
def prune_suffix(var, suffixes, d):
    """
    See if var ends with any of the suffixes listed and
    remove it if found
    """
    # d is unused but retained for API compatibility with callers.
    for candidate in suffixes:
        if candidate and var.endswith(candidate):
            return var[:-len(candidate)]
    return var
757
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    if '${' in str(directory):
        bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory))
    try:
        os.makedirs(directory)
    except OSError as exc:
        # Only swallow the error when the directory genuinely exists now.
        if exc.errno != errno.EEXIST or not os.path.isdir(directory):
            raise exc
769
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns the new mtime on success and None on failure.
    Move is atomic.
    """

    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        # Fix: these logger calls previously passed extra positional args
        # without %s placeholders, which breaks logging's %-formatting and
        # swallows the message.
        logger.warning("movefile: Stating source file failed... %s", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # Destination missing; stat the parent so device comparison works.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink rather than writing through it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate it at the destination.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            logger.warning("movefile: failed to properly create symlink: %s -> %s (%s)", dest, target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            bb.utils.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            # Not every exception carries errno, so don't assume it exists.
            if getattr(e, 'errno', None) != errno.EXDEV:
                # Some random error.
                logger.warning("movefile: Failed to move %s to %s (%s)", src, dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                bb.utils.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                logger.warning('movefile: copy %s -> %s failed (%s)', src, dest, e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                logger.warning("movefile: Failed to move special file: '%s' to '%s' (%s)", src, dest, a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            logger.warning("movefile: Failed to chown/chmod/unlink %s (%s)", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
862
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns the applied mtime (or the lstat result for
    symlinks) on success and False on failure.
    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # Destination doesn't exist; stat the parent directory instead.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink so we replace the link itself
            # rather than writing through it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at the destination.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            bb.utils.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            if srcchown:
                # Restore the permissions and timestamps we changed to
                # gain read access.
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve the source timestamps and report the mtime we applied.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
942
def break_hardlinks(src, sstat = None):
    """
    Make src the sole hardlink to its inode.  Other links to the same
    inode keep their content (only their st_nlink drops).  Returns a
    true value on success and False on failure.

    sstat: optional pre-computed os.lstat() result for src.
    """
    if not sstat:
        try:
            sstat = os.lstat(src)
        except Exception as e:
            logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
            return False
    if sstat[stat.ST_NLINK] == 1:
        # Already the only link - nothing to do
        return True
    # Copying the file over itself recreates it as a fresh inode
    return copyfile(src, src, sstat=sstat)
959
def which(path, item, direction = 0, history = False, executable=False):
    """
    Search the colon-separated directory list `path` (like $PATH) for `item`.
    If `direction` is non-zero the directories are scanned in reverse order.
    If `history` is True, return (result, candidates) where candidates lists
    every path that was tried.
    If `executable` is True a hit must be an executable regular file,
    otherwise mere existence is enough.
    Returns the absolute path of the first hit, or "" when not found.
    """
    def found(candidate):
        if executable:
            return os.path.isfile(candidate) and os.access(candidate, os.X_OK)
        return os.path.exists(candidate)

    attempts = []
    dirs = (path or "").split(':')
    if direction != 0:
        dirs = dirs[::-1]

    for directory in dirs:
        candidate = os.path.join(directory, item)
        attempts.append(candidate)
        if found(candidate):
            result = candidate if os.path.isabs(candidate) else os.path.abspath(candidate)
            return (result, attempts) if history else result

    return ("", attempts) if history else ""
992
@contextmanager
def umask(new_mask):
    """
    Context manager that applies new_mask as the process umask for the
    duration of the with-block, restoring the previous umask on exit.
    """
    saved = os.umask(new_mask)
    try:
        yield
    finally:
        # Always restore, even if the body raised
        os.umask(saved)
1003
def to_boolean(string, default=None):
    """
    Interpret string as a boolean.

    Returns True for "y"/"yes"/"1"/"true" and False for "n"/"no"/"0"/
    "false" (case-insensitive).  Integers compare against zero.  Returns
    default when string is None or empty.  Raises ValueError for any
    other string.
    """
    # Check ints before the falsy test: previously a literal 0 fell into
    # "if not string" and wrongly returned default instead of False,
    # making the int branch unreachable for 0.
    if isinstance(string, int):
        return string != 0

    if not string:
        return default

    normalized = string.lower()
    if normalized in ("y", "yes", "1", "true"):
        return True
    elif normalized in ("n", "no", "0", "false"):
        return False
    else:
        raise ValueError("Invalid value for to_boolean: %s" % string)
1022
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue if every item of checkvalues occurs in the
    whitespace-split value of variable, falsevalue otherwise.

    Arguments:

    variable -- the variable name; fetched and expanded via
    d.getVar(variable), then split into a set().

    checkvalues -- a whitespace-separated string (split into a set) or
    any iterable (coerced into a set) of items to look for.

    truevalue -- returned when every item is present.

    falsevalue -- returned when the variable is unset/empty or any item
    is missing.

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return truevalue if wanted.issubset(present) else falsevalue
1053
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue if at least one item of checkvalues occurs in
    the whitespace-split value of variable, falsevalue otherwise.

    Arguments:

    variable -- the variable name; fetched and expanded via
    d.getVar(variable), then split into a set().

    checkvalues -- a whitespace-separated string (split into a set) or
    any iterable (coerced into a set) of items to look for.

    truevalue -- returned when any item is present.

    falsevalue -- returned when the variable is unset/empty or no item
    is present.

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return truevalue if wanted & present else falsevalue
1083
def filter(variable, checkvalues, d):
    """Return a sorted, space-joined string of the words of the variable
    that also appear in checkvalues.

    Arguments:

    variable -- the variable name; fetched and expanded via
    d.getVar(variable), then split into a set().

    checkvalues -- a whitespace-separated string (split into a set) or
    any iterable (coerced into a set) of items to keep.

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return ''
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return ' '.join(sorted(wanted & present))
1107
1108
def get_referenced_vars(start_expr, d):
    """
    :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level
    are ordered arbitrarily)
    """
    # Expansion is done against an empty datastore so only the variables
    # directly referenced by each expression are reported; the set return
    # of expandWithRefs is what makes the order only "quasi"-BFS.
    blank = bb.data.init()

    def direct_refs(expression):
        return blank.expandWithRefs(expression, None).references

    # Seed the queue with the variables referenced by the start expression
    # itself (which needs no getVar lookup).
    seen = set(direct_refs(start_expr))
    queue = collections.deque(seen)
    ordered = []

    while queue:
        name = queue.popleft()
        ordered.append(name)
        # Fetch the unexpanded value and queue any newly-seen references
        fresh = direct_refs(d.getVar(name, False)) - seen
        queue.extend(fresh)
        seen.update(fresh)
    return ordered
1143
1144
def cpu_count():
    """
    Return the number of CPUs usable by this process.

    Prefers the scheduler affinity mask, which honours taskset/cgroup
    restrictions on Linux, falling back to the total CPU count where
    sched_getaffinity is unavailable or fails.
    """
    try:
        return len(os.sched_getaffinity(0))
    except (OSError, AttributeError):
        # sched_getaffinity doesn't exist on all platforms (e.g. macOS)
        return multiprocessing.cpu_count()
1150
def nonblockingfd(fd):
    """Switch the file descriptor fd into non-blocking mode."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
1153
def process_profilelog(fn, pout = None):
    """
    Post-process cProfile output into a readable text report.

    fn may be a single profile filename or a list of filenames (in which
    case pout must be supplied).  The report is written to pout, which
    defaults to fn + '.processed'.
    """
    import pstats

    if not pout:
        pout = fn + '.processed'

    with open(pout, 'w') as stream:
        if isinstance(fn, list):
            stats = pstats.Stats(*fn, stream=stream)
        else:
            stats = pstats.Stats(fn, stream=stream)
        # Report hottest-first, then by cumulative time
        stats.sort_stats('time')
        stats.print_stats()
        stats.print_callers()
        stats.sort_stats('cumulative')
        stats.print_stats()

        stream.flush()
1172
1173#
1174# Was present to work around multiprocessing pool bugs in python < 2.7.3
1175#
1176def multiprocessingpool(*args, **kwargs):
1177
1178    import multiprocessing.pool
1179    #import multiprocessing.util
1180    #multiprocessing.util.log_to_stderr(10)
1181    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
1182    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
1183    def wrapper(func):
1184        def wrap(self, timeout=None):
1185            return func(self, timeout=timeout if timeout is not None else 1e100)
1186        return wrap
1187    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
1188
1189    return multiprocessing.Pool(*args, **kwargs)
1190
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Build a context dict holding the argument values, plus a matching
    # list of name / keyword expressions to call the target function with.
    context = {}
    funcargs = []
    # Positional arguments become numbered names in the context
    for idx, arg in enumerate(args, start=1):
        argname = 'arg_%s' % idx
        context[argname] = arg
        funcargs.append(argname)
    # Keyword arguments pass through under their own names
    context.update(kwargs)
    funcargs.extend('%s=%s' % (name, name) for name in kwargs)
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1213
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of :append, :prepend, :remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            # Function definition, e.g. "do_foo() {".  (Fixed: the raw
            # string previously contained [ \\t], which matched a
            # backslash or a literal 't' instead of a tab.)
            var_res[var] = re.compile(r'^(%s%s)[ \t]*\([ \t]*\)[ \t]*{' % (var[:-2].rstrip(), override_re))
        else:
            # Variable assignment using any of the bitbake operators
            var_res[var] = re.compile(r'^(%s%s)[ \t]*[?+:.]*=[+.]*[ \t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''       # text before the value on the first line
    varlines = []           # original lines of the current setting
    newlines = []           # output lines
    in_var = None           # name of the variable currently being read
    full_value = ''         # accumulated (continuation-joined) value
    var_end = ''            # character that terminates the value

    def handle_var_end():
        # Called once the complete value has been gathered; invokes the
        # callback and emits the (possibly rewritten) setting.  Returns
        # True if newlines was changed.
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    # Line up under the opening quote
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            # Continuation of a multi-line value or function body
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                # Strip the trailing backslash continuation
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # Only ends when the braces balance out
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        # The closing quote matches the opening one
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Single-line value: handle it immediately
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
1400
1401
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as mf:
        updated, newlines = edit_metadata(mf, variables, varfunc)
    if updated:
        # Only rewrite the file when something actually changed
        with open(meta_file, 'w') as mf:
            mf.writelines(newlines)
    return updated
1416
1417
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        # Normalise "path/" -> "path" so comparisons are consistent
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        # Strip the trailing separator and, when HOME expansion is
        # permitted, expand ~ for comparison purposes
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        # Accept None, a single path or a list of paths
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        # First pass: record each BBLAYERS operator used and build up the
        # effective layer list without modifying anything
        bblayercalls.append(op)
        if op == '=':
            # A plain assignment discards anything set earlier
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        # Second pass: apply removals, additions and the edit callback
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                # fnmatch allows wildcards in the removal specification
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            # Consumed: also signals the caller-level code below
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Nothing left in this += - drop the whole assignment
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
1543
def get_collection_res(d):
    """Return a dict mapping each layer collection name listed in
    BBFILE_COLLECTIONS to its BBFILE_PATTERN_<collection> regex string
    (empty string when the pattern is unset)."""
    return {collection: d.getVar('BBFILE_PATTERN_%s' % collection) or ''
            for collection in (d.getVar('BBFILE_COLLECTIONS') or '').split()}
1551
1552
def get_file_layer(filename, d, collection_res=None):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file.

    filename: path of the file (recipe, bbappend, bbclass, ...)
    d: the data store
    collection_res: optional pre-computed result of get_collection_res(),
        to avoid recomputing it per call.  (This previously used a mutable
        default argument of {}; None is now the sentinel, and any falsy
        value still triggers recomputation, so behaviour is unchanged.)
    Returns the collection name, or None if no pattern matches.
    """
    if not collection_res:
        collection_res = get_collection_res(d)

    def path_to_layer(path):
        # Use longest regex so nested layers resolve to the innermost one
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES_PRIORITIZED') or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatchcase(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)
            break

    if not bbfilesmatch:
        # Probably a bbclass or other file not covered by BBFILES; match
        # against its own path instead
        result = path_to_layer(filename)

    return result
1582
1583
# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    """Raised when a prctl() libc call returns a non-zero (error) status"""
    pass
1589
def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies

    signame: name of an attribute of the signal module, e.g. "SIGTERM".
    Raises PrCtlError if the underlying prctl() call fails.
    Linux-only: uses PR_SET_PDEATHSIG via libc.
    """
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if result != 0:
        raise PrCtlError('prctl failed with error code %s' % result)
1599
1600#
1601# Manually call the ioprio syscall. We could depend on other libs like psutil
1602# however this gets us enough of what we need to bitbake for now without the
1603# dependency
1604#
# Machine architecture string, used to select the syscall number in ioprio_set()
_unamearch = os.uname()[4]
# ioprio target type: a single process/thread identified by pid
IOPRIO_WHO_PROCESS = 1
# The priority class occupies the top bits of the ioprio value
IOPRIO_CLASS_SHIFT = 13
1608
def ioprio_set(who, cls, value):
    """
    Set the IO priority of a process by invoking the ioprio_set syscall
    directly through libc (avoids a dependency on e.g. psutil).

    who: pid to act on (0 = calling process), as IOPRIO_WHO_PROCESS.
    cls: IO scheduling class.
    value: priority level within the class.
    Raises ValueError if the syscall fails; warns on unknown arch.
    """
    # Syscall numbers are architecture specific
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch.startswith("i") and _unamearch[2:4] == "86":
        # ix86 (i386/i486/i586/i686).  The previous check compared the
        # single-character slice [2:3] against "86", which could never be
        # true, so this branch was unreachable.
        NR_ioprio_set = 289
    elif _unamearch == "aarch64":
        NR_ioprio_set = 30

    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1625
def set_process_name(name):
    """Best-effort: set this process's comm name (PR_SET_NAME) to aid
    debugging; any failure is silently ignored."""
    from ctypes import cdll, byref, create_string_buffer
    # This is nice to have for debugging, not essential
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        namebuf = create_string_buffer(name.encode('utf-8'))
        # 15 == PR_SET_NAME from linux/prctl.h
        libc.prctl(15, byref(namebuf), 0, 0, 0)
    except:
        pass
1635
def enable_loopback_networking():
    """
    Bring up the 'lo' interface as 127.0.0.1/255.0.0.0 in the current
    network namespace using raw SIOCSIF* ioctls, with no external tools.
    Requires sufficient privileges over the namespace (e.g. after
    disable_network() has created a fresh one).
    """
    # From bits/ioctls.h
    SIOCGIFFLAGS = 0x8913
    SIOCSIFFLAGS = 0x8914
    SIOCSIFADDR = 0x8916
    SIOCSIFNETMASK = 0x891C

    # if.h
    IFF_UP = 0x1
    IFF_RUNNING = 0x40

    # bits/socket.h
    AF_INET = 2

    # char ifr_name[IFNAMSIZ=16]
    ifr_name = struct.pack("@16s", b"lo")
    def netdev_req(fd, req, data = b""):
        # Pad and add interface name
        data = ifr_name + data + (b'\x00' * (16 - len(data)))
        # Return all data after interface name
        return fcntl.ioctl(fd, req, data)[16:]

    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
        fd = sock.fileno()

        # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
        netdev_req(fd, SIOCSIFADDR, req)

        # short ifr_flags
        flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
        flags |= IFF_UP | IFF_RUNNING
        netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))

        # struct sockaddr_in ifr_netmask
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
        netdev_req(fd, SIOCSIFNETMASK, req)
1673
def disable_network(uid=None, gid=None):
    """
    Disable networking in the current process if the kernel supports it, else
    just return after logging to debug. To do this we need to create a new user
    namespace, then map back to the original uid/gid.

    uid/gid: ids to map into the new user namespace; default to the
    current process's real uid/gid.
    """
    libc = ctypes.CDLL('libc.so.6')

    # From sched.h
    # New user namespace
    CLONE_NEWUSER = 0x10000000
    # New network namespace
    CLONE_NEWNET = 0x40000000

    if uid is None:
        uid = os.getuid()
    if gid is None:
        gid = os.getgid()

    ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
    if ret != 0:
        # Best-effort: e.g. unprivileged user namespaces disabled
        logger.debug("System doesn't support disabling network without admin privs")
        return
    # Map our original uid/gid into the new user namespace so ownership
    # still looks sane; setgroups must be denied before an unprivileged
    # process is allowed to write gid_map
    with open("/proc/self/uid_map", "w") as f:
        f.write("%s %s 1" % (uid, uid))
    with open("/proc/self/setgroups", "w") as f:
        f.write("deny")
    with open("/proc/self/gid_map", "w") as f:
        f.write("%s %s 1" % (gid, gid))
1703
def export_proxies(d):
    """Export common proxy variables from the datastore to the environment."""
    # (The docstring previously appeared after the import statement, where
    # it was a dead string literal rather than an actual docstring.)
    from bb.fetch2 import get_fetcher_environment
    newenv = get_fetcher_environment(d)
    for v in newenv:
        os.environ[v] = newenv[v]
1710
def load_plugins(logger, plugins, pluginpath):
    """
    Import every python module found in pluginpath (except __init__) and
    append it to the plugins list.

    If a module defines plugin_init(plugins), it is called and its return
    value (or the module itself, if it returns a false value) is appended
    instead of the bare module.
    """
    def load_plugin(name):
        # Returns the loaded module, or None if no importable spec is found
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    logger.debug('Loading plugins from %s...' % pluginpath)

    expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
                for ext in python_extensions)
    files = itertools.chain.from_iterable(expanded)
    names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
    for name in names:
        if name == '__init__':
            continue
        plugin = load_plugin(name)
        if plugin is None:
            # Fix: a failed load previously fell through to the else branch
            # (hasattr(None, 'plugin_init') is False) and appended None.
            continue
        if hasattr(plugin, 'plugin_init'):
            obj = plugin.plugin_init(plugins)
            plugins.append(obj or plugin)
        else:
            plugins.append(plugin)
1734
1735
class LogCatcher(logging.Handler):
    """Logging handler that records formatted messages so they can be
    inspected later (e.g. by tests)."""

    def __init__(self):
        # Collected formatted messages, in emission order
        self.messages = []
        super().__init__(logging.WARNING)

    def emit(self, record):
        self.messages.append(bb.build.logformatter.format(record))

    def contains(self, message):
        """Return True if an exactly matching message was captured."""
        return message in self.messages
1745
def is_semver(version):
    """
        Is the version string following the semver semantic?

        https://semver.org/spec/v2.0.0.html
    """
    semver_pattern = re.compile(
    r"""
    ^
    (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
    (?:-(
        (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
        (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
    ))?
    (?:\+(
        [0-9a-zA-Z-]+
        (?:\.[0-9a-zA-Z-]+)*
    ))?
    $
    """, re.VERBOSE)

    # A full-string match means the version is valid semver
    return semver_pattern.match(version) is not None
1771
# Wrapper around os.rename which can handle cross device problems
# e.g. from container filesystems
def rename(src, dst):
    """
    Rename src to dst, falling back to shutil.move() (copy + delete) when
    the rename crosses a filesystem boundary, e.g. out of an overlay
    container layer. Any other OSError is re-raised unchanged.
    """
    try:
        os.rename(src, dst)
    except OSError as err:
        # Use the symbolic errno instead of the magic number 18
        if err.errno == errno.EXDEV:
            # Invalid cross-device link error
            shutil.move(src, dst)
        else:
            # Bare raise preserves the original exception and traceback
            raise
1783
@contextmanager
def environment(**envvars):
    """
    Context manager that temporarily applies the given environment
    variables, restoring their previous values (or removing newly-set
    ones) on exit. Variables not named in envvars are left untouched.
    """
    saved = dict(os.environ)
    try:
        os.environ.update(envvars)
        yield
    finally:
        for name in envvars:
            previous = saved.get(name)
            if previous is not None:
                os.environ[name] = previous
            else:
                # Wasn't set before we entered; remove it if still present
                os.environ.pop(name, None)
1799
def is_local_uid(uid=''):
    """
    Check whether uid is a local one or not.
    Can't use pwd module since it gets all UIDs, not local ones only.
    """
    # Falsy uid (the '' default, 0 is uid of root which is always local via
    # /etc/passwd lookup below anyway) means "the current process uid"
    wanted = str(uid) if uid else str(os.getuid())
    with open('/etc/passwd', 'r') as f:
        for entry in f:
            fields = entry.split(':')
            # passwd format: name:passwd:uid:gid:... — field 2 is the uid
            if len(fields) >= 3 and fields[2] == wanted:
                return True
    return False
1815
def mkstemp(suffix=None, prefix=None, dir=None, text=False):
    """
    Generates a unique filename, independent of time.

    mkstemp() in glibc (at least) generates unique file names based on the
    current system time. When combined with highly parallel builds, and
    operating over NFS (e.g. shared sstate/downloads) this can result in
    conflicts and race conditions.

    This function adds additional entropy to the file name so that a collision
    is independent of time and thus extremely unlikely.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
    entropy = "".join(random.choices(alphabet, k=20))
    # Fold the entropy into the (possibly default) prefix
    base = prefix if prefix else tempfile.gettempprefix()
    return tempfile.mkstemp(suffix=suffix, prefix=base + entropy, dir=dir, text=text)
1834
def path_is_descendant(descendant, ancestor):
    """
    Returns True if the path `descendant` is a descendant of `ancestor`
    (including being equivalent to `ancestor` itself). Otherwise returns False.
    Correctly accounts for symlinks, bind mounts, etc. by using
    os.path.samestat() to compare paths

    May raise any exception that os.stat() raises
    """

    ancestor_stat = os.stat(ancestor)

    # Walk up each directory component of the descendant to see if it is
    # equivalent to the ancestor
    check_dir = os.path.abspath(descendant)
    while True:
        check_stat = os.stat(check_dir)
        if os.path.samestat(check_stat, ancestor_stat):
            return True
        parent = os.path.dirname(check_dir)
        if parent == check_dir:
            # Reached the filesystem root without a match. The previous
            # implementation stripped trailing slashes and terminated
            # before ever stat()ing "/", so an ancestor of "/" (or the
            # pair "/", "/") incorrectly returned False.
            return False
        check_dir = parent
1857
# If we don't have a timeout of some kind and a process/thread exits badly (for example
# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
@contextmanager
def lock_timeout(lock):
    """Acquire lock with a 5 minute timeout; hard-exit the process on failure
    rather than risk hanging forever on a lock held by a dead owner."""
    acquired = lock.acquire(timeout=5*60)
    try:
        if not acquired:
            # Probable deadlock: exiting beats hanging (note os._exit never
            # returns, so the finally clause is skipped on this path)
            os._exit(1)
        yield acquired
    finally:
        lock.release()
1870