xref: /openbmc/openbmc/poky/bitbake/lib/bb/utils.py (revision 03514f19)
1"""
2BitBake Utility Functions
3"""
4
5# Copyright (C) 2004 Michael Lauer
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re, fcntl, os, string, stat, shutil, time
11import sys
12import errno
13import logging
14import bb
15import bb.msg
16import locale
17import multiprocessing
18import fcntl
19import importlib
20import importlib.machinery
21import importlib.util
22import itertools
23import subprocess
24import glob
25import fnmatch
26import traceback
27import errno
28import signal
29import collections
30import copy
31import ctypes
32import random
33import socket
34import struct
35import tempfile
36from subprocess import getstatusoutput
37from contextlib import contextmanager
38from ctypes import cdll
39
# Module-level logger shared by all helpers in this file.
logger = logging.getLogger("BitBake.Util")
# All suffixes recognised for python modules on this interpreter
# (source, bytecode and extension-module suffixes).
python_extensions = importlib.machinery.all_suffixes()
42
43
def clean_context():
    """Return a fresh, minimal globals dict for executing metadata python code.

    Only the os, bb and time modules are exposed by default.
    """
    return dict(os=os, bb=bb, time=time)
50
def get_context():
    """Return the shared globals dict used by better_exec()/better_eval()."""
    return _context
53
54
def set_context(ctx):
    """Replace the shared globals dict used by better_exec()/better_eval().

    Bug fix: the original assigned to a *local* variable named _context,
    so the call was a silent no-op; 'global' is required to actually
    update the module-level _context.
    """
    global _context
    _context = ctx
57
# Context used in better_exec, eval: initialised to the clean default
# and replaceable via set_context().
_context = clean_context()
60
class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found.

    Raised e.g. by explode_dep_versions2() for a missing/invalid
    comparison operator and by vercmp_string_op() for an unknown operator.
    """
63
def explode_version(s):
    """Break a version string into a list of (weight, value) tuples.

    Numeric runs become (0, int), alphabetic runs become (1, str),
    '~' becomes (-1, '~') and any other character becomes (2, char),
    so components can be compared pairwise by vercmp_part().
    """
    numeric_re = re.compile(r'^(\d+)(.*)$')
    alpha_re = re.compile(r'^([a-zA-Z]+)(.*)$')
    parts = []
    while s:
        head = s[0]
        if head in string.digits:
            m = numeric_re.match(s)
            parts.append((0, int(m.group(1))))
            s = m.group(2)
        elif head in string.ascii_letters:
            m = alpha_re.match(s)
            parts.append((1, m.group(1)))
            s = m.group(2)
        else:
            # '~' sorts before everything else; other separators sort after.
            parts.append((-1, head) if head == '~' else (2, head))
            s = s[1:]
    return parts
85
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)"""
    s = s.strip(" <>=")
    e = 0
    if ':' in s:
        # Epoch precedes the first colon.
        e = int(s.split(":")[0])
        s = s.split(":")[1]
    r = ""
    if '-' in s:
        # Revision follows the last dash.
        s, _, r = s.rpartition("-")
    return (e, s, r)
99
def vercmp_part(a, b):
    """Compare two version components a and b; return -1, 0 or 1."""
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        # (0, None) acts as an "exhausted" sentinel that compares low.
        oa, ca = va.pop(0) if va else (0, None)
        ob, cb = vb.pop(0) if vb else (0, None)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa != ob:
            return -1 if oa < ob else 1
        if ca is None:
            return -1
        if cb is None:
            return 1
        if ca != cb:
            return -1 if ca < cb else 1
126
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) tuples; return <0, 0 or >0."""
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    # Epoch dominates; fall through to version, then revision.
    result = int(ea or 0) - int(eb or 0)
    if result == 0:
        result = vercmp_part(va, vb)
    if result == 0:
        result = vercmp_part(ra, rb)
    return result
137
def vercmp_string(a, b):
    """Split two version strings into (PE, PV, PR) and compare them."""
    return vercmp(split_version(a), split_version(b))
143
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    # Map each accepted operator spelling onto a predicate over the
    # three-way comparison result.
    predicates = {
        '=': lambda r: r == 0,
        '==': lambda r: r == 0,
        '<=': lambda r: r <= 0,
        '>=': lambda r: r >= 0,
        '>': lambda r: r > 0,
        '>>': lambda r: r > 0,
        '<': lambda r: r < 0,
        '<<': lambda r: r < 0,
        '!=': lambda r: r != 0,
    }
    if op not in predicates:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
    return predicates[op](res)
165
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            # Entering a "(op version)" constraint; skip until ')'.
            in_version = True
        if not in_version:
            deps.append(token)
        if in_version and token.endswith(')'):
            in_version = False
    return deps
189
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return an ordered dictionary mapping each dependency name to a
    list of "<operator> <version>" constraint strings (empty list when
    the dependency has no version constraint).

    Raises VersionStringException if a version specification has a
    missing or invalid comparison operator.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
            # Fix: the original called i.strip() and discarded the result
            # (str.strip returns a new string); also removed the dead
            # assignments that followed the unconditional raise above.
            i = i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        # Plain dependency name (not inside a version constraint).
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
256
def explode_dep_versions(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    skip null value and items appeared in dependency string multiple times
    and return a dictionary of dependencies and versions.
    """
    deps = explode_dep_versions2(s)
    for name in deps:
        constraints = deps[name]
        if not constraints:
            # No version constraint at all.
            deps[name] = None
            continue
        if len(constraints) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values.  explode_dep_versions cannot cope with this." % (name, s))
        # Collapse to the first (and normally only) constraint.
        deps[name] = constraints[0]
    return deps
273
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    pieces = []
    for name, version in deps.items():
        if not version:
            pieces.append(name)
        elif isinstance(version, list):
            # One "name (constraint)" entry per constraint.
            pieces.extend("%s (%s)" % (name, v) for v in version)
        else:
            pieces.append("%s (%s)" % (name, version))
    separator = ", " if commasep else " "
    return separator.join(pieces)
292
def _print_trace(body, line):
    """
    Return the source lines of *body* surrounding 1-based *line*
    (up to four lines either side), marking the offending line with '***'.
    """
    error = []
    lo = max(1, line - 4)
    hi = min(line + 4, len(body))
    for num in range(lo, hi + 1):
        marker = ' *** ' if num == line else '     '
        error.append('%s%.4d:%s' % (marker, num, body[num - 1].rstrip()))
    return error
307
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.

    Compiled code objects are cached in bb.methodpool keyed on the text.
    On failure the error report is logged and the exception is wrapped
    in bb.BBHandledException.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        # Fix: not every exception carries a usable lineno (e.g. ValueError
        # from compile(), or a SyntaxError with lineno=None) - the original
        # dereferenced e.lineno before the hasattr() guard below.
        error_lineno = getattr(e, "lineno", None)
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, error_lineno))
        if error_lineno is not None:
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in realfile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, error_lineno - lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
343
def _print_exception(t, value, tb, realfile, text, context):
    """Build and log a detailed report for an exception raised while
    executing python code compiled from *text* (used by better_exec).

    t/value/tb are the sys.exc_info() triple; realfile names the origin
    of the code for the report.  Always logs, even if report generation
    itself fails part-way (hence the try/finally).
    """
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            # NOTE(review): the second comparison matches a function name
            # against a file name; it looks like tbextract[level][2] was
            # intended - confirm before changing.
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): nexttb is reassigned to the same tb.tb_next every
            # iteration, so the loop is effectively bounded by len(tbextract).
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        logger.error("\n".join(error))
396
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    code may be a string (compiled via better_compile) or an existing
    code object; context is used as the locals mapping.  When
    pythonexception is True the original exception propagates untouched;
    otherwise it is reported via _print_exception and re-raised wrapped
    in bb.BBHandledException.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e2:
            # Never let the error reporter mask the original failure.
            logger.error("Exception handler error: %s" % str(e2))

        e = bb.BBHandledException(e)
        raise e
424
def simple_exec(code, context):
    """Execute *code* with the shared context as globals and *context* as locals."""
    exec(code, get_context(), context)
427
def better_eval(source, locals, extraglobals = None):
    """Evaluate *source* against the shared context globals, optionally
    augmented with the entries of *extraglobals* (the shared dict itself
    is copied first and never mutated)."""
    ctx = get_context()
    if extraglobals:
        ctx = copy.copy(ctx)
        for name in extraglobals:
            ctx[name] = extraglobals[name]
    return eval(source, ctx, locals)
435
@contextmanager
def fileslocked(files, *args, **kwargs):
    """Context manager for locking and unlocking file locks.

    Acquires a lock via bb.utils.lockfile() for each path in *files*
    before yielding; extra arguments are passed through to lockfile().
    All acquired locks are released on exit.
    """
    locks = []
    if files:
        # NOTE(review): acquisition happens before the try block, so if one
        # lockfile() call raises, previously acquired locks are not released
        # here - confirm whether that is intentional.
        for lockfile in files:
            l = bb.utils.lockfile(lockfile, *args, **kwargs)
            if l is not None:
                locks.append(l)

    try:
        yield
    finally:
        for lock in locks:
            bb.utils.unlockfile(lock)
451
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    # Keep the lock file's basename within the common 255-byte filesystem
    # limit, preserving the extension.
    basename = os.path.basename(name)
    if len(basename) > 255:
        root, ext = os.path.splitext(basename)
        basename = root[:255 - len(ext)] + ext

    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    name = os.path.join(dirname, basename)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
                # Fix: the format arguments were swapped (the error string
                # was printed where the lock name belongs).
                logger.error("Unable to acquire lock '%s', %s",
                             name, e.strerror)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
517
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        # (LOCK_NB so we never block here; if another holder prevents the
        # upgrade, the lock file is simply left on disk.)
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    # Release the lock and close our file object in all cases.
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
531
def _hasher(method, filename):
    """Feed the contents of *filename* into the hash object *method*
    (mmap'd in 8KiB chunks) and return the hex digest string."""
    import mmap

    with open(filename, "rb") as f:
        try:
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                while True:
                    chunk = mm.read(8192)
                    if not chunk:
                        break
                    method.update(chunk)
        except ValueError:
            # mmap() refuses zero-length files; an empty file just
            # contributes nothing to the digest.
            pass
    return method.hexdigest()
544
545
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib
    try:
        # Declare non-security usage so FIPS-enabled builds allow MD5.
        checksum = hashlib.new('MD5', usedforsecurity=False)
    except TypeError:
        # Some configurations don't appear to support two arguments
        checksum = hashlib.new('MD5')
    return _hasher(checksum, filename)
557
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib
    digest = hashlib.sha256()
    return _hasher(digest, filename)
565
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib
    digest = hashlib.sha1()
    return _hasher(digest, filename)
572
def sha384_file(filename):
    """
    Return the hex string representation of the SHA384 checksum of the filename
    """
    import hashlib
    digest = hashlib.sha384()
    return _hasher(digest, filename)
579
def sha512_file(filename):
    """
    Return the hex string representation of the SHA512 checksum of the filename
    """
    import hashlib
    digest = hashlib.sha512()
    return _hasher(digest, filename)
586
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return 'BB_TASKHASH HOME LOGNAME PATH PWD SHELL USER LC_ALL BBSERVER'.split()
601
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    extra = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_PASSTHROUGH_ADDITIONS',
    ]
    return extra + preserved_envvars_exported()
610
def check_system_locale():
    """Make sure the required system locale are available and configured.

    Exits the process with a message if en_US.UTF-8 is unavailable or if
    the filesystem encoding is not UTF-8 (it cannot be changed once the
    interpreter has started).
    """
    default_locale = locale.getlocale(locale.LC_CTYPE)

    try:
        locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
    except locale.Error:
        # Fix: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; setlocale() failures raise locale.Error.
        sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
    else:
        locale.setlocale(locale.LC_CTYPE, default_locale)

    if sys.getfilesystemencoding() != "utf-8":
        sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
                 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
625
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.
    """
    removed_vars = {}
    for key in list(os.environ):
        if key in good_vars:
            continue
        # Record the value, then drop the variable from the environment.
        removed_vars[key] = os.environ.pop(key)

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
650
def approved_variables():
    """
    Determine and return the list of variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # Everything is kept when the user asked to preserve the environment.
        return os.environ.keys()

    if 'BB_ENV_PASSTHROUGH' in os.environ:
        approved = os.environ['BB_ENV_PASSTHROUGH'].split()
        approved.append('BB_ENV_PASSTHROUGH')
    else:
        approved = preserved_envvars()

    if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
        approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
        if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
            approved.append('BB_ENV_PASSTHROUGH_ADDITIONS')
    return approved
669
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # User asked for the environment to be kept intact.
        return {}
    return filter_environment(approved_variables())
680
def empty_environment():
    """
    Remove all variables from the environment.
    """
    for name in list(os.environ.keys()):
        # unsetenv updates the C-level environment; deleting from
        # os.environ keeps the python-level view in sync.
        os.unsetenv(name)
        del os.environ[name]
688
def build_environment(d):
    """
    Build an environment from all exported variables.

    Every variable in the datastore *d* whose 'export' flag is set is
    copied into os.environ; unset values become the empty string.
    """
    import bb.data
    for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
            os.environ[var] = d.getVar(var) or ""
698
def _check_unsafe_delete_path(path):
    """
    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
    the caller should raise an exception with an appropriate message.
    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
    with potentially disastrous results.
    """
    alternatives = '/|//|/home|/home/[^/]*'
    # HOME might not be /home/something, so in case we can get it, check against it
    homedir = os.environ.get('HOME', '')
    if homedir:
        alternatives += '|%s' % homedir
    return bool(re.match('(%s)$' % alternatives, os.path.abspath(path)))
714
def remove(path, recurse=False, ionice=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    if recurse:
        # Refuse to recursively delete anything that looks catastrophic.
        for name in glob.glob(path):
            if _check_unsafe_delete_path(name):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
        # shutil.rmtree(name) would be ideal but its too slow
        cmd = ['ionice', '-c', '3'] if ionice else []
        subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            # Already gone is fine; anything else is a real failure.
            if exc.errno != errno.ENOENT:
                raise
735
def prunedir(topdir, ionice=False):
    """ Delete everything reachable from the directory named in 'topdir'. """
    # CAUTION:  This is dangerous!
    # Refuse outright to delete /, /home and similar (see
    # _check_unsafe_delete_path); remove() re-checks each glob match too.
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True, ionice=ionice)
742
#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
    """
    See if var ends with any of the suffixes listed and
    remove it if found
    """
    for candidate in suffixes:
        # Empty suffixes are skipped (var[:-0] would empty the string).
        if candidate and var.endswith(candidate):
            return var[:-len(candidate)]
    return var
756
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    if '${' in str(directory):
        bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory))
    try:
        os.makedirs(directory)
    except OSError as exc:
        # Only tolerate EEXIST when the path really is a directory now.
        already_there = exc.errno == errno.EEXIST and os.path.isdir(directory)
        if not already_there:
            raise exc
768
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure. Move is
    atomic.

    Parameters:
        src: file to move
        dest: destination path (may be an existing directory)
        newmtime: mtime to set on the result; defaults to src's mtime
        sstat: pre-computed os.lstat(src), avoids a second stat call

    Returns the resulting mtime (or an os.lstat() result for symlinks)
    on success, None on failure.

    Fix: the logger.warning() calls previously passed extra positional
    arguments without format placeholders, which raises formatting
    errors inside the logging machinery instead of printing the message.
    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("movefile: Stating source file failed... %s" % e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist; stat its directory for the device check below.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink at dest so it can't redirect the move.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link rather than copying data.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            logger.warning("movefile: failed to properly create symlink: %s -> %s (%s)" % (dest, target, e))
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            bb.utils.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            # Not all exceptions carry errno; treat those as real failures.
            if getattr(e, 'errno', None) != errno.EXDEV:
                # Some random error.
                logger.warning("movefile: Failed to move %s to %s (%s)" % (src, dest, e))
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                bb.utils.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                logger.warning("movefile: copy %s -> %s failed (%s)" % (src, dest, e))
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                logger.warning("movefile: Failed to move special file: '%s' to '%s' (%s)" % (src, dest, a))
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            logger.warning("movefile: Failed to chown/chmod/unlink %s (%s)" % (dest, e))
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
861
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure.

    Returns the resulting mtime (or an os.lstat() result for symlinks)
    on success, False on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    # Stat the source (unless the caller already did) so ownership, mode
    # and timestamps can be replicated on dest below.
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist yet; remember that and stat its parent instead.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink at dest so the copy can't be redirected.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link instead of copying file data.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            bb.utils.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore the source's original mode/times if we chmod'ed it above.
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        # NOTE(review): the comment above says mv but the command is cp; also
        # the hand-rolled quoting breaks for filenames containing a single
        # quote - confirm before changing.
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
941
def break_hardlinks(src, sstat = None):
    """
    Make src the sole hardlink to its inode. Any other links keep their
    content (only their st_nlink drops). Returns a true value on success
    and False on failure.
    """
    # Fetch the stat data unless the caller already supplied it.
    if not sstat:
        try:
            sstat = os.lstat(src)
        except Exception as e:
            logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
            return False
    if sstat[stat.ST_NLINK] != 1:
        # Copying the file over itself detaches it from the other links.
        return copyfile(src, src, sstat=sstat)
    return True
958
def which(path, item, direction = 0, history = False, executable=False):
    """
    Search for `item` in each directory of the colon-separated `path`
    string (like $PATH).
    If `direction` is non-zero the directories are searched in reverse.
    If `history` is True, the list of candidate paths tried is returned
    alongside the result.
    If `executable` is True the candidate must be an executable regular
    file; otherwise mere existence suffices.
    Returns the (absolute) match, or "" if nothing was found.
    """
    def found(candidate):
        if executable:
            return os.path.isfile(candidate) and os.access(candidate, os.X_OK)
        return os.path.exists(candidate)

    candidates = []
    dirs = (path or "").split(':')
    if direction != 0:
        dirs.reverse()

    for directory in dirs:
        candidate = os.path.join(directory, item)
        candidates.append(candidate)
        if found(candidate):
            result = candidate if os.path.isabs(candidate) else os.path.abspath(candidate)
            return (result, candidates) if history else result

    return ("", candidates) if history else ""
991
@contextmanager
def umask(new_mask):
    """
    Context manager that applies `new_mask` as the process umask for the
    duration of the with-block and restores the previous umask on exit.
    """
    saved_mask = os.umask(new_mask)
    try:
        yield
    finally:
        # Always restore, even if the body raised.
        os.umask(saved_mask)
1002
def to_boolean(string, default=None):
    """
    Interpret the input as a boolean.

    Falsy inputs (None, "", 0) yield `default`; other integers map to
    their truth value; recognised yes/no strings (case-insensitive) map
    to True/False; anything else raises ValueError.
    """
    if not string:
        return default

    if isinstance(string, int):
        return string != 0

    lowered = string.lower()
    if lowered in ("y", "yes", "1", "true"):
        return True
    if lowered in ("n", "no", "0", "false"):
        return False
    raise ValueError("Invalid value for to_boolean: %s" % string)
1021
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return `truevalue` if every entry of `checkvalues` appears in the
    whitespace-split value of `variable`, else `falsevalue`.

    Arguments:

    variable -- the variable name, fetched and expanded via
    d.getVar(variable) and split into a set.

    checkvalues -- a string (split on whitespace into a set) or any
    iterable (coerced into a set).

    truevalue -- returned when all checkvalues are present.

    falsevalue -- returned when the variable is empty/unset or any
    checkvalue is missing.

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return truevalue if wanted.issubset(present) else falsevalue
1052
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Return `truevalue` if at least one entry of `checkvalues` appears
    in the whitespace-split value of `variable`, else `falsevalue`.

    Arguments:

    variable -- the variable name, fetched and expanded via
    d.getVar(variable) and split into a set.

    checkvalues -- a string (split on whitespace into a set) or any
    iterable (coerced into a set).

    truevalue -- returned when any checkvalue is present.

    falsevalue -- returned when the variable is empty/unset or no
    checkvalue is present.

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return truevalue if wanted & present else falsevalue
1082
def filter(variable, checkvalues, d):
    """Return a space-separated, sorted string of the words of
    `variable` that also appear in `checkvalues`.

    Arguments:

    variable -- the variable name, fetched and expanded via
    d.getVar(variable) and split into a set.

    checkvalues -- a string (split on whitespace into a set) or any
    iterable (coerced into a set).

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return ''
    present = set(value.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return ' '.join(sorted(wanted & present))
1106
1107
def get_referenced_vars(start_expr, d):
    """
    :return: names of vars referenced in start_expr (recursively), in quasi-BFS
    order (variables within the same level are ordered arbitrarily)
    """
    # expandWithRefs is run against an empty datastore so that only the
    # variables referenced directly in each expression are reported; it
    # returns a set, which is why the order is only "quasi" BFS.
    empty_data = bb.data.init()

    def direct_refs(expression):
        return empty_data.expandWithRefs(expression, None).references

    ordered = []
    # Seed the traversal with the variables used by the start expression
    # itself; it needs no getVar lookup and is not part of the result.
    first_level = direct_refs(start_expr)
    pending = collections.deque(first_level)
    seen = set(first_level)

    while pending:
        name = pending.popleft()
        ordered.append(name)
        # Expand this variable's (unexpanded) value and enqueue any
        # not-yet-seen references it makes.
        new_refs = direct_refs(d.getVar(name, False)) - seen
        pending.extend(new_refs)
        seen.update(new_refs)
    return ordered
1142
1143
def cpu_count():
    """Return the number of CPUs usable by this process.

    Prefers the scheduler affinity mask (which honours taskset/cgroup
    restrictions); falls back to the total CPU count when the affinity
    call is unavailable or fails.
    """
    try:
        return len(os.sched_getaffinity(0))
    except (OSError, AttributeError):
        # AttributeError: os.sched_getaffinity doesn't exist on all
        # platforms (e.g. macOS); OSError: the call itself failed.
        return multiprocessing.cpu_count()
1149
def nonblockingfd(fd):
    """Set the O_NONBLOCK flag on the given file descriptor."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
1152
def process_profilelog(fn, pout = None):
    """
    Convert cProfile data into a readable text report.

    fn may be a single profile filename or a list of them; pout is the
    report filename and defaults to fn + '.processed' (so it must be
    given explicitly when fn is a list).
    """
    import pstats

    if not pout:
        pout = fn + '.processed'

    with open(pout, 'w') as stream:
        if isinstance(fn, list):
            stats = pstats.Stats(*fn, stream=stream)
        else:
            stats = pstats.Stats(fn, stream=stream)
        # Hottest functions (own time) first, then again by cumulative time.
        stats.sort_stats('time')
        stats.print_stats()
        stats.print_callers()
        stats.sort_stats('cumulative')
        stats.print_stats()
        stream.flush()
1171
1172#
1173# Was present to work around multiprocessing pool bugs in python < 2.7.3
1174#
def multiprocessingpool(*args, **kwargs):
    """Create a multiprocessing.Pool whose IMapIterator.next stays
    interruptible by signals; all arguments are passed straight through
    to multiprocessing.Pool."""

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            # A huge finite timeout (rather than a blocking wait) keeps
            # the wait interruptible so pending signals get delivered.
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    # NOTE: this patches IMapIterator globally, so it affects every pool
    # in the process, and is re-applied on each call.
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
1189
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Build a small code fragment that calls the requested function. The
    # argument values are passed in via the execution context under
    # generated names, and the call expression refers to those names.
    context = {}
    call_args = []
    # Positional arguments become arg_1, arg_2, ...
    for idx, arg in enumerate(args, start=1):
        argname = 'arg_%s' % idx
        context[argname] = arg
        call_args.append(argname)
    # Keyword arguments keep their own names.
    context.update(kwargs)
    call_args.extend('%s=%s' % (kw, kw) for kw in kwargs)
    code = 'retval = %s(%s)' % (func, ', '.join(call_args))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1212
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of :append, :prepend, :remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    # Build one regex per requested variable/function that matches the
    # start of its assignment (or function definition) line.
    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''   # text up to the value: variable name + operator
    varlines = []       # original lines of the setting currently being parsed
    newlines = []       # output lines
    in_var = None       # name of the variable currently being accumulated, if any
    full_value = ''     # value accumulated across continuation lines
    var_end = ''        # terminator sought: '}' for functions, the opening quote otherwise

    def handle_var_end():
        # Called once a complete variable setting (or function body) has
        # been accumulated; invokes varfunc and emits the replacement
        # lines. Returns True if the output differs from the input.
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    # Indent to just past the assignment and opening quote
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            # Continuation of a multi-line setting or function body
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                # Drop the trailing backslash of a continuation line
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # Function body continues until a '}' makes the brace
                    # balance go negative
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        # Terminator is the same quote character that
                        # opened the value
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Single-line setting; handle it immediately
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
1399
1400
def edit_metadata_file(meta_file, variables, varfunc):
    """File-based wrapper around edit_metadata(): edit one or more
    variable values in a recipe or config file via the varfunc callback
    (see edit_metadata() for parameter details). The file is rewritten
    only when something actually changed.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as meta:
        updated, newlines = edit_metadata(meta, variables, varfunc)
    if not updated:
        return False
    with open(meta_file, 'w') as meta:
        meta.writelines(newlines)
    return True
1415
1416
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        # Normalise away a trailing path separator for comparisons
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        # Normalise a layer path; '~' is only expanded when HOME is an
        # approved variable
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        # Coerce a None/str/list parameter into a list of normalised paths
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        # First pass: record the operators used and the original layer
        # list without modifying anything
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        # Second pass: apply removals (fnmatch patterns allowed),
        # additions and the optional edit callback to each BBLAYERS value
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Nothing left in this += assignment; drop it entirely
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    # Layers already present (and not also being removed) shouldn't be re-added
    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
1542
def get_collection_res(d):
    """Return a dict mapping each layer collection name (from
    BBFILE_COLLECTIONS) to its BBFILE_PATTERN_<collection> regex (or ''
    when no pattern is set)."""
    return {
        collection: d.getVar('BBFILE_PATTERN_%s' % collection) or ''
        for collection in (d.getVar('BBFILE_COLLECTIONS') or '').split()
    }
1550
1551
def get_file_layer(filename, d, collection_res=None):
    """Determine the collection (as defined by a layer's layer.conf file)
    containing the specified file.

    filename -- path of the file to look up.
    d -- the data store.
    collection_res -- optional precomputed result of get_collection_res();
    recomputed from d when not supplied.
    Returns the collection name, or None when no pattern matches.
    """
    # Avoid the previous mutable default argument; an empty mapping is
    # still treated as "not supplied", as before.
    if not collection_res:
        collection_res = get_collection_res(d)

    def path_to_layer(path):
        # Use the longest matching pattern so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.items():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES_PRIORITIZED') or '').split()
    bbfilesmatch = False
    # Prefer matching via the BBFILES entry that would have provided the
    # file, since that reflects the prioritised ordering.
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatchcase(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)
            break

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result
1581
1582
# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1  # prctl() option: signal to deliver to this process when its parent dies

class PrCtlError(Exception):
    """Raised when the libc prctl() call returns a non-zero status."""
    pass
1588
def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    # Resolve e.g. "SIGTERM" to its numeric signal value
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    # PR_SET_PDEATHSIG asks the kernel to deliver signum to this process
    # when its parent terminates (Linux-specific)
    result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if result != 0:
        raise PrCtlError('prctl failed with error code %s' % result)
1598
1599#
1600# Manually call the ioprio syscall. We could depend on other libs like psutil
1601# however this gets us enough of what we need to bitbake for now without the
1602# dependency
1603#
_unamearch = os.uname()[4]  # machine hardware name, e.g. "x86_64"
IOPRIO_WHO_PROCESS = 1      # ioprio_set() 'which' value: target is a process id
IOPRIO_CLASS_SHIFT = 13     # bits to shift the scheduling class into the ioprio value
1607
def ioprio_set(who, cls, value):
    """Set the IO priority of process `who` to class `cls` / priority
    `value` via the raw ioprio_set syscall, or warn when the syscall
    number for this architecture is unknown."""
    # The syscall number is architecture specific
    if _unamearch == "x86_64":
        syscall_nr = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        syscall_nr = 289
    elif _unamearch == "aarch64":
        syscall_nr = 30
    else:
        syscall_nr = None

    if not syscall_nr:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
        return

    # Pack the class into the high bits alongside the priority value
    ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
    rc = cdll['libc.so.6'].syscall(syscall_nr, IOPRIO_WHO_PROCESS, who, ioprio)
    if rc != 0:
        raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
1624
def set_process_name(name):
    """Best-effort: set this process's name via prctl(PR_SET_NAME);
    failures are silently ignored."""
    from ctypes import cdll, byref, create_string_buffer
    # This is nice to have for debugging, not essential, so any failure
    # is deliberately swallowed.
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        namebuf = create_string_buffer(bytes(name, 'utf-8'))
        # 15 == PR_SET_NAME from linux/prctl.h
        libc.prctl(15, byref(namebuf), 0, 0, 0)
    except:
        pass
1634
def enable_loopback_networking():
    """Configure and bring up the loopback interface ("lo") as
    127.0.0.1 with a 255.0.0.0 netmask, using raw ioctl()s rather than
    external tools; requires sufficient privileges over the current
    network namespace."""
    # From bits/ioctls.h
    SIOCGIFFLAGS = 0x8913
    SIOCSIFFLAGS = 0x8914
    SIOCSIFADDR = 0x8916
    SIOCSIFNETMASK = 0x891C

    # if.h
    IFF_UP = 0x1
    IFF_RUNNING = 0x40

    # bits/socket.h
    AF_INET = 2

    # char ifr_name[IFNAMSIZ=16]
    ifr_name = struct.pack("@16s", b"lo")
    def netdev_req(fd, req, data = b""):
        # Pad and add interface name
        data = ifr_name + data + (b'\x00' * (16 - len(data)))
        # Return all data after interface name
        return fcntl.ioctl(fd, req, data)[16:]

    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
        fd = sock.fileno()

        # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
        # Set the interface address to 127.0.0.1
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
        netdev_req(fd, SIOCSIFADDR, req)

        # short ifr_flags
        # Read the current flags and mark the interface up and running
        flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
        flags |= IFF_UP | IFF_RUNNING
        netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))

        # struct sockaddr_in ifr_netmask
        # Netmask 255.0.0.0
        req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
        netdev_req(fd, SIOCSIFNETMASK, req)
1672
def disable_network(uid=None, gid=None):
    """
    Disable networking in the current process if the kernel supports it, else
    just return after logging to debug. To do this we need to create a new user
    namespace, then map back to the original uid/gid.
    """
    libc = ctypes.CDLL('libc.so.6')

    # From sched.h
    # New user namespace
    CLONE_NEWUSER = 0x10000000
    # New network namespace
    CLONE_NEWNET = 0x40000000

    if uid is None:
        uid = os.getuid()
    if gid is None:
        gid = os.getgid()

    # Unsharing a user namespace alongside the network namespace lets an
    # unprivileged process create the latter
    ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
    if ret != 0:
        logger.debug("System doesn't support disabling network without admin privs")
        return
    # Map the original uid/gid into the new user namespace so file access
    # continues to behave as before
    with open("/proc/self/uid_map", "w") as f:
        f.write("%s %s 1" % (uid, uid))
    # Denying setgroups is required before an unprivileged process may
    # write gid_map (see user_namespaces(7))
    with open("/proc/self/setgroups", "w") as f:
        f.write("deny")
    with open("/proc/self/gid_map", "w") as f:
        f.write("%s %s 1" % (gid, gid))
1702
def export_proxies(d):
    """Export common proxy variables from the datastore to the environment."""
    # Previously this string sat below the import, making it a no-op
    # expression rather than the function's docstring.
    # Imported here rather than at module level — NOTE(review): presumably
    # to avoid an import cycle with bb.fetch2; confirm before hoisting.
    from bb.fetch2 import get_fetcher_environment
    newenv = get_fetcher_environment(d)
    for v in newenv:
        os.environ[v] = newenv[v]
1709
def load_plugins(logger, plugins, pluginpath):
    """
    Import every plugin module found in pluginpath and append it to plugins.

    If a loaded module defines plugin_init(plugins), its return value (or the
    module itself, when it returns a falsy value) is appended instead of the
    raw module. '__init__' modules are skipped.
    """
    def _import_plugin(modname):
        # Import a single module by name from pluginpath, or return None
        # when no matching module spec can be found.
        logger.debug('Loading plugin %s' % modname)
        spec = importlib.machinery.PathFinder.find_spec(modname, path=[pluginpath])
        if spec:
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module

    logger.debug('Loading plugins from %s...' % pluginpath)

    # Collect the unique module basenames across all recognised Python
    # source/extension suffixes.
    candidates = set()
    for suffix in python_extensions:
        for path in glob.glob(os.path.join(pluginpath, '*' + suffix)):
            candidates.add(os.path.splitext(os.path.basename(path))[0])

    for modname in candidates:
        if modname == '__init__':
            continue
        plugin = _import_plugin(modname)
        if hasattr(plugin, 'plugin_init'):
            entry = plugin.plugin_init(plugins)
            plugins.append(entry or plugin)
        else:
            plugins.append(plugin)
1733
1734
class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""

    def __init__(self):
        # Only catch WARNING and above, matching the handler's level filter.
        super().__init__(logging.WARNING)
        # Formatted message strings, in the order they were emitted.
        self.messages = []

    def emit(self, record):
        # Format with BitBake's build log formatter so stored text matches
        # what would have been written to the build log.
        self.messages.append(bb.build.logformatter.format(record))

    def contains(self, message):
        return message in self.messages
1744
def is_semver(version):
    """
    Is the version string following the semver semantic?

    https://semver.org/spec/v2.0.0.html

    Returns True if `version` is a valid Semantic Versioning 2.0.0 string
    (MAJOR.MINOR.PATCH with optional -prerelease and +build metadata),
    False otherwise.
    """
    regex = re.compile(
    r"""
    ^
    (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
    (?:-(
        (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
        (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
    ))?
    (?:\+(
        [0-9a-zA-Z-]+
        (?:\.[0-9a-zA-Z-]+)*
    ))?
    $
    """, re.VERBOSE)

    # match() returns None on failure, so coerce directly to bool rather
    # than branching on None.
    return regex.match(version) is not None
1770
# Wrapper around os.rename which can handle cross device problems
# e.g. from container filesystems
def rename(src, dst):
    """
    Rename src to dst, falling back to shutil.move() when the rename
    crosses a filesystem boundary (e.g. from container filesystems).

    Any other OSError from the rename is re-raised unchanged.
    """
    try:
        os.rename(src, dst)
    except OSError as err:
        if err.errno == errno.EXDEV:
            # Invalid cross-device link error: copy-and-delete instead
            shutil.move(src, dst)
        else:
            # Bare raise preserves the original exception and traceback
            raise
1782
@contextmanager
def environment(**envvars):
    """
    Context manager to selectively update the environment with the specified mapping.

    On exit each touched variable is restored to its previous value, or
    removed if it was not set beforehand.
    """
    # Remember the prior value (or None for "was unset") of just the
    # variables we are about to touch.
    saved = {name: os.environ.get(name) for name in envvars}
    try:
        os.environ.update(envvars)
        yield
    finally:
        for name, previous in saved.items():
            if previous is None:
                os.environ.pop(name, None)
            else:
                os.environ[name] = previous
1798
def is_local_uid(uid=''):
    """
    Check whether uid is a local one or not.
    Can't use pwd module since it gets all UIDs, not local ones only.

    uid defaults to the current process uid. Returns True if the uid appears
    in /etc/passwd, False otherwise.
    """
    # Only treat the explicit "unset" values as missing: `not uid` would also
    # be true for uid 0 (root), silently substituting the current uid.
    if uid == '' or uid is None:
        uid = os.getuid()
    uid = str(uid)
    with open('/etc/passwd', 'r') as f:
        for line in f:
            fields = line.split(':')
            # Skip malformed lines that don't have a uid field
            if len(fields) < 3:
                continue
            if uid == fields[2]:
                return True
    return False
1814
def mkstemp(suffix=None, prefix=None, dir=None, text=False):
    """
    Generates a unique filename, independent of time.

    mkstemp() in glibc (at least) generates unique file names based on the
    current system time. When combined with highly parallel builds, and
    operating over NFS (e.g. shared sstate/downloads) this can result in
    conflicts and race conditions.

    This function adds additional entropy to the file name so that a collision
    is independent of time and thus extremely unlikely.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
    # 20 random characters appended to the prefix provide the extra entropy.
    salt = "".join(random.choice(alphabet) for _ in range(20))
    base = prefix if prefix else tempfile.gettempprefix()
    return tempfile.mkstemp(suffix=suffix, prefix=base + salt, dir=dir, text=text)
1833
def path_is_descendant(descendant, ancestor):
    """
    Returns True if the path `descendant` is a descendant of `ancestor`
    (including being equivalent to `ancestor` itself). Otherwise returns False.
    Correctly accounts for symlinks, bind mounts, etc. by using
    os.path.samestat() to compare paths

    May raise any exception that os.stat() raises
    """
    target = os.stat(ancestor)

    # Walk upward one directory component at a time, comparing each level
    # against the ancestor by inode identity rather than string equality.
    current = os.path.abspath(descendant).rstrip("/")
    while current:
        if os.path.samestat(os.stat(current), target):
            return True
        current = os.path.dirname(current).rstrip("/")

    return False
1856
# If we don't have a timeout of some kind and a process/thread exits badly (for example
# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
@contextmanager
def lock_timeout(lock):
    """Acquire `lock` with a five minute timeout, hard-exiting the process on failure."""
    acquired = lock.acquire(timeout=300)
    try:
        if not acquired:
            # Probable deadlock (e.g. the holder was killed while holding the
            # lock) -- exiting is better than hanging forever.
            os._exit(1)
        yield acquired
    finally:
        lock.release()
1869