xref: /openbmc/openbmc/poky/bitbake/lib/bb/utils.py (revision 82c905dc)
1"""
2BitBake Utility Functions
3"""
4
5# Copyright (C) 2004 Michael Lauer
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import re, fcntl, os, string, stat, shutil, time
11import sys
12import errno
13import logging
14import bb
15import bb.msg
16import multiprocessing
17import fcntl
18import importlib
19from importlib import machinery
20import itertools
21import subprocess
22import glob
23import fnmatch
24import traceback
25import errno
26import signal
27import collections
28import copy
29from subprocess import getstatusoutput
30from contextlib import contextmanager
31from ctypes import cdll
32
33logger = logging.getLogger("BitBake.Util")
34python_extensions = importlib.machinery.all_suffixes()
35
36
def clean_context():
    """Return a fresh copy of the default globals dict exposed to
    executed metadata code (see better_exec/better_eval)."""
    base_modules = {
        "os": os,
        "bb": bb,
        "time": time,
    }
    return base_modules
43
def get_context():
    # Return the shared module-level context dict used as globals when
    # executing metadata Python code (see better_exec/better_eval).
    return _context
46
47
def set_context(ctx):
    """Replace the module-level context dict used by better_exec/better_eval.

    NOTE: the previous implementation assigned to a function-local
    variable, making this a silent no-op; the name must be declared
    global for the assignment to take effect.
    """
    global _context
    _context = ctx
50
# Default context used in better_exec and better_eval: a dict of the
# modules made visible to executed metadata code.
_context = clean_context()
53
class VersionStringException(Exception):
    """Exception raised when an invalid version specification or
    comparison operator is found (see vercmp_string_op and
    explode_dep_versions2)."""
56
def explode_version(s):
    """Break a version string into a list of (weight, chunk) tuples.

    Numeric runs become (0, int), alphabetic runs (1, str), '~' becomes
    (-1, '~') so it sorts before everything else, and any other single
    character becomes (2, char).
    """
    digits_re = re.compile(r'^(\d+)(.*)$')
    letters_re = re.compile(r'^([a-zA-Z]+)(.*)$')
    pieces = []
    remaining = s
    while remaining:
        head = remaining[0]
        if head in string.digits:
            match = digits_re.match(remaining)
            pieces.append((0, int(match.group(1))))
            remaining = match.group(2)
        elif head in string.ascii_letters:
            match = letters_re.match(remaining)
            pieces.append((1, match.group(1)))
            remaining = match.group(2)
        else:
            pieces.append((-1, head) if head == '~' else (2, head))
            remaining = remaining[1:]
    return pieces
78
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)"""
    remainder = s.strip(" <>=")
    epoch = 0
    if ':' in remainder:
        fields = remainder.split(":")
        epoch = int(fields[0])
        remainder = fields[1]
    revision = ""
    if '-' in remainder:
        remainder, _, revision = remainder.rpartition("-")
    return (epoch, remainder, revision)
92
def vercmp_part(a, b):
    """Compare a single version component of two versions (e.g. PV with
    PV); return -1, 0 or 1."""
    parts_a = explode_version(a)
    parts_b = explode_version(b)
    # (0, None) marks an exhausted side; a missing chunk sorts below a
    # present one of equal weight.
    sentinel = (0, None)
    while True:
        left = parts_a.pop(0) if parts_a else sentinel
        right = parts_b.pop(0) if parts_b else sentinel
        if left == sentinel and right == sentinel:
            return 0
        (wa, va) = left
        (wb, vb) = right
        if wa != wb:
            return -1 if wa < wb else 1
        if va is None:
            return -1
        if vb is None:
            return 1
        if va != vb:
            return -1 if va < vb else 1
119
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) tuples; return a value
    <0, 0 or >0 like a classic cmp()."""
    epoch_a, ver_a, rev_a = ta
    epoch_b, ver_b, rev_b = tb

    # Epoch dominates; fall through to version then revision on ties.
    result = int(epoch_a or 0) - int(epoch_b or 0)
    if not result:
        result = vercmp_part(ver_a, ver_b)
    if not result:
        result = vercmp_part(rev_a, rev_b)
    return result
130
def vercmp_string(a, b):
    """Split two version strings and compare them; return <0, 0 or >0."""
    return vercmp(split_version(a), split_version(b))
135
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    # Map each accepted operator spelling onto a predicate over the cmp result.
    predicates = {
        '=':  lambda r: r == 0,
        '==': lambda r: r == 0,
        '<=': lambda r: r <= 0,
        '>=': lambda r: r >= 0,
        '>':  lambda r: r > 0,
        '>>': lambda r: r > 0,
        '<':  lambda r: r < 0,
        '<<': lambda r: r < 0,
        '!=': lambda r: r != 0,
    }
    if op not in predicates:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
    return predicates[op](res)
157
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            in_version = True
        if not in_version:
            deps.append(token)
        elif token.endswith(')'):
            # End of a "(op version)" group; discard the version info
            in_version = False
    return deps
181
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.

    Each dependency name maps to a (possibly empty) list of
    "<op> <version>" constraint strings. Raises VersionStringException
    when a version specification has an invalid or missing operator.
    """
    r = collections.OrderedDict()
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
            # Fix: the original called i.strip() and discarded the result;
            # bind it so any whitespace is actually removed.
            i = i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        # Plain dependency name token
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
248
def explode_dep_versions(s):
    """Like explode_dep_versions2, but each dependency maps to a single
    constraint string (or None) instead of a list; warns when more than
    one constraint is present for a dependency."""
    deps = explode_dep_versions2(s)
    for name, versions in deps.items():
        if not versions:
            deps[name] = None
        else:
            if len(versions) > 1:
                bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values.  explode_dep_versions cannot cope with this." % (name, s))
            deps[name] = versions[0]
    return deps
259
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    parts = []
    for name, constraint in deps.items():
        if not constraint:
            parts.append(name)
        elif isinstance(constraint, list):
            # explode_dep_versions2 output: one entry per constraint
            parts.extend("%s (%s)" % (name, v) for v in constraint)
        else:
            parts.append("%s (%s)" % (name, constraint))
    separator = ", " if commasep else " "
    return separator.join(parts)
278
279def _print_trace(body, line):
280    """
281    Print the Environment of a Text Body
282    """
283    error = []
284    # print the environment of the method
285    min_line = max(1, line-4)
286    max_line = min(line + 4, len(body))
287    for i in range(min_line, max_line + 1):
288        if line == i:
289            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
290        else:
291            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
292    return error
293
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method will log the offending lines
    when compilation fails.

    Compiled code objects are cached in bb.methodpool keyed on the
    function text. Raises bb.BBHandledException after logging details
    of any compilation error.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        # Fix: only SyntaxError and friends carry a lineno attribute. The
        # previous code read e.lineno before checking hasattr(), raising
        # AttributeError for other exception types while reporting.
        if hasattr(e, "lineno"):
            error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
            error.append("The code lines resulting in this error were:")
            # e.lineno: line's position in realfile
            # lineno: function name's "position -1" in realfile
            # e.lineno - lineno: line's relative position in function
            error.extend(_print_trace(body, e.lineno - lineno))
        else:
            error.append("Error in compiling python function in %s:\n" % realfile)
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
329
def _print_exception(t, value, tb, realfile, text, context):
    """Log a detailed report for an exception raised while executing
    compiled metadata code (see better_exec).

    t/value/tb: the sys.exc_info() triple
    realfile: the filename reported in diagnostics
    text: the source string that was executed
    """
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            # NOTE(review): the second clause compares the next frame's
            # *function name* against the current frame's *filename* --
            # possibly intended to be tbextract[level][2]; confirm before
            # changing.
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): nexttb is re-read from the same tb each pass, so
            # only 'level' advances; loop termination relies on the
            # len(tbextract) bound above.
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spwaning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        # Always emit whatever was collected, even if the formatting above
        # itself raised.
        logger.error("\n".join(error))
382
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will log the lines that are
    responsible for any error raised while executing *code*.

    code: a code object, or source text (compiled via better_compile)
    context: dict used as the locals namespace for exec
    text: original source used for error reporting (defaults to code)
    realfile: filename reported in diagnostics
    pythonexception: if True, re-raise the original exception instead of
        wrapping it in bb.BBHandledException
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        # Source string rather than a code object: compile it first
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            # Never let a failure in the error reporter mask the original error
            logger.error("Exception handler error: %s" % str(e))

        e = bb.BBHandledException(e)
        raise e
410
def simple_exec(code, context):
    """Execute *code* with the shared global context as globals and
    *context* as locals, without better_exec's error reporting."""
    exec(code, get_context(), context)
413
def better_eval(source, locals, extraglobals = None):
    """Evaluate *source* with the shared global context as globals,
    optionally augmented by *extraglobals* (which is merged into a copy,
    leaving the shared context untouched)."""
    ctx = get_context()
    if extraglobals:
        ctx = copy.copy(ctx)
        ctx.update(extraglobals)
    return eval(source, ctx, locals)
421
@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks."""
    held = []
    for name in files or []:
        held.append(bb.utils.lockfile(name))

    try:
        yield
    finally:
        # Release in acquisition order, unconditionally
        for lock in held:
            bb.utils.unlockfile(lock)
435
@contextmanager
def timeout(seconds):
    """Context manager that arms a SIGALRM for *seconds* around the body.

    The handler deliberately does nothing: the alarm's effect is to
    interrupt a blocking system call in the body.
    NOTE(review): since PEP 475 many interrupted syscalls are retried
    automatically, so callers must rely on specific EINTR-visible calls
    -- confirm how this is used. Usable from the main thread only
    (signal module restriction).
    """
    def timeout_handler(signum, frame):
        # Intentionally empty: we only want the syscall interruption
        pass

    original_handler = signal.signal(signal.SIGALRM, timeout_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        # Cancel any pending alarm before restoring the previous handler
        signal.alarm(0)
        signal.signal(signal.SIGALRM, original_handler)
449
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        shared: True to take a shared (read) lock instead of exclusive
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    Returns None when retry is False and the lock could not be taken.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        # Single-shot mode: make flock() fail immediately instead of blocking
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                # unlockfile() deletes the file, so another process may have
                # recreated it meanwhile; only return if the path still
                # refers to the inode we actually locked.
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES:
                # NOTE(review): the format arguments appear transposed (the
                # strerror is printed as the lock name); confirm before
                # changing the message.
                logger.error("Unable to acquire lock '%s', %s",
                             e.strerror, name)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
508
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()

    Attempts to remove the lock file from disk first (see the cleanup
    comment in lockfile()), then drops the flock and closes the handle.
    """
    try:
        # If we had a shared lock, we need to promote to exclusive before
        # removing the lockfile. Attempt this, ignore failures.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
522
523def _hasher(method, filename):
524    import mmap
525
526    with open(filename, "rb") as f:
527        try:
528            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
529                for chunk in iter(lambda: mm.read(8192), b''):
530                    method.update(chunk)
531        except ValueError:
532            # You can't mmap() an empty file so silence this exception
533            pass
534    return method.hexdigest()
535
536
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    from hashlib import md5
    return _hasher(md5(), filename)
543
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    from hashlib import sha256
    return _hasher(sha256(), filename)
551
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    from hashlib import sha1
    return _hasher(sha1(), filename)
558
def sha384_file(filename):
    """
    Return the hex string representation of the SHA384 checksum of the filename
    """
    from hashlib import sha384
    return _hasher(sha384(), filename)
565
def sha512_file(filename):
    """
    Return the hex string representation of the SHA512 checksum of the filename
    """
    from hashlib import sha512
    return _hasher(sha512(), filename)
572
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    exported = ('BB_TASKHASH', 'HOME', 'LOGNAME', 'PATH', 'PWD',
                'SHELL', 'TERM', 'USER', 'LC_ALL', 'BBSERVER')
    return list(exported)
588
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    bitbake_only = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return bitbake_only + preserved_envvars_exported()
598
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Removes every os.environ entry not listed in good_vars and returns
    a dict of the removed name -> value pairs.
    """

    removed_vars = {}
    for key in list(os.environ):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        del os.environ[key]

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        # NOTE(review): the leading 1 is presumably BitBake's debug-level
        # argument (bb.msg custom logger), not a standard logging parameter
        # -- confirm against bb.msg.
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
623
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # Everything is preserved
        return os.environ.keys()
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.append('BB_ENV_WHITELIST')
    else:
        approved = preserved_envvars()
    if 'BB_ENV_EXTRAWHITE' in os.environ:
        approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
        if 'BB_ENV_EXTRAWHITE' not in approved:
            approved.append('BB_ENV_EXTRAWHITE')
    return approved
642
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.

    Returns the dict of removed variables (empty when BB_PRESERVE_ENV
    is set).
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return {}
    return filter_environment(approved_variables())
653
def empty_environment():
    """
    Remove all variables from the environment.
    """
    for s in list(os.environ.keys()):
        # unsetenv makes the removal visible to C code and child processes;
        # deleting from os.environ keeps the Python-side view consistent
        os.unsetenv(s)
        del os.environ[s]
661
def build_environment(d):
    """
    Build an environment from all exported variables.

    Copies every variable in datastore d carrying the "export" flag
    into os.environ (None values become empty strings).
    """
    import bb.data
    for var in bb.data.keys(d):
        if d.getVarFlag(var, "export", False):
            os.environ[var] = d.getVar(var) or ""
671
672def _check_unsafe_delete_path(path):
673    """
674    Basic safeguard against recursively deleting something we shouldn't. If it returns True,
675    the caller should raise an exception with an appropriate message.
676    NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
677    with potentially disastrous results.
678    """
679    extra = ''
680    # HOME might not be /home/something, so in case we can get it, check against it
681    homedir = os.environ.get('HOME', '')
682    if homedir:
683        extra = '|%s' % homedir
684    if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
685        return True
686    return False
687
def remove(path, recurse=False, ionice=False):
    """Equivalent to rm -f or rm -rf

    path may be a glob pattern; nonexistent matches are ignored.
    recurse: also delete directories (like rm -rf)
    ionice: run the deletion under 'ionice -c 3' (idle I/O priority)
    Raises Exception when asked to recursively delete an obviously
    dangerous path such as / or a home directory.
    """
    if not path:
        return
    if recurse:
        # Fix: check each *expanded* match rather than the glob pattern, so
        # a pattern that resolves to a dangerous location is also refused.
        for name in glob.glob(path):
            if _check_unsafe_delete_path(name):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
        # shutil.rmtree(name) would be ideal but its too slow
        cmd = []
        if ionice:
            cmd = ['ionice', '-c', '3']
        subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            # Already gone is fine; anything else is a real error
            if exc.errno != errno.ENOENT:
                raise
708
def prunedir(topdir, ionice=False):
    """Recursively delete topdir and everything beneath it."""
    # Delete everything reachable from the directory named in 'topdir'.
    # CAUTION:  This is dangerous!
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    remove(topdir, recurse=True, ionice=ionice)
715
#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but thats possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
    """Return var with the first matching suffix from suffixes removed.

    The datastore parameter d is unused, but kept for API compatibility.
    """
    for candidate in suffixes:
        if candidate and var.endswith(candidate):
            return var[:-len(candidate)]
    return var
727
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    try:
        os.makedirs(directory)
    except OSError as err:
        # Swallow only "already exists as a directory"; re-raise anything
        # else (permission errors, an existing non-directory, ...)
        if err.errno == errno.EEXIST and os.path.isdir(directory):
            return
        raise err
738
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure. Move is
    atomic.

    newmtime: mtime to set on the destination (defaults to src's mtime)
    sstat: pre-computed os.lstat() of src, avoids a second stat call
    Returns the new mtime on success, the lstat result when a symlink
    was recreated, or None on failure.
    """

    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist; stat its parent so the same-device check
        # below still has something to compare against
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink at the destination first
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate it at dest rather than copying data
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        # Same filesystem: a plain rename is atomic and keeps attributes
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            # NOTE(review): a generic Exception may not carry .errno (only
            # OSError does) -- confirm before relying on this branch.
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        # Cross-device (or failed rename): copy the data then remove src
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                # Restore ownership/permissions that copyfile doesn't keep,
                # then remove the original
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        # Preserve the source's atime/mtime on the destination
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
831
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure.

    newmtime: mtime to set on dest (defaults to src's mtime)
    sstat: pre-computed os.lstat() of src
    Returns the new mtime on success, the lstat result when a symlink
    was recreated, or False on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist; note that and stat its parent instead
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove an existing symlink at the destination first
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate it rather than copying file data
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            if srcchown:
                # Restore the original mode and timestamps on src
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve the source's atime/mtime on the destination
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
911
def break_hardlinks(src, sstat = None):
    """
    Ensures src is the only hardlink to this file.  Other hardlinks,
    if any, are not affected (other than in their st_nlink value, of
    course).  Returns true on success and false on failure.

    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    if sstat[stat.ST_NLINK] == 1:
        # Already the only link; nothing to do
        return True
    # Copying the file over itself breaks the link: copyfile writes to a
    # temporary "#new" file and renames it into place.
    return copyfile(src, src, sstat=sstat)
928
def which(path, item, direction = 0, history = False, executable=False):
    """
    Search for `item` in the colon-separated directory list `path`
    (like $PATH). Returns the first match, or "" if none is found.
    If `direction` is non-zero the directory list is searched in reverse.
    If `executable` is True a match must be an executable regular file,
    otherwise it merely has to exist.
    If `history` is True, a (result, candidates) tuple is returned, where
    candidates is every path that was checked.
    """
    def usable(candidate):
        if executable:
            return os.path.isfile(candidate) and os.access(candidate, os.X_OK)
        return os.path.exists(candidate)

    searched = []
    dirs = (path or "").split(':')
    if direction != 0:
        dirs.reverse()

    for directory in dirs:
        candidate = os.path.join(directory, item)
        searched.append(candidate)
        if usable(candidate):
            # Return an absolute path so the result is usable regardless
            # of the caller's cwd.
            if not os.path.isabs(candidate):
                candidate = os.path.abspath(candidate)
            return (candidate, searched) if history else candidate

    return ("", searched) if history else ""
961
def to_boolean(string, default=None):
    """
    Interpret a yes/no-style string as a bool.

    Returns `default` for empty/None input, True for y/yes/1/true,
    False for n/no/0/false (case-insensitive), and raises ValueError
    for anything else.
    """
    if not string:
        return default

    value = string.lower()
    if value in ("y", "yes", "1", "true"):
        return True
    if value in ("n", "no", "0", "false"):
        return False
    raise ValueError("Invalid value for to_boolean: %s" % string)
973
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check whether a variable contains all of the specified values.

    Arguments:

    variable -- variable name; fetched/expanded with d.getVar(variable)
    and split on whitespace into a set.

    checkvalues -- a whitespace-separated string (split into a set) or
    any iterable (coerced into a set).

    truevalue -- returned when every checkvalue is present in variable.

    falsevalue -- returned when variable is unset/empty or some
    checkvalue is missing.

    d -- the data store.
    """
    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    if isinstance(checkvalues, str):
        wanted = set(checkvalues.split())
    else:
        wanted = set(checkvalues)
    return truevalue if wanted.issubset(present) else falsevalue
1004
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Check whether a variable contains any of the specified values.

    Arguments mirror contains(): truevalue is returned when at least
    one checkvalue appears in the variable, falsevalue when the variable
    is unset/empty or no checkvalue matches.
    """
    value = d.getVar(variable)
    if not value:
        return falsevalue
    present = set(value.split())
    if isinstance(checkvalues, str):
        wanted = set(checkvalues.split())
    else:
        wanted = set(checkvalues)
    return truevalue if wanted.intersection(present) else falsevalue
1017
def filter(variable, checkvalues, d):
    """Return the words of the variable that appear in checkvalues.

    Arguments:

    variable -- variable name; fetched/expanded with d.getVar(variable)
    and split on whitespace into a set.

    checkvalues -- a whitespace-separated string (split into a set) or
    any iterable (coerced into a set).

    d -- the data store.

    The result is the sorted intersection, joined with single spaces;
    '' when the variable is unset or empty.
    """
    value = d.getVar(variable)
    if not value:
        return ''
    words = set(value.split())
    if isinstance(checkvalues, str):
        wanted = set(checkvalues.split())
    else:
        wanted = set(checkvalues)
    return ' '.join(sorted(wanted & words))
1041
1042
def get_referenced_vars(start_expr, d):
    """
    :return: names of vars referenced in start_expr (recursively), in quasi-BFS
    order (variables within the same level are ordered arbitrarily)
    """
    # expandWithRefs against an empty datastore yields only the variables
    # referenced directly by an expression. It returns a set, which is why
    # the traversal is only "quasi"-BFS.
    blank_data = bb.data.init()

    def direct_refs(expression, already_seen):
        return blank_data.expandWithRefs(expression, None).references - already_seen

    seen = set()
    ret = []

    # Seed the queue from the start expression itself, which is already
    # expanded and so needs no getVar() lookup.
    queue = collections.deque()
    first_level = direct_refs(start_expr, seen)
    queue.extend(first_level)
    seen.update(first_level)

    while queue:
        name = queue.popleft()
        ret.append(name)
        # Subsequent entries are variable names: look up their unexpanded
        # values and scan those for further references.
        new_vars = direct_refs(d.getVar(name, False), seen)
        queue.extend(new_vars)
        seen.update(new_vars)
    return ret
1077
1078
def cpu_count():
    """Return the number of CPUs in the system (multiprocessing.cpu_count())."""
    return multiprocessing.cpu_count()
1081
def nonblockingfd(fd):
    """Switch the file descriptor fd into non-blocking mode."""
    current_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, current_flags | os.O_NONBLOCK)
1084
def process_profilelog(fn, pout = None):
    """
    Post-process cProfile data into a human-readable report.

    fn -- a profile data filename, or a list of filenames (in which case
        pout must be supplied).
    pout -- output report filename; defaults to fn + '.processed'.

    The report contains the stats sorted by internal time (with callers)
    followed by the stats sorted by cumulative time.
    """
    import pstats

    if not pout:
        pout = fn + '.processed'

    # Use a context manager so the report file is closed even if pstats
    # raises part-way through (the previous code leaked the handle then).
    with open(pout, 'w') as pout:
        if isinstance(fn, list):
            p = pstats.Stats(*fn, stream=pout)
        else:
            p = pstats.Stats(fn, stream=pout)
        p.sort_stats('time')
        p.print_stats()
        p.print_callers()
        p.sort_stats('cumulative')
        p.print_stats()

        pout.flush()
1104
1105#
1106# Was present to work around multiprocessing pool bugs in python < 2.7.3
1107#
def multiprocessingpool(*args, **kwargs):
    """
    Create a multiprocessing.Pool whose iterators remain responsive to
    signals.

    Works around a multiprocessing bug where signals to the worker
    processes would be delayed until the work completed: giving next()
    an (effectively infinite) timeout lets SIGINT/SIGTERM be processed.
    Originally also worked around pool bugs in python < 2.7.3.
    """
    import multiprocessing.pool

    def add_timeout(func):
        def with_timeout(self, timeout=None):
            if timeout is None:
                timeout = 1e100
            return func(self, timeout=timeout)
        return with_timeout

    multiprocessing.pool.IMapIterator.next = add_timeout(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
1122
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Build a namespace that carries the argument values, together with
    # the textual argument list used in the generated call.
    context = {}
    funcargs = []
    for idx, value in enumerate(args, start=1):
        argname = 'arg_%s' % idx
        context[argname] = value
        funcargs.append(argname)
    context.update(kwargs)
    funcargs.extend('%s=%s' % (kw, kw) for kw in kwargs.keys())
    # Compile and run a one-line stub that calls the target function and
    # captures its return value in the namespace.
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1145
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            # Shell/python function definition: "name() {"
            # NOTE: the whitespace character classes are [ \t] (space or
            # tab); an earlier version used '\\t' inside a raw string,
            # which matched a literal backslash or the letter 't' instead
            # of a tab.
            var_res[var] = re.compile(r'^(%s%s)[ \t]*\([ \t]*\)[ \t]*{' % (var[:-2].rstrip(), override_re))
        else:
            # Variable assignment with any of the bitbake operators
            # (=, ?=, ??=, :=, +=, =+, .=, =.)
            var_res[var] = re.compile(r'^(%s%s)[ \t]*[?+:.]*=[+.]*[ \t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []      # accumulated raw lines of the assignment being parsed
    newlines = []      # output lines
    in_var = None      # name of the variable currently being accumulated
    full_value = ''    # its value so far (continuation lines joined)
    var_end = ''       # terminator: closing quote, or '}' for functions

    def handle_var_end():
        # Invoke the callback for the just-completed variable/function and
        # emit the (possibly rewritten) assignment into newlines.
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    # Align continuation lines under the opening quote
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            # Continuation of a multi-line value or function body
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                # Strip the trailing backslash of the continuation line
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # Only a '}' that balances the braces ends the function
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        # Last regex group is the opening quote character
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Single-line value: handle it immediately
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
1332
1333
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as src:
        updated, newlines = edit_metadata(src, variables, varfunc)
    if not updated:
        return False
    with open(meta_file, 'w') as dst:
        dst.writelines(newlines)
    return True
1348
1349
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        # Normalise "path/" -> "path" so comparisons are stable
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        # Canonical form used for comparisons: no trailing separator and,
        # when HOME is an approved variable, '~' expanded
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        # Accept None, a single path string, or a list of paths
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        # First pass (read-only): record the operators used on BBLAYERS and
        # accumulate the existing layer list
        bblayercalls.append(op)
        if op == '=':
            # Plain assignment resets anything accumulated so far
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        # Second pass: apply removals, additions and the optional edit
        # callback to each BBLAYERS assignment
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    # removelayer may be a glob pattern (fnmatch)
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            # Consume the pending additions so they aren't appended again
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Nothing left in this += assignment; drop the line entirely
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    # Layers that are both already present and not being removed end up in
    # notadded rather than being added twice
    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
1475
1476
def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
    patterns = {c: d.getVar('BBFILE_PATTERN_%s' % c) or '' for c in collections}

    def path_to_layer(path):
        # Prefer the longest matching pattern so nested layers resolve to
        # the innermost one.
        best = None
        bestlen = 0
        for name, pattern in patterns.items():
            if len(pattern) > bestlen and re.match(pattern, path):
                bestlen = len(pattern)
                best = name
        return best

    result = None
    matched = False
    for entry in (d.getVar('BBFILES') or '').split():
        if fnmatch.fnmatch(filename, entry):
            matched = True
            result = path_to_layer(entry)

    if not matched:
        # Not covered by any BBFILES glob - probably a bbclass; match the
        # file path against the layer patterns directly.
        result = path_to_layer(filename)

    return result
1507
1508
# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    """Raised when the prctl() libc call reports failure."""
    pass

def signal_on_parent_exit(signame):
    """
    Arrange for the signal named signame (e.g. "SIGTERM") to be delivered
    to this process when its parent dies, using
    prctl(PR_SET_PDEATHSIG). Raises PrCtlError if prctl() fails.
    """
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    rc = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if rc != 0:
        raise PrCtlError('prctl failed with error code %s' % rc)
1524
1525#
1526# Manually call the ioprio syscall. We could depend on other libs like psutil
1527# however this gets us enough of what we need to bitbake for now without the
1528# dependency
1529#
1530_unamearch = os.uname()[4]
1531IOPRIO_WHO_PROCESS = 1
1532IOPRIO_CLASS_SHIFT = 13
1533
def ioprio_set(who, cls, value):
    """
    Set the IO priority of process `who` by invoking the ioprio_set
    syscall directly (avoids depending on e.g. psutil). cls is the
    scheduling class and value the priority within that class.
    """
    # The syscall number is architecture specific; only the architectures
    # listed here are supported.
    if _unamearch == "x86_64":
        syscall_nr = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        syscall_nr = 289
    elif _unamearch == "aarch64":
        syscall_nr = 30
    else:
        syscall_nr = None

    if not syscall_nr:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
        return

    ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
    rc = cdll['libc.so.6'].syscall(syscall_nr, IOPRIO_WHO_PROCESS, who, ioprio)
    if rc != 0:
        raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
1550
1551def set_process_name(name):
1552    from ctypes import cdll, byref, create_string_buffer
1553    # This is nice to have for debugging, not essential
1554    try:
1555        libc = cdll.LoadLibrary('libc.so.6')
1556        buf = create_string_buffer(bytes(name, 'utf-8'))
1557        libc.prctl(15, byref(buf), 0, 0, 0)
1558    except:
1559        pass
1560
def export_proxies(d):
    """
    Export the common proxy variables (http_proxy/https_proxy/ftp_proxy/
    no_proxy in both cases, plus GIT_PROXY_COMMAND) from the datastore d
    into os.environ. Variables already set in the environment are left
    untouched (environment wins over the datastore).

    Returns True if any of the variables ends up present in the
    environment, False otherwise.
    """
    variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                    'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                    'GIT_PROXY_COMMAND']
    exported = False

    for v in variables:
        # Membership test directly on os.environ (no need for .keys());
        # os is imported at module level, so no local import is required.
        if v in os.environ:
            exported = True
        else:
            v_proxy = d.getVar(v)
            if v_proxy is not None:
                os.environ[v] = v_proxy
                exported = True

    return exported
1580
1581
def load_plugins(logger, plugins, pluginpath):
    """
    Import every plugin module found in pluginpath and append it to the
    plugins list. If a module provides plugin_init(), it is called with
    the plugins list and its (non-None) return value is appended instead
    of the module itself.
    """
    def load_plugin(name):
        logger.debug(1, 'Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            return spec.loader.load_module()

    logger.debug(1, 'Loading plugins from %s...' % pluginpath)

    # Collect the unique module names matching any recognised Python
    # source/extension suffix in the plugin directory.
    names = set()
    for ext in python_extensions:
        for fn in glob.glob(os.path.join(pluginpath, '*' + ext)):
            names.add(os.path.splitext(os.path.basename(fn))[0])

    for name in names:
        if name == '__init__':
            continue
        plugin = load_plugin(name)
        if hasattr(plugin, 'plugin_init'):
            obj = plugin.plugin_init(plugins)
            plugins.append(obj or plugin)
        else:
            plugins.append(plugin)
1603
1604
class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""

    def __init__(self):
        # Capture everything at WARNING level and above.
        self.messages = []
        super().__init__(logging.WARNING)

    def emit(self, record):
        # Store the formatted text rather than the raw record.
        self.messages.append(bb.build.logformatter.format(record))

    def contains(self, message):
        return message in self.messages
1614
def is_semver(version):
    """
    Check whether `version` follows the Semantic Versioning 2.0.0
    specification (https://semver.org/spec/v2.0.0.html).
    """
    pattern = re.compile(
    r"""
    ^
    (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
    (?:-(
        (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
        (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
    ))?
    (?:\+(
        [0-9a-zA-Z-]+
        (?:\.[0-9a-zA-Z-]+)*
    ))?
    $
    """, re.VERBOSE)

    return pattern.match(version) is not None
1640