xref: /openbmc/openbmc/poky/meta/lib/oe/utils.py (revision ac13d5f3)
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import os
import subprocess
import multiprocessing
import traceback
import errno

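# Note: the 'bb' module referenced throughout this file is not imported here;
# it is made available by the BitBake environment in which these OE-Core
# library modules run.
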
def read_file(filename):
    try:
        with open(filename, "r") as f:
            return f.read().strip()
    except IOError:
        # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
        return ""

def ifelse(condition, iftrue = True, iffalse = False):
    if condition:
        return iftrue
    else:
        return iffalse

def conditional(variable, checkvalue, truevalue, falsevalue, d):
    if d.getVar(variable) == checkvalue:
        return truevalue
    else:
        return falsevalue

def vartrue(var, iftrue, iffalse, d):
    import oe.types
    if oe.types.boolean(d.getVar(var)):
        return iftrue
    else:
        return iffalse

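# Example (illustrative only, not part of the upstream module): conditional()
# and vartrue() above are normally called from recipe metadata via inline
# Python expansion. The configure flags shown here are hypothetical:
#
#   EXTRA_OECONF += "${@oe.utils.conditional('SITEINFO_ENDIAN', 'le', '--enable-le', '--enable-be', d)}"
#   EXTRA_OECONF += "${@oe.utils.vartrue('DEBUG_BUILD', '--enable-debug', '', d)}"
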
def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    if float(d.getVar(variable)) <= float(checkvalue):
        return truevalue
    else:
        return falsevalue

def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    result = bb.utils.vercmp_string(d.getVar(variable), checkvalue)
    if result <= 0:
        return truevalue
    else:
        return falsevalue

def both_contain(variable1, variable2, checkvalue, d):
    val1 = d.getVar(variable1)
    val2 = d.getVar(variable2)
    val1 = set(val1.split())
    val2 = set(val2.split())
    if isinstance(checkvalue, str):
        checkvalue = set(checkvalue.split())
    else:
        checkvalue = set(checkvalue)
    if checkvalue.issubset(val1) and checkvalue.issubset(val2):
        return " ".join(checkvalue)
    else:
        return ""

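# Example (illustrative only): both_contain() returns the checked value itself
# when it appears in both variables, so it can be expanded straight into a
# feature list from a recipe, e.g.:
#
#   ${@oe.utils.both_contain('DISTRO_FEATURES', 'MACHINE_FEATURES', 'bluetooth', d)}
#
# evaluates to "bluetooth" only if both DISTRO_FEATURES and MACHINE_FEATURES
# contain it, and to "" otherwise.
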
def set_intersect(variable1, variable2, d):
    """
    Expand both variables, interpret them as lists of strings, and return the
    intersection as a flattened string.

    For example, with d.getVar("S1") = "a b c" and d.getVar("S2") = "b c d":
    set_intersect("S1", "S2", d)
    => "b c"
    """
    val1 = set(d.getVar(variable1).split())
    val2 = set(d.getVar(variable2).split())
    return " ".join(val1 & val2)

def prune_suffix(var, suffixes, d):
    # See if var ends with any of the suffixes listed and
    # remove it if found; also strip any leading MLPREFIX.
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            var = var[:-len(suffix)]

    prefix = d.getVar("MLPREFIX")
    if prefix and var.startswith(prefix):
        var = var[len(prefix):]

    return var

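# Example (illustrative only, values are hypothetical): with MLPREFIX = "lib32-",
#
#   prune_suffix("lib32-gcc-cross", ["-cross", "-initial"], d)
#
# first drops the "-cross" suffix and then the "lib32-" multilib prefix,
# returning "gcc".
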
def str_filter(f, str, d):
    from re import match
    return " ".join([x for x in str.split() if match(f, x, 0)])

def str_filter_out(f, str, d):
    from re import match
    return " ".join([x for x in str.split() if not match(f, x, 0)])

def build_depends_string(depends, task):
    """Append a taskname to a string of dependencies as used by the [depends] flag"""
    return " ".join(dep + ":" + task for dep in depends.split())

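# Example (illustrative only):
#
#   build_depends_string("virtual/kernel u-boot", "do_deploy")
#
# returns "virtual/kernel:do_deploy u-boot:do_deploy", the form expected by
# task flags such as do_image[depends].
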
def inherits(d, *classes):
    """Return True if the metadata inherits any of the specified classes"""
    return any(bb.data.inherits_class(cls, d) for cls in classes)

def features_backfill(var, d):
    # This construct allows the addition of new features to the variable
    # specified as var.
    # Example for var = "DISTRO_FEATURES":
    # This construct allows the addition of new features to DISTRO_FEATURES
    # that, if not present, would disable existing functionality, without
    # disturbing distributions that have already set DISTRO_FEATURES.
    # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should
    # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED.
    features = (d.getVar(var) or "").split()
    backfill = (d.getVar(var+"_BACKFILL") or "").split()
    considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split()

    addfeatures = []
    for feature in backfill:
        if feature not in features and feature not in considered:
            addfeatures.append(feature)

    if addfeatures:
        d.appendVar(var, " " + " ".join(addfeatures))

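# Example (illustrative only, feature names are hypothetical): with
# DISTRO_FEATURES = "a b", DISTRO_FEATURES_BACKFILL = "b c d" and
# DISTRO_FEATURES_BACKFILL_CONSIDERED = "d", calling
# features_backfill("DISTRO_FEATURES", d) appends " c": "b" is already
# present and "d" has been explicitly considered (opted out).
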
def all_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Returns truevalue if *all* given features are set in DISTRO_FEATURES,
    else falsevalue. The features can be given as a single string or anything
    that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if oe.utils.all_distro_features(d, "foo bar"):
           bb.fatal("foo and bar are mutually exclusive DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.all_distro_features(d, "foo bar", "foo-and-bar.inc") }
    """
    return bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d)

def any_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Returns truevalue if at least *one* of the given features is set in DISTRO_FEATURES,
    else falsevalue. The features can be given as a single string or anything
    that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if not oe.utils.any_distro_features(d, "foo bar"):
           bb.fatal("foo, bar or both must be set in DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.any_distro_features(d, "foo bar", "foo-or-bar.inc") }
    """
    return bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d)

def parallel_make(d, makeinst=False):
    """
    Return the integer value for the number of parallel threads to use when
    building, scraped out of PARALLEL_MAKE. If no parallelization option is
    found, returns an empty string.

    e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer.
    """
    if makeinst:
        pm = (d.getVar('PARALLEL_MAKEINST') or '').split()
    else:
        pm = (d.getVar('PARALLEL_MAKE') or '').split()
    # look for '-j' and throw other options (e.g. '-l') away
    while pm:
        opt = pm.pop(0)
        if opt == '-j':
            v = pm.pop(0)
        elif opt.startswith('-j'):
            v = opt[2:].strip()
        else:
            continue

        return int(v)

    return ''

def parallel_make_argument(d, fmt, limit=None, makeinst=False):
    """
    Helper utility to construct a parallel make argument from the number of
    parallel threads specified in PARALLEL_MAKE.

    Returns the input format string `fmt` where a single '%d' will be expanded
    with the number of parallel threads to use. If `limit` is specified, the
    number of parallel threads will be no larger than it. If no parallelization
    option is found in PARALLEL_MAKE, returns an empty string.

    e.g. if PARALLEL_MAKE = "-j 10", parallel_make_argument(d, "-n %d") will return
    "-n 10"
    """
    v = parallel_make(d, makeinst)
    if v:
        if limit:
            v = min(limit, v)
        return fmt % v
    return ''

def packages_filter_out_system(d):
    """
    Return a list of packages from PACKAGES with the "system" packages such as
    PN-dbg PN-doc PN-locale-en-gb removed.
    """
    pn = d.getVar('PN')
    pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
    localepkg = pn + "-locale-"
    pkgs = []

    for pkg in d.getVar('PACKAGES').split():
        if pkg not in pkgfilter and localepkg not in pkg:
            pkgs.append(pkg)
    return pkgs

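# Example (illustrative only, package list is hypothetical): for PN = "foo" and
# PACKAGES = "foo foo-dbg foo-dev foo-doc foo-locale-en-gb foo-extra",
# packages_filter_out_system(d) returns ["foo-extra"]: the base package, the
# -dbg/-dev/-doc variants and any foo-locale-* package are filtered out.
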
def getstatusoutput(cmd):
    return subprocess.getstatusoutput(cmd)


def trim_version(version, num_parts=2):
    """
    Return just the first <num_parts> of <version>, split by periods.  For
    example, trim_version("1.2.3", 2) will return "1.2".
    """
    if type(version) is not str:
        raise TypeError("Version should be a string")
    if num_parts < 1:
        raise ValueError("Cannot split to parts < 1")

    parts = version.split(".")
    trimmed = ".".join(parts[:num_parts])
    return trimmed

def cpu_count(at_least=1, at_most=64):
    cpus = len(os.sched_getaffinity(0))
    return max(min(cpus, at_most), at_least)

def execute_pre_post_process(d, cmds):
    if cmds is None:
        return

    cmds = cmds.replace(";", " ")

    for cmd in cmds.split():
        bb.note("Executing %s ..." % cmd)
        bb.build.exec_func(cmd, d)

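# Example (illustrative only, the function names shown are examples): the
# commands string is typically a ';'- or space-separated list of shell/python
# function names collected from a variable such as ROOTFS_POSTPROCESS_COMMAND:
#
#   execute_pre_post_process(d, d.getVar("ROOTFS_POSTPROCESS_COMMAND"))
#
# runs each listed function (e.g. "write_package_manifest; license_create_manifest;")
# in turn via bb.build.exec_func().
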
def get_bb_number_threads(d):
    return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)

def multiprocess_launch(target, items, d, extraargs=None):
    max_process = get_bb_number_threads(d)
    return multiprocess_launch_mp(target, items, max_process, extraargs)

# For each item in items, call the function 'target' with item as the first
# argument, extraargs as the other arguments and handle any exceptions in the
# parent thread
def multiprocess_launch_mp(target, items, max_process, extraargs=None):

    class ProcessLaunch(multiprocessing.Process):
        def __init__(self, *args, **kwargs):
            multiprocessing.Process.__init__(self, *args, **kwargs)
            self._pconn, self._cconn = multiprocessing.Pipe()
            self._exception = None
            self._result = None

        def run(self):
            try:
                ret = self._target(*self._args, **self._kwargs)
                self._cconn.send((None, ret))
            except Exception as e:
                tb = traceback.format_exc()
                self._cconn.send((e, tb))

        def update(self):
            if self._pconn.poll():
                (e, tb) = self._pconn.recv()
                if e is not None:
                    self._exception = (e, tb)
                else:
                    self._result = tb

        @property
        def exception(self):
            self.update()
            return self._exception

        @property
        def result(self):
            self.update()
            return self._result

    launched = []
    errors = []
    results = []
    items = list(items)
    while (items and not errors) or launched:
        if not errors and items and len(launched) < max_process:
            args = (items.pop(),)
            if extraargs is not None:
                args = args + extraargs
            p = ProcessLaunch(target=target, args=args)
            p.start()
            launched.append(p)
        for q in launched:
            # Have to manually call update() to avoid deadlocks. The pipe can be full and
            # transfer stalled until we try and read the results object but the subprocess won't exit
            # as it still has data to write (https://bugs.python.org/issue8426)
            q.update()
            # The finished processes are joined when calling is_alive()
            if not q.is_alive():
                if q.exception:
                    errors.append(q.exception)
                if q.result:
                    results.append(q.result)
                launched.remove(q)
    # Paranoia doesn't hurt
    for p in launched:
        p.join()
    if errors:
        msg = ""
        for (e, tb) in errors:
            if isinstance(e, subprocess.CalledProcessError) and e.output:
                msg = msg + str(e) + "\n"
                msg = msg + "Subprocess output:"
                msg = msg + e.output.decode("utf-8", errors="ignore")
            else:
                msg = msg + str(e) + ": " + str(tb) + "\n"
        bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg)
    return results

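# Example (illustrative sketch, not part of the upstream module): run a worker
# function over a list of items with up to one process per configured BitBake
# thread, collecting per-item results and turning any child exception into a
# bb.fatal() in the parent. 'checksum_file' and 'paths' are hypothetical names:
#
#   def checksum_file(path, method):
#       return (path, bb.utils.sha256_file(path)) if method == "sha256" else (path, bb.utils.md5_file(path))
#
#   results = oe.utils.multiprocess_launch(checksum_file, paths, d, extraargs=("sha256",))
#
# Each item from 'paths' becomes the first argument to checksum_file(), the
# extraargs tuple supplies the remaining arguments, and the list of per-item
# return values is returned once all children have exited.
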
def squashspaces(string):
    import re
    return re.sub(r"\s+", " ", string).strip()

def rprovides_map(pkgdata_dir, pkg_dict):
    # Map file -> pkg provider
    rprov_map = {}

    for pkg in pkg_dict:
        path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
        if not os.path.isfile(path_to_pkgfile):
            continue
        with open(path_to_pkgfile) as f:
            for line in f:
                if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'):
                    # List all components provided by pkg.
                    # Exclude version strings, i.e. those starting with (
                    provides = [x for x in line.split()[1:] if not x.startswith('(')]
                    for prov in provides:
                        if prov in rprov_map:
                            rprov_map[prov].append(pkg)
                        else:
                            rprov_map[prov] = [pkg]

    return rprov_map

def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
    output = []

    if ret_format == "arch":
        for pkg in sorted(pkg_dict):
            output.append("%s %s" % (pkg, pkg_dict[pkg]["arch"]))
    elif ret_format == "file":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["filename"], pkg_dict[pkg]["arch"]))
    elif ret_format == "ver":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
    elif ret_format == "deps":
        rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
        for pkg in sorted(pkg_dict):
            for dep in pkg_dict[pkg]["deps"]:
                if dep in rprov_map:
                    # There could be multiple providers within the image
                    for pkg_provider in rprov_map[dep]:
                        output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep))
                else:
                    output.append("%s|%s" % (pkg, dep))
    else:
        for pkg in sorted(pkg_dict):
            output.append(pkg)

    output_str = '\n'.join(output)

    if output_str:
        # make sure last line is newline terminated
        output_str += '\n'

    return output_str

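# Example (illustrative only, package data is hypothetical): with a pkg_dict
# entry {"busybox": {"arch": "core2-64", "ver": "1.36.1-r0", "deps": ["libc6"]}},
# format_pkg_list(pkg_dict, "ver") yields lines like "busybox core2-64 1.36.1-r0",
# while the "deps" format emits "pkg|dep" lines, or "pkg|provider * dep [RPROVIDES]"
# when the dependency is satisfied via an RPROVIDES mapping from pkgdata_dir.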

# Helper function to get the host compiler version
# Do not assume the compiler is gcc
def get_host_compiler_version(d, taskcontextonly=False):
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]
    try:
        env = os.environ.copy()
        # datastore PATH does not contain session PATH as set by environment-setup-...
        # this breaks the install-buildtools use-case
        # env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, \
                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return compiler, version


def host_gcc_version(d, taskcontextonly=False):
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]
    try:
        env = os.environ.copy()
        env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, \
                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return "-%s" % version if version in ("4.8", "4.9") else ""


def get_multilib_datastore(variant, d):
    localdata = bb.data.createCopy(d)
    if variant:
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", variant + "-")
    else:
        origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
        if origdefault:
            localdata.setVar("DEFAULTTUNE", origdefault)
        overrides = localdata.getVar("OVERRIDES", False).split(":")
        overrides = ":".join([x for x in overrides if not x.startswith("virtclass-multilib-")])
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", "")
    return localdata

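# Example (illustrative only): code that needs to evaluate variables as a
# multilib variant would see them can do
#
#   localdata = oe.utils.get_multilib_datastore("lib32", d)
#   tune = localdata.getVar("DEFAULTTUNE")
#
# which returns a copy of the datastore with OVERRIDES extended by
# "virtclass-multilib-lib32" and MLPREFIX set to "lib32-"; passing an empty
# variant instead strips any multilib override and prefix.
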
class ImageQAFailed(Exception):
    def __init__(self, description, name=None, logfile=None):
        self.description = description
        self.name = name
        self.logfile = logfile

    def __str__(self):
        msg = 'Function failed: %s' % self.name
        if self.description:
            msg = msg + ' (%s)' % self.description

        return msg

def sh_quote(string):
    import shlex
    return shlex.quote(string)

def directory_size(root, blocksize=4096):
    """
    Calculate the size of the directory, taking into account hard links,
    rounding up every size to multiples of the blocksize.
    """
    def roundup(size):
        """
        Round the size up to the nearest multiple of the block size.
        """
        import math
        return math.ceil(size / blocksize) * blocksize

    def getsize(filename):
        """
        Get the size of the filename, not following symlinks, taking into
        account hard links.
        """
        stat = os.lstat(filename)
        if stat.st_ino not in inodes:
            inodes.add(stat.st_ino)
            return stat.st_size
        else:
            return 0

    inodes = set()
    total = 0
    for root, dirs, files in os.walk(root):
        total += sum(roundup(getsize(os.path.join(root, name))) for name in files)
        total += roundup(getsize(root))
    return total

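# Example (illustrative only, path is hypothetical):
#
#   directory_size("/path/to/rootfs", blocksize=4096)
#
# returns the total size in bytes with every file and directory rounded up to
# a whole number of 4096-byte blocks, counting each hard-linked inode only once.
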
# Update the mtime of a file, skip if permission/read-only issues
def touch(filename):
    try:
        os.utime(filename, None)
    except PermissionError:
        pass
    except OSError as e:
        # Handle read-only file systems gracefully
        if e.errno != errno.EROFS:
            raise
543