xref: /openbmc/openbmc/poky/meta/lib/oe/package.py (revision c9537f57ab488bf5d90132917b0184e2527970a5)
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import errno
8import fnmatch
9import itertools
10import os
11import shlex
12import re
13import glob
14import stat
15import mmap
16import subprocess
17import shutil
18
19import oe.cachedpath
20
def runstrip(file, elftype, strip, extra_strip_sections=''):
    # Function to strip a single file, called from split_and_strip_files below
    # A working 'file' (one which works on the target architecture)
    #
    # The elftype is a bit pattern (explained in is_elf below) to tell
    # us what type of file we're processing...
    # 4 - executable
    # 8 - shared library
    # 16 - kernel module

    # Temporarily make the file readable and writable so strip can operate;
    # the original mode is restored at the end.  (Previously the second test
    # was missing the "not", so every readable file was chmod'ed and an
    # unreadable one was not.)
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    stripcmd = [strip]
    skip_strip = False
    # kernel module
    if elftype & 16:
        if is_kernel_module_signed(file):
            # Stripping would invalidate the appended signature
            bb.debug(1, "Skip strip on signed module %s" % file)
            skip_strip = True
        else:
            stripcmd.extend(["--strip-debug", "--remove-section=.comment",
                "--remove-section=.note", "--preserve-dates"])
    # .so and shared library
    elif ".so" in file and elftype & 8:
        stripcmd.extend(["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"])
    # shared or executable:
    elif elftype & 8 or elftype & 4:
        stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"])
        if extra_strip_sections != '':
            for section in extra_strip_sections.split():
                stripcmd.extend(["--remove-section=" + section])

    stripcmd.append(file)
    bb.debug(1, "runstrip: %s" % stripcmd)

    if not skip_strip:
        # check_output so a failing strip raises with its stderr captured
        subprocess.check_output(stripcmd, stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)
65
# Detect .ko module by searching for "vermagic=" string
def is_kernel_module(path):
    # Open in binary mode (the data is binary, not text) and use the
    # portable access= form of mmap; context managers ensure both the
    # mapping and the file descriptor are released.
    with open(path, "rb") as f:
        with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as m:
            return m.find(b"vermagic=") >= 0
70
# Detect if .ko module is signed
def is_kernel_module_signed(path):
    # A signed module carries the marker string "Module signature appended"
    # within its final 28 bytes.  Guard against files shorter than 28 bytes
    # (the old unconditional seek(-28, 2) raised OSError on those), and
    # search the raw bytes directly instead of decoding char-by-char.
    with open(path, "rb") as f:
        f.seek(0, 2)
        f.seek(max(f.tell() - 28, 0))
        return b"Module signature appended" in f.read()
77
# Classify a file via file(1).  Return value is (path, bits) where the
# bits mean:
# 0 - not elf
# 1 - ELF
# 2 - stripped
# 4 - executable
# 8 - shared library
# 16 - kernel module
def is_elf(path):
    exec_type = 0
    desc = subprocess.check_output(["file", "-b", path], stderr=subprocess.STDOUT).decode("utf-8")

    if "ELF" in desc:
        exec_type |= 1
        if "not stripped" not in desc:
            exec_type |= 2
        if "executable" in desc:
            exec_type |= 4
        if "shared" in desc:
            exec_type |= 8
        # Relocatable objects under /lib/modules with a vermagic marker
        # are kernel modules
        if "relocatable" in desc and path.endswith(".ko") \
                and "/lib/modules/" in path and is_kernel_module(path):
            exec_type |= 16
    return (path, exec_type)
101
def is_static_lib(path):
    """Return True if path looks like a static (ar) archive."""
    if not path.endswith('.a') or os.path.islink(path):
        return False
    # The magic must include the first slash to avoid
    # matching golang static libraries
    magic = b'!<arch>\x0a/'
    with open(path, 'rb') as archive:
        return archive.read(len(magic)) == magic
111
def strip_execs(pn, dstdir, strip_cmd, libdir, base_libdir, max_process, qa_already_stripped=False):
    """
    Strip executable code (like executables, shared libraries) _in_place_
    - Based on sysroot_strip in staging.bbclass
    :param dstdir: directory in which to strip files
    :param strip_cmd: Strip command (usually ${STRIP})
    :param libdir: ${libdir} - strip .so files in this directory
    :param base_libdir: ${base_libdir} - strip .so files in this directory
    :param max_process: number of stripping processes started in parallel
    :param qa_already_stripped: Set to True if already-stripped' in ${INSANE_SKIP}
    This is for proper logging and messages only.
    """
    import stat, errno, oe.path, oe.utils

    elffiles = {}
    inodes = {}
    libdir = os.path.abspath(dstdir + os.sep + libdir)
    base_libdir = os.path.abspath(dstdir + os.sep + base_libdir)
    exec_mask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    #
    # First lets figure out all of the files we may have to process
    #
    checkelf = []
    inodecache = {}
    for root, dirs, files in os.walk(dstdir):
        for f in files:
            file = os.path.join(root, f)

            try:
                # Resolve the link target relative to dstdir so we lstat
                # the real file, not the symlink
                ltarget = oe.path.realpath(file, dstdir, False)
                s = os.lstat(ltarget)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                # Skip broken symlinks
                continue
            if not s:
                continue
            # Check its an excutable: has an exec bit, or a .so under a
            # library dir, or a kernel module
            if s[stat.ST_MODE] & exec_mask \
                    or ((file.startswith(libdir) or file.startswith(base_libdir)) and ".so" in f) \
                    or file.endswith('.ko'):
                # If it's a symlink, and points to an ELF file, we capture the readlink target
                if os.path.islink(file):
                    continue

                # It's a file (or hardlink), not a link
                # ...but is it ELF, and is it already stripped?
                checkelf.append(file)
                inodecache[file] = s.st_ino
    # Classify all candidates in parallel; is_elf() returns (path, type bits)
    results = oe.utils.multiprocess_launch_mp(is_elf, checkelf, max_process)
    for (file, elf_file) in results:
                #elf_file = is_elf(file)
                if elf_file & 1:
                    if elf_file & 2:
                        # Already stripped: warn (or just note, if the QA
                        # check is skipped) and leave it alone
                        if qa_already_stripped:
                            bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dstdir):], pn))
                        else:
                            bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dstdir):], pn))
                        continue

                    # Only process each inode once: additional hardlinks are
                    # re-pointed at the first copy seen
                    if inodecache[file] in inodes:
                        os.unlink(file)
                        os.link(inodes[inodecache[file]], file)
                    else:
                        # break hardlinks so that we do not strip the original.
                        inodes[inodecache[file]] = file
                        bb.utils.break_hardlinks(file)
                        elffiles[file] = elf_file

    #
    # Now strip them (in parallel)
    #
    sfiles = []
    for file in elffiles:
        elf_file = int(elffiles[file])
        sfiles.append((file, elf_file, strip_cmd))

    oe.utils.multiprocess_launch_mp(runstrip, sfiles, max_process)
192
# Ordered escape table used to make file names safe for use in variable
# names.  Order matters: "@" must be escaped first (and un-escaped last)
# because every escape token itself contains "@".
TRANSLATE = (
    ("@", "@at@"),
    (" ", "@space@"),
    ("\t", "@tab@"),
    ("[", "@openbrace@"),
    ("]", "@closebrace@"),
    ("_", "@underscore@"),
    (":", "@colon@"),
)

def file_translate(file):
    """Encode special characters in *file* using the TRANSLATE table."""
    encoded = file
    for plain, escaped in TRANSLATE:
        encoded = encoded.replace(plain, escaped)
    return encoded

def file_reverse_translate(file):
    """Decode a name previously encoded by file_translate()."""
    decoded = file
    for plain, escaped in reversed(TRANSLATE):
        decoded = decoded.replace(escaped, plain)
    return decoded
216
def filedeprunner(pkg, pkgfiles, rpmdeps, pkgdest):
    """Run the rpmdeps helper over pkgfiles and parse its output into
    per-file dependency data for package pkg.

    Returns a tuple (pkg, provides, requires) where provides and requires
    each map a translated file name to a list of dependency strings.
    """
    import re, subprocess, shlex

    # {translated file name: [dependency string, ...]}
    provides = {}
    requires = {}

    # Line introducing a new file in the rpmdeps output: a number followed
    # by the file path (presumably a count/size field -- TODO confirm
    # against the rpmdeps output format)
    file_re = re.compile(r'\s+\d+\s(.*)')
    # Dependency line: single type character ('R' or 'P') then the dep
    dep_re = re.compile(r'\s+(\S)\s+(.*)')
    # Version restriction, e.g. ">= 1.2"
    r = re.compile(r'[<>=]+\s+\S*')

    def process_deps(pipe, pkg, pkgdest, provides, requires):
        # 'file' tracks the most recently seen file header line; dependency
        # lines seen before any file header are ignored
        file = None
        for line in pipe.split("\n"):

            m = file_re.match(line)
            if m:
                file = m.group(1)
                # Make the path package-relative, then variable-name safe
                file = file.replace(pkgdest + "/" + pkg, "")
                file = file_translate(file)
                continue

            m = dep_re.match(line)
            if not m or not file:
                continue

            type, dep = m.groups()

            # 'R' lines are requires, 'P' lines are provides; anything
            # else is ignored
            if type == 'R':
                i = requires
            elif type == 'P':
                i = provides
            else:
               continue

            if dep.startswith("python("):
                continue

            # Ignore all perl(VMS::...) and perl(Mac::...) dependencies. These
            # are typically used conditionally from the Perl code, but are
            # generated as unconditional dependencies.
            if dep.startswith('perl(VMS::') or dep.startswith('perl(Mac::'):
                continue

            # Ignore perl dependencies on .pl files.
            if dep.startswith('perl(') and dep.endswith('.pl)'):
                continue

            # Remove perl versions and perl module versions since they typically
            # do not make sense when used as package versions.
            if dep.startswith('perl') and r.search(dep):
                dep = dep.split()[0]

            # Put parentheses around any version specifications.
            dep = r.sub(r'(\g<0>)',dep)

            if file not in i:
                i[file] = []
            i[file].append(dep)

        return provides, requires

    output = subprocess.check_output(shlex.split(rpmdeps) + pkgfiles, stderr=subprocess.STDOUT).decode("utf-8")
    provides, requires = process_deps(output, pkg, pkgdest, provides, requires)

    return (pkg, provides, requires)
282
283
def read_shlib_providers(d):
    """Read the *.list files from every SHLIBSDIRS directory and build a
    mapping of {library name: {ABI version: (providing package, extra)}}."""
    import re

    shlib_provider = {}
    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    list_re = re.compile(r'^(.*)\.list$')
    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        bb.debug(2, "Reading shlib providers in %s" % (dir))
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = list_re.match(file)
            if not m:
                continue
            dep_pkg = m.group(1)
            try:
                fd = open(os.path.join(dir, file))
            except IOError:
                # During a build unrelated shlib files may be deleted, so
                # handle files disappearing between the listdirs and open.
                continue
            with fd:
                for l in fd:
                    s = l.strip().split(":")
                    shlib_provider.setdefault(s[0], {})[s[1]] = (dep_pkg, s[2])
    return shlib_provider
313
314# We generate a master list of directories to process, we start by
315# seeding this list with reasonable defaults, then load from
316# the fs-perms.txt files
def fixup_perms(d):
    """Apply the ownership/permission policies from the fs-perms.txt
    configuration files (FILESYSTEM_PERMS_TABLES) to ${PKGD}: first seed
    standard directories with 0755 root:root, then load the config files,
    then create configured symlinks and fix modes/owners (optionally
    recursively)."""
    import pwd, grp

    cpath = oe.cachedpath.CachedPath()
    dvar = d.getVar('PKGD')

    # init using a string with the same format as a line as documented in
    # the fs-perms.txt file
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry_fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                oe.qa.handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid  = self._procuid(uid)
            self.gid  = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            # "-" (or an empty field) means "leave the mode unchanged"
            if not mode or (mode and mode == "-"):
                return None
            else:
                return int(mode,8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, id):
            if id is None or id == -1:
                return "-"
            else:
                return "%d" % id

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        str = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                str += " %s" % bb.utils.which(bbpath, conf_file)
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return str

    # path -> fs_perms_entry for permission entries and link entries
    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [    'base_prefix',
                'prefix',
                'exec_prefix',
                'base_bindir',
                'base_sbindir',
                'base_libdir',
                'datadir',
                'sysconfdir',
                'servicedir',
                'sharedstatedir',
                'localstatedir',
                'infodir',
                'mandir',
                'docdir',
                'bindir',
                'sbindir',
                'libexecdir',
                'libdir',
                'includedir' ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    oe.qa.handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    # Later entries win; a path lives in exactly one of the
                    # two tables at any time
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        # Absolute link targets are rooted at ${PKGD}; relative targets are
        # resolved next to the directory being replaced
        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            oe.qa.handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        bb.utils.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            # Recursively apply the dir and file policies below this directory
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)
550
551# Get a list of files from file vars by searching files under current working directory
552# The list contains symlinks, directories and normal files.
def files_from_filevars(filevars):
    """Expand FILES-style entries (globs, directories, absolute paths)
    into a list of matching paths relative to the current working
    directory.

    Returns (files, symlink_paths) where symlink_paths lists the original
    entries that were truncated because they live below a directory
    symlink.
    """
    cpath = oe.cachedpath.CachedPath()
    files = []
    for f in filevars:
        # Normalise every entry to a "./"-prefixed relative path
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f, recursive=True)
        if globbed:
            # Use the glob expansion only when it differs from the literal
            # entry; otherwise keep the entry itself
            if [ f ] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    # NOTE: 'files' is deliberately extended while being enumerated, so
    # directory contents appended below also get processed
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        if not cpath.islink(f):
            if cpath.isdir(f):
                # Recurse into directories by appending their entries to the
                # list currently being iterated
                newfiles = [ os.path.join(f,x) for x in os.listdir(f) ]
                if newfiles:
                    files += newfiles

    return files, symlink_paths
592
593# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d):
    """Return the sorted list of configuration files (from CONFFILES:<pkg>
    or CONFFILES) that actually exist as regular files in the package's
    install root, with paths rooted at /."""
    pkgdest = d.getVar('PKGDEST')
    root = os.path.join(pkgdest, pkg)
    cwd = os.getcwd()
    os.chdir(root)
    # try/finally so the working directory is restored even if
    # files_from_filevars (or anything else) raises
    try:
        conffiles = d.getVar('CONFFILES:%s' % pkg)
        if conffiles is None:
            conffiles = d.getVar('CONFFILES')
        if conffiles is None:
            conffiles = ""
        conffiles = conffiles.split()
        conf_orig_list = files_from_filevars(conffiles)[0]

        # Drop links and directories so conf_list only contains existing
        # normal files
        conf_list = []
        for f in conf_orig_list:
            if os.path.isdir(f):
                continue
            if os.path.islink(f):
                continue
            if not os.path.exists(f):
                continue
            # Strip the leading '.' so the path is rooted at /
            conf_list.append(f[1:])
    finally:
        os.chdir(cwd)
    return sorted(conf_list)
625
def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings
    """

    def fixutf(m):
        # Convert a <Uxxxx> codepoint (1-4 hex digits, as written in glibc
        # locale files) to its character.  The previous latin-1 /
        # unicode_escape round-trip only worked for exactly 4 digits even
        # though the regex accepts 1-4.
        cp = m.group(1)
        if cp:
            return chr(int(cp, 16))

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')
641
def split_locales(d):
    """Split locale files out of the main package into per-locale packages
    (<LOCALEBASEPN>-locale-<locale>), updating PACKAGES, FILES,
    RRECOMMENDS, RPROVIDES, SUMMARY, DESCRIPTION and optionally LICENSE
    and SECTION for each new package."""
    cpath = oe.cachedpath.CachedPath()
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    # Insert the per-locale packages where the monolithic -locale package
    # sat, or at the end of PACKAGES if there was none
    try:
        locale_index = packages.index(pn + '-locale')
        packages.pop(locale_index)
    except ValueError:
        locale_index = len(packages)

    lic = d.getVar("LICENSE:" + pn + "-locale")

    localepaths = []
    locales = set()
    # Each immediate subdirectory of a LOCALE_PATHS directory names a locale
    for localepath in (d.getVar('LOCALE_PATHS') or "").split():
        localedir = dvar + localepath
        if not cpath.isdir(localedir):
            bb.debug(1, 'No locale files in %s' % localepath)
            continue

        localepaths.append(localepath)
        with os.scandir(localedir) as it:
            for entry in it:
                if entry.is_dir():
                    locales.add(entry.name)

    if len(locales) == 0:
        bb.debug(1, "No locale files in this package")
        return

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.insert(locale_index, pkg)
        locale_index += 1
        files = []
        for localepath in localepaths:
            files.append(os.path.join(localepath, l))
        d.setVar('FILES:' + pkg, " ".join(files))
        d.setVar('RRECOMMENDS:' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES:' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY:' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION:' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
        if lic:
            d.setVar('LICENSE:' + pkg, lic)
        if locale_section:
            d.setVar('SECTION:' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS:%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS:%s' % pn, ' '.join(rdep))
712
def package_debug_vars(d):
    """Return the path/name settings for debug splitting selected by
    PACKAGE_DEBUG_SPLIT_STYLE.

    Keys: append, staticappend (file name suffixes), dir, staticdir
    (subdirectory names), libdir, staticlibdir (destination roots) and
    srcdir (where debug sources go; empty disables source extraction).
    """
    style = d.getVar('PACKAGE_DEBUG_SPLIT_STYLE')

    # Original OE-core, a.k.a. ".debug", style debug info is the default
    # and is identical to the explicit 'debug-with-srcpkg' style.
    debug_vars = {
        "append": "",
        "staticappend": "",
        "dir": "/.debug",
        "staticdir": "/.debug-static",
        "libdir": "",
        "staticlibdir": "",
        "srcdir": "/usr/src/debug",
    }

    if style == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debug_vars.update({
            "append": ".debug",
            "dir": "",
            "staticdir": "",
            "libdir": "/usr/lib/debug",
            "staticlibdir": "/usr/lib/debug-static",
        })
    elif style == 'debug-without-src':
        # ".debug" style, but without sources in /usr/src/debug
        debug_vars["srcdir"] = ""

    return debug_vars
760
761
def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    """Extract the set of source file paths from dwarfsrcfiles output.

    Source entries are the tab-indented lines; paths are normalised and
    de-duplicated (insertion order preserved via the dict)."""
    sources = {
        os.path.normpath(line.split()[0]): ""
        for line in dwarfsrcfiles_output.splitlines()
        if line.startswith("\t")
    }
    return sources.keys()
770
def source_info(file, d, fatal=True):
    """Run dwarfsrcfiles on *file* and return its debug source files as a
    list.  A non-zero exit other than 255 is fatal (or just logged when
    fatal=False)."""
    cmd = ["dwarfsrcfiles", file]
    retval = 0
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as exc:
        output = exc.output
        retval = exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval not in (0, 255):
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    return list(parse_debugsources_from_dwarfsrcfiles_output(output))
790
def splitdebuginfo(file, dvar, dv, d):
    # Function to split a single file into two components, one is the stripped
    # target system binary, the other contains any debugging information. The
    # two files are linked to reference each other.
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    sources = []

    # Splitting (objcopy) would invalidate an appended module signature
    if file.endswith(".ko") and file.find("/lib/modules/") != -1:
        if oe.package.is_kernel_module_signed(file):
            bb.debug(1, "Skip strip on signed module %s" % file)
            return (file, sources)

    objcopy = d.getVar("OBJCOPY")

    # Ensure the file is readable and writable for the objcopy calls below;
    # the original mode is restored at the end.  (The second test previously
    # lacked the "not", chmod'ing every readable file and skipping the
    # unreadable case it was meant to handle.)
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    bb.utils.mkdirhier(os.path.dirname(debugfile))

    # Split the file: copy the debug sections out...
    subprocess.check_output([objcopy, '--only-keep-debug', file, debugfile], stderr=subprocess.STDOUT)

    # ...and set the debuglink to have the view of the file path on the target
    subprocess.check_output([objcopy, '--add-gnu-debuglink', debugfile, file], stderr=subprocess.STDOUT)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
837
def splitstaticdebuginfo(file, dvar, dv, d):
    # Unlike the function above, there is no way to split a static library
    # two components.  So to get similar results we will copy the unmodified
    # static library (containing the debug symbols) into a new directory.
    # We will then strip (preserving symbols) the static library in the
    # typical location.
    #
    # file: absolute path of the static library inside the PKGD tree
    # dvar: the PKGD root (used to compute the target-relative path)
    # dv:   dict from package_debug_vars(d) with "staticlibdir", "staticdir",
    #       "staticappend", "srcdir"
    # d:    the bitbake datastore
    #
    # return a mapping of files:debugsources

    src = file[len(dvar):]
    dest = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(src) + dv["staticappend"]
    debugfile = dvar + dest
    sources = []

    # Copy the file...
    bb.utils.mkdirhier(os.path.dirname(debugfile))
    #bb.note("Copy %s -> %s" % (file, debugfile))

    # Make sure the file is readable (for source_info/copy); the original
    # mode is restored once we are done.
    # (Fixed: the second os.access() term was missing its negation, which made
    # the condition true for every readable file.)
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    # We need to extract the debug src information here...
    if dv["srcdir"]:
        sources = source_info(file, d)

    # Copy the unmodified item to the debug directory
    shutil.copy2(file, debugfile)

    if newmode:
        os.chmod(file, origmode)

    return (file, sources)
877
def inject_minidebuginfo(file, dvar, dv, d):
    # Extract just the symbols from debuginfo into minidebuginfo,
    # compress it with xz and inject it back into the binary in a .gnu_debugdata section.
    # https://sourceware.org/gdb/onlinedocs/gdb/MiniDebugInfo.html
    #
    # file: absolute path of the (already stripped) binary inside the PKGD tree
    # dvar: the PKGD root
    # dv:   dict with the debug-split path components ("libdir", "dir", "append")
    # d:    the bitbake datastore
    #
    # Silently returns without injecting when there is no debuginfo file,
    # when the object is not an executable/shared library, or when no
    # suitable symbols are found.

    readelf = d.getVar('READELF')
    nm = d.getVar('NM')
    objcopy = d.getVar('OBJCOPY')

    # Scratch area for the intermediate .minidebug/.symlist/.xz files
    minidebuginfodir = d.expand('${WORKDIR}/minidebuginfo')

    # Recompute the path of the split debug file produced by splitdebuginfo()
    src = file[len(dvar):]
    dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
    debugfile = dvar + dest
    minidebugfile = minidebuginfodir + src + '.minidebug'
    bb.utils.mkdirhier(os.path.dirname(minidebugfile))

    # If we didn't produce debuginfo for any reason, we can't produce minidebuginfo either
    # so skip it.
    if not os.path.exists(debugfile):
        bb.debug(1, 'ELF file {} has no debuginfo, skipping minidebuginfo injection'.format(file))
        return

    # minidebuginfo does not make sense to apply to ELF objects other than
    # executables and shared libraries, skip applying the minidebuginfo
    # generation for objects like kernel modules.
    for line in subprocess.check_output([readelf, '-h', debugfile], universal_newlines=True).splitlines():
        if not line.strip().startswith("Type:"):
            continue
        # e.g. "Type: DYN (Shared object file)" -> "DYN (Shared object file)"
        elftype = line.split(":")[1].strip()
        if not any(elftype.startswith(i) for i in ["EXEC", "DYN"]):
            bb.debug(1, 'ELF file {} is not executable/shared, skipping minidebuginfo injection'.format(file))
            return
        break

    # Find non-allocated PROGBITS, NOTE, and NOBITS sections in the debuginfo.
    # We will exclude all of these from minidebuginfo to save space.
    remove_section_names = []
    for line in subprocess.check_output([readelf, '-W', '-S', debugfile], universal_newlines=True).splitlines():
        # strip the leading "  [ 1]" section index to allow splitting on space
        if ']' not in line:
            continue
        fields = line[line.index(']') + 1:].split()
        if len(fields) < 7:
            continue
        # Field positions follow readelf -W -S output: Name Type Address Off Size ES Flg ...
        name = fields[0]
        type = fields[1]
        flags = fields[6]
        # .debug_ sections will be removed by objcopy -S so no need to explicitly remove them
        if name.startswith('.debug_'):
            continue
        # 'A' (alloc) flag absent means the section is not loaded at runtime
        if 'A' not in flags and type in ['PROGBITS', 'NOTE', 'NOBITS']:
            remove_section_names.append(name)

    # List dynamic symbols in the binary. We can exclude these from minidebuginfo
    # because they are always present in the binary.
    dynsyms = set()
    for line in subprocess.check_output([nm, '-D', file, '--format=posix', '--defined-only'], universal_newlines=True).splitlines():
        dynsyms.add(line.split()[0])

    # Find all function symbols from debuginfo which aren't in the dynamic symbols table.
    # These are the ones we want to keep in minidebuginfo.
    keep_symbols_file = minidebugfile + '.symlist'
    found_any_symbols = False
    with open(keep_symbols_file, 'w') as f:
        for line in subprocess.check_output([nm, debugfile, '--format=sysv', '--defined-only'], universal_newlines=True).splitlines():
            # sysv format is '|'-separated: name|value|class|type|size|line|section
            fields = line.split('|')
            if len(fields) < 7:
                continue
            name = fields[0].strip()
            type = fields[3].strip()
            if type == 'FUNC' and name not in dynsyms:
                f.write('{}\n'.format(name))
                found_any_symbols = True

    if not found_any_symbols:
        bb.debug(1, 'ELF file {} contains no symbols, skipping minidebuginfo injection'.format(file))
        return

    # Remove any stale outputs from a previous run before regenerating
    bb.utils.remove(minidebugfile)
    bb.utils.remove(minidebugfile + '.xz')

    # -S strips all debug/symbol sections; --keep-symbols retains only our list
    subprocess.check_call([objcopy, '-S'] +
                          ['--remove-section={}'.format(s) for s in remove_section_names] +
                          ['--keep-symbols={}'.format(keep_symbols_file), debugfile, minidebugfile])

    subprocess.check_call(['xz', '--keep', minidebugfile])

    # Embed the compressed symbol table into the stripped binary
    subprocess.check_call([objcopy, '--add-section', '.gnu_debugdata={}.xz'.format(minidebugfile), file])
967
def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.
    #
    # debugsrcdir: target-relative directory the sources are copied into
    #              (e.g. /usr/src/debug); a false value disables the copy
    # sources:     iterable of source file paths gathered by source_info()
    # d:           the bitbake datastore

    cpath = oe.cachedpath.CachedPath()

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
           sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        # NOTE(review): strip, objcopy and workdir are fetched but never used
        # in this function.
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        sdir = d.getVar("S")
        cflags = d.expand("${CFLAGS}")

        # Build a map of source prefix -> replacement from any
        # -ffile-prefix-map=<old>=<new> entries in CFLAGS, ignoring the
        # recipe-sysroot mappings (those files are not ours to ship).
        prefixmap = {}
        for flag in cflags.split():
            if not flag.startswith("-ffile-prefix-map"):
                continue
            if "recipe-sysroot" in flag:
                continue
            flag = flag.split("=")
            prefixmap[flag[1]] = flag[2]

        # Record which components of debugsrcdir we had to create, so that
        # empty ones can be removed again at the end.
        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        for pmap in prefixmap:
            # Ignore files from the recipe sysroots (target and native)
            # NOTE(review): "LC_ALL=C ;" sets an unexported shell variable, so
            # it likely does not affect sort's collation — confirm intent.
            cmd =  "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " % sourcefile
            # We need to ignore files that are not actually ours
            # we do this by only paying attention to items from this package
            cmd += "fgrep -zw '%s' | " % prefixmap[pmap]
            # Remove prefix in the source paths
            cmd += "sed 's#%s/##g' | " % (prefixmap[pmap])
            cmd += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)" % (pmap, dvar, prefixmap[pmap])

            try:
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                # Can "fail" if internal headers/transient sources are attempted
                pass
            # cpio seems to have a bug with -lL together and symbolic links are just copied, not dereferenced.
            # Work around this by manually finding and copying any symbolic links that made it through.
            cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                    (dvar, prefixmap[pmap], dvar, prefixmap[pmap], pmap, dvar, prefixmap[pmap])
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # debugsources.list may be polluted from the host if we used externalsrc,
        # cpio uses copy-pass and may have just created a directory structure
        # matching the one from the host, if thats the case move those files to
        # debugsrcdir to avoid host contamination.
        # Empty dir structure will be deleted in the next step.

        # Same check as above for externalsrc
        if workdir not in sdir:
            if os.path.exists(dvar + debugsrcdir + sdir):
                cmd = "mv %s%s%s/* %s%s" % (dvar, debugsrcdir, sdir, dvar,debugsrcdir)
                subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories!  Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if its empty
        # (walk the recorded directories deepest-first)
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)
1051
1052
def process_split_and_strip_files(d):
    """Scan PKGD for ELF binaries and static libraries, split out their
    debug information and strip them.

    Controlled by INHIBIT_PACKAGE_STRIP, INHIBIT_PACKAGE_DEBUG_SPLIT,
    INHIBIT_PACKAGE_STRIP_FILES, PACKAGE_DEBUG_STATIC_SPLIT,
    PACKAGE_STRIP_STATIC and the 'minidebuginfo' DISTRO_FEATURE.

    Hardlinked files are identified by (st_dev, st_ino) so each set is
    processed exactly once, and the links are reconstructed for the
    generated debug files.  Also records PKGDEBUGSOURCES.
    """
    cpath = oe.cachedpath.CachedPath()

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    dv = package_debug_vars(d)

    #
    # First lets figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    # Scan unless both stripping AND debug splitting are inhibited
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        checkstatic = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if dv["append"] and file.endswith(dv["append"]):
                    continue
                if dv["dir"] and dv["dir"] in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue

                if oe.package.is_static_lib(file):
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkstatic[file] = (file, file_reference)
                    continue

                # Check its an executable
                # (any exec bit set, or a .so/.node under a lib dir, or
                # vmlinux/kernel-module naming)
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) \
                        or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) \
                        and (".so" in f or ".node" in f)) \
                        or (f.startswith('vmlinux') or ".ko" in f):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        # Classify the symlink targets in parallel
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        # Classify the regular candidate files in parallel
        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            # (elf_file is a bit pattern: bit 1 = is ELF, bit 2 = already stripped)
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP:' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        oe.qa.handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                    # break hardlink
                    bb.utils.break_hardlinks(file)
                    elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

        # Do the same hardlink processing as above, but for static libraries
        results = list(checkstatic.keys())

        # As above, sort the results.
        results.sort(key=lambda x: x[0])

        for file in results:
            # Use a reference of device ID and inode number to identify files
            file_reference = checkstatic[file][1]
            if file_reference in inodes:
                os.unlink(file)
                os.link(inodes[file_reference][0], file)
                inodes[file_reference].append(file)
            else:
                inodes[file_reference] = [file]
                # break hardlink
                bb.utils.break_hardlinks(file)
                staticlibs.append(file)
            # Modified the file so clear the cache
            cpath.updatecache(file)

    def strip_pkgd_prefix(f):
        # Return f relative to PKGD (used for the PKGDEBUGSOURCES keys)
        nonlocal dvar

        if f.startswith(dvar):
            return f[len(dvar):]

        return f

    #
    # First lets process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, dv, d))

        if dv["srcdir"] and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, dv, d))
            else:
                # No static split: still collect debug sources for the libs
                for file in staticlibs:
                    results.append( (file,source_info(file, d)) )

        d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})

        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                fpath = dvar + dest
                ftarget = dvar + dv["libdir"] + os.path.dirname(target) + dv["dir"] + "/" + os.path.basename(target) + dv["append"]
                if os.access(ftarget, os.R_OK):
                    bb.utils.mkdirhier(os.path.dirname(fpath))
                    # Only one hardlink of separated debug info file in each directory
                    if not os.access(fpath, os.R_OK):
                        #bb.note("Link %s -> %s" % (fpath, ftarget))
                        os.link(ftarget, fpath)
                elif (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                    # Fall back to the static-library debug file locations
                    deststatic = dv["staticlibdir"] + os.path.dirname(src) + dv["staticdir"] + "/" + os.path.basename(file) + dv["staticappend"]
                    fpath = dvar + deststatic
                    ftarget = dvar + dv["staticlibdir"] + os.path.dirname(target) + dv["staticdir"] + "/" + os.path.basename(target) + dv["staticappend"]
                    if os.access(ftarget, os.R_OK):
                        bb.utils.mkdirhier(os.path.dirname(fpath))
                        # Only one hardlink of separated debug info file in each directory
                        if not os.access(fpath, os.R_OK):
                            #bb.note("Link %s -> %s" % (fpath, ftarget))
                            os.link(ftarget, fpath)
                else:
                    bb.note("Unable to find inode link target %s" % (target))

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = dv["libdir"] + os.path.dirname(src) + dv["dir"] + "/" + os.path.basename(src) + dv["append"]
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + dv["dir"] + "/"
            ftarget += lbase + dv["append"]
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the dv["srcdir"] if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(dv["srcdir"], sources, d)
    #
    # End of debug splitting
    #

    #
    # Now lets go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            # 16 is the kernel-module/static elftype bit used by runstrip
            for f in staticlibs:
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    # Build "minidebuginfo" and reinject it back into the stripped binaries
    if bb.utils.contains('DISTRO_FEATURES', 'minidebuginfo', True, False, d):
        oe.utils.multiprocess_launch(inject_minidebuginfo, list(elffiles), d,
                                     extraargs=(dvar, dv, d))

    #
    # End of strip
    #
    os.chdir(oldcwd)
1312
1313
def populate_packages(d):
    """Distribute the files installed under PKGD into per-package trees
    under PKGDEST according to each package's FILES variable.

    Also orders PACKAGES (-src first, -dbg before the rest), auto-fills
    the -dbg package with .debug trees, drops packages with incompatible
    licenses, and raises QA errors for duplicate PACKAGES entries,
    invalid FILES values and installed-but-not-shipped files.
    """
    cpath = oe.cachedpath.CachedPath()

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dvar = d.getVar('PKGD')
    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)

    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')

    # If debug-with-srcpkg mode is enabled then add the source package if it
    # doesn't exist and add the source file contents to the source package.
    if split_source_package:
        src_package_name = ('%s-src' % d.getVar('PN'))
        if not src_package_name in packages:
            packages.append(src_package_name)
        d.setVar('FILES:%s' % src_package_name, '/usr/src/debug')

    # Sanity check PACKAGES for duplicates
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_dict = {}

    for i, pkg in enumerate(packages):
        if pkg in package_dict:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            oe.qa.handle_error("packages-list", msg, d)
        # Ensure the source package gets the chance to pick up the source files
        # before the debug package by ordering it first in PACKAGES. Whether it
        # actually picks up any source files is controlled by
        # PACKAGE_DEBUG_SPLIT_STYLE.
        elif pkg.endswith("-src"):
            package_dict[pkg] = (10, i)
        elif autodebug and pkg.endswith("-dbg"):
            package_dict[pkg] = (30, i)
        else:
            package_dict[pkg] = (50, i)
    packages = sorted(package_dict.keys(), key=package_dict.get)
    d.setVar('PACKAGES', ' '.join(packages))
    pkgdest = d.getVar('PKGDEST')

    # Files already claimed by an earlier package; a set since it is only
    # ever used for membership tests (a list here made lookups O(n))
    seen = set()

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    # Collect all .debug/.debug-static paths once; they are appended to the
    # -dbg package's file list below when autodebug is enabled
    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in packages:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES:%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            oe.qa.handle_error("files-invalid", msg, d)
            # Fixed: str.replace returns a new string; the previous code
            # discarded the result so the cleanup never actually happened
            filesvar = filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = oe.package.files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.add(file)

            def mkdir(src, dest, p):
                # Create dest/p mirroring the mode/ownership of src/p
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.add(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                # Create every missing component of 'paths' under dest
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            # Regular files are hardlinked (cheap); symlinks are copied
            if not cpath.islink(file):
                os.link(file, fpath)
                continue
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root,file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                         "parent directory structure does not exist. One of "
                         "its parent directories is a symlink whose target "
                         "directory is not included in the package." %
                         (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle excluding packages with incompatible licenses
    package_list = []
    skipped_pkgs = oe.license.skip_incompatible_package_licenses(d, packages)
    for pkg in packages:
        if pkg in skipped_pkgs:
            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, skipped_pkgs[pkg])
            oe.qa.handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    # Anything left in PKGD that no package claimed is a QA error
    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped != []:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP:' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n  " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            oe.qa.handle_error("installed-vs-shipped", msg, d)
def process_fixsymlinks(pkgfiles, d):
    """Resolve symlinks inside each package's PKGDEST tree and add an
    RDEPENDS entry whenever a link's target is shipped by a different
    package. Links whose target exists in no package are only noted."""
    cpath = oe.cachedpath.CachedPath()
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    # For every package, record its files relative to its install root and
    # any links whose resolved target is missing from its own tree.
    pkg_files = {}
    dangling_links = {}
    for pkg in packages:
        pkg_files[pkg] = []
        dangling_links[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            relative = path[len(inst_root):]
            pkg_files[pkg].append(relative)
            resolved = cpath.realpath(path, inst_root, True, assume_dir = True)
            if not cpath.lexists(resolved):
                dangling_links[pkg].append(os.path.normpath(resolved[len(inst_root):]))

    # Match each dangling target against the other packages' file lists.
    newrdepends = {}
    for pkg, links in dangling_links.items():
        for l in links:
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for candidate in packages:
                if l not in pkg_files[candidate]:
                    continue
                bb.debug(1, "target found in %s" % candidate)
                # A link satisfied by another package implies a runtime dep
                if candidate != pkg:
                    newrdepends.setdefault(pkg, []).append(candidate)
                break
            else:
                # No package ships the target at all
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    # Merge the discovered dependencies into each package's RDEPENDS.
    for pkg, extra in newrdepends.items():
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS:' + pkg) or "")
        for dep in extra:
            if dep not in rdepends:
                rdepends[dep] = []
        d.setVar('RDEPENDS:' + pkg, bb.utils.join_deps(rdepends, commasep=False))
1519
def process_filedeps(pkgfiles, d):
    """
    Collect perfile run-time dependency metadata
    Output:
     FILERPROVIDESFLIST:pkg - list of all files w/ deps
     FILERPROVIDES:filepath:pkg - per file dep

      FILERDEPENDSFLIST:pkg - list of all files w/ deps
      FILERDEPENDS:filepath:pkg - per file dep
    """
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    # Build the worker argument list, splitting each package's files into
    # batches of 100 so the parallel workers get evenly sized chunks.
    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS:' + pkg) == '1':
            continue
        # Skip package classes that never carry useful file-level deps
        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-') or pkg.endswith('-src'):
            continue
        allfiles = pkgfiles[pkg]
        for start in range(0, len(allfiles), 100):
            pkglist.append((pkg, allfiles[start:start + 100], rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)

    provides_files = {}
    requires_files = {}

    # Fold the per-batch results into the datastore and per-package lists.
    for (pkg, provides, requires) in processed:
        plist = provides_files.setdefault(pkg, [])
        rlist = requires_files.setdefault(pkg, [])

        for file in sorted(provides):
            plist.append(file)
            d.appendVar("FILERPROVIDES:" + file + ":" + pkg, " " + " ".join(provides[file]))

        for file in sorted(requires):
            rlist.append(file)
            d.appendVar("FILERDEPENDS:" + file + ":" + pkg, " " + " ".join(requires[file]))

    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
1576
def process_shlibs(pkgfiles, d):
    """
    Compute shared-library provides and run-time dependencies per package.

    Scans every packaged file for each package in PACKAGES (minus any in
    EXCLUDE_PACKAGES_FROM_SHLIBS), using objdump for ELF, otool for Mach-O
    and objdump for PE/mingw binaries.  Results:
      - SHLIBSWORKDIR/<pkg>.list records the sonames each package provides
      - PKGDEST/<pkg>.shlibdeps records the packages each package requires
      - pkg_postinst:<pkg> gains an ldconfig fragment when the package
        installs libraries into a ${baselib} directory
    Returns nothing; all output is via files and the datastore.
    """
    cpath = oe.cachedpath.CachedPath()

    # A recipe can opt out of shlib handling entirely.
    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    # Filenames containing ".so" are candidate libraries; libdir_re matches
    # directories whose last path component is ${baselib}.
    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    # Individual packages may also be excluded from shlib generation.
    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        oe.qa.handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        # Parse 'objdump -p' output for a single ELF file.  Returns
        # (needs_ldconfig, needed, sonames) where 'needed' is a set of
        # (soname, filepath, rpath-tuple) dependency entries and 'sonames'
        # is a set of (soname, dir, pkgver) entries this file provides.
        needs_ldconfig = False
        needed = set()
        sonames = set()
        # Directory of the file relative to this package's install root.
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + shlex.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
        rpath = tuple()
        # First pass: extract RPATH so NEEDED resolution can consider it;
        # $ORIGIN is expanded to the file's own directory.
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        # Second pass: record NEEDED (dependencies) and SONAME (provides).
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                # NOTE(review): 'needed' contains tuples, so this bare-name
                # membership test never matches; duplicate tuples are still
                # collapsed by the set itself.
                if dep not in needed:
                    needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if not prov in sonames:
                    # if library is private (only used by package) then do not build shlib for it
                    if not private_libs or len([i for i in private_libs if fnmatch.fnmatch(this_soname, i)]) == 0:
                        sonames.add(prov)
                # Libraries installed under ${baselib} need ldconfig run in
                # the package's postinst.
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
        return (needs_ldconfig, needed, sonames)

    def darwin_so(file, needed, sonames, pkgver):
        # Record provides/needs for a Mach-O library via otool.  Relies on
        # 'pkg' from the enclosing per-package loop and mutates 'needed' and
        # 'sonames' in place.
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                # NOTE(review): 'sonames' holds tuples, so this bare-name
                # check never matches; the set still deduplicates 'prov'.
                if not combo in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            # NOTE(review): the rpath list collected here is not used below.
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                for l in out.split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        # 'otool -L' lists the libraries the file links against.
        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            for l in out.split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name and name not in needed[pkg]:
                     needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, pkgver):
        # Record provides/needs for PE binaries.  Relies on 'pkg' from the
        # enclosing per-package loop; mutates 'needed' and 'sonames'.
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
            p = subprocess.Popen([d.expand("${OBJDUMP}"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    needed = {}

    # Seed the provider map with libraries registered by other recipes.
    shlib_provider = oe.package.read_shlib_providers(d)

    # Pass 1: per package, gather provided sonames and raw NEEDED entries.
    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV:' + pkg)
        if not pkgver:
            # NOTE(review): 'PV_' uses the old underscore override syntax;
            # presumably this was meant to be 'PV:' + pkg — confirm.
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        linuxlist = []
        for file in pkgfiles[pkg]:
                soname = None
                if cpath.islink(file):
                    continue
                if hostos.startswith("darwin"):
                    darwin_so(file, needed, sonames, pkgver)
                elif hostos.startswith("mingw"):
                    mingw_dll(file, needed, sonames, pkgver)
                elif os.access(file, os.X_OK) or lib_re.match(file):
                    # ELF candidates are batched and parsed in parallel below.
                    linuxlist.append(file)

        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                needs_ldconfig = needs_ldconfig or ldconfig

        # Record this package's provides and register them in shlib_provider,
        # warning when we displace a provider registered by another package.
        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if len(sonames):
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst:%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst:%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    # ASSUME_SHLIBS lets a recipe declare extra 'library:package[_version]'
    # providers that were not discovered by scanning files.
    assumed_libs = d.getVar('ASSUME_SHLIBS')
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    # Pass 2: resolve each package's NEEDED entries against the provider map
    # and write the result to PKGDEST/<pkg>.shlibdeps.
    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS:' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # if n is in private libraries, don't try to search provider for it
            # this could cause problem in case some abc.bb provides private
            # /opt/abc/lib/libfoo.so.1 and contains /usr/bin/abc depending on system library libfoo.so.1
            # but skipping it is still better alternative than providing own
            # version and then adding runtime dependency for the same system library
            if private_libs and len([i for i in private_libs if fnmatch.fnmatch(n[0], i)]) > 0:
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                # Candidate directories are tried in order: the binary's
                # RPATH, every registered provider directory, then the
                # default library search path.
                matches = set()
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    # Never add a dependency on ourselves.
                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if not dep in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        # Rewrite the .shlibdeps file from scratch on every run.
        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')
1844
def process_pkgconfig(pkgfiles, d):
    """
    Derive inter-package dependencies from packaged pkg-config (.pc) files.

    Writes SHLIBSWORKDIR/<pkg>.pclist with the modules each package provides
    and PKGDEST/<pkg>.pcdeps with the packages each package requires, using
    the Requires/Requires.private fields of the .pc files.
    """
    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        provided = pkgconfig_provided[pkg] = []
        needed = pkgconfig_needed[pkg] = []
        for file in sorted(pkgfiles[pkg]):
            m = pc_re.match(file)
            if not m:
                continue
            # A fresh datastore per .pc file holds its variable assignments
            # so field values can be expanded against them.
            pd = bb.data.init()
            provided.append(os.path.basename(m.group(1)))
            if not os.access(file, os.R_OK):
                continue
            with open(file, 'r') as f:
                lines = f.readlines()
            for l in lines:
                field = field_re.match(l)
                if field:
                    hdr = field.group(1)
                    exp = pd.expand(field.group(2))
                    if hdr in ('Requires', 'Requires.private'):
                        needed += exp.replace(',', ' ').split()
                        continue
                var = var_re.match(l)
                if var:
                    pd.setVar(var.group(1), pd.expand(var.group(2)))

    # Publish what this recipe's packages provide.
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg]:
            with open(pkgs_file, 'w') as f:
                for p in sorted(pkgconfig_provided[pkg]):
                    f.write('%s\n' % p)

    # Go from least to most specific since the last one found wins
    pclist_re = re.compile(r'^(.*)\.pclist$')
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = pclist_re.match(file)
            if not m:
                continue
            with open(os.path.join(dir, file)) as fd:
                pkgconfig_provided[m.group(1)] = [l.rstrip() for l in fd.readlines()]

    # Resolve each needed module to a providing package and write .pcdeps.
    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided:
                if n in pkgconfig_provided[k]:
                    found = True
                    if k != pkg and k not in deps:
                        deps.append(k)
            if not found:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')
1923
def read_libdep_files(d):
    """
    Read the per-package library dependency files back into a dict.

    For every package in PACKAGES, reads PKGDEST/<pkg>.shlibdeps, .pcdeps
    and .clilibdeps (when readable) and returns
    {pkg: {dependency-name: [version-constraints]}}, keeping the first
    version list seen for each dependency.
    """
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    # Bug fix: the original called l.rstrip() and discarded
                    # the result (strings are immutable), so the trailing
                    # newline was passed on to explode_dep_versions2.
                    deps = bb.utils.explode_dep_versions2(l.rstrip())
                    for dep in deps:
                        if dep not in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps
1940    return pkglibdeps
1941
def process_depchains(pkgfiles, d):
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS on the corresponding packages for its RDEPENDS.

    Example:  If package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one of a given suffix is specified, it will take the RRECOMMENDS
    based on the RDEPENDS of *all* other packages. If more than one of a given
    suffix is specified, it will only use the RDEPENDS of the single parent
    package.
    """

    packages  = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes  = (d.getVar('DEPCHAIN_PRE') or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
        # Add '<dep><suffix>' (or '<suffix><dep>') entries to
        # RRECOMMENDS:<pkg> for every build-time dependency in 'depends'.

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(depends):
            # Native/cross tools and virtual providers have no runtime
            # counterpart package to recommend.
            if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            # Strip -dev/-dbg so the modifier is applied to the base name.
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
        # Same as pkg_adddeprrecs, but driven by run-time dependencies.

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS:' + pkg) or "")

        for depend in sorted(rdepends):
            if depend.find('virtual-locale-') != -1:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS:%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS:%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        # Append preserving order, skipping duplicates.
        if dep not in list:
            list.append(dep)

    # All build-time dependencies of the recipe, deduplicated in order.
    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    # Union of run-time dependencies across every package of the recipe.
    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)

    # pkgs maps each modifier to {modified-pkg: (base-name, name-builder)}.
    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if not postfix in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if not prefix in pkgs:
                    pkgs[prefix] = {}
                # NOTE(review): this slices from the *end* of the name;
                # stripping a prefix would normally be pkg[len(prefix):]
                # — confirm intent.
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        # By default -dbg packages recommend the -dbg variants of the
        # libraries recorded in the libdep files; DEPCHAIN_DBGDEFAULTDEPS=1
        # (or inheriting packagegroup) switches to the generic behaviour.
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            # A package can opt out via the 'nodeprrecs' varflag.
            if d.getVarFlag('RRECOMMENDS:' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                # Sole package with this modifier: use the union of all
                # packages' RDEPENDS.
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                # Multiple packages share the modifier: restrict to the
                # RDEPENDS of this package's own base.
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
2062